In [1]:
import os, random, glob
import numpy as np
import pandas as pd
from PIL import Image
from sklearn.model_selection import train_test_split
from sklearn.metrics import log_loss
from sklearn.preprocessing import LabelEncoder
import matplotlib.pyplot as plt
from matplotlib import ticker
import seaborn as sns
%matplotlib inline
from keras.models import Sequential, Model, load_model
from keras.layers import GlobalAveragePooling2D, Flatten, Dropout, Dense
from keras.optimizers import Adam
from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils
from keras import backend as K
K.set_image_dim_ordering('tf')
In [2]:
from skimage.io import imread, imshow, imsave
from skimage import img_as_float
import pandas as pd
import numpy as np
import cv2
from skimage.util import crop
from skimage.transform import rotate
from skimage.transform import resize
import matplotlib.pyplot as plt
%matplotlib inline
import math
In [3]:
TRAIN_DIR = '../data/train/'
TEST_DIR = '../data/test_stg1/'
FISH_CLASSES = ['ALB', 'BET', 'DOL', 'LAG', 'NoF', 'OTHER', 'SHARK', 'YFT']
#FISH_CLASSES = ['ALB', 'BET', 'DOL', 'LAG', 'OTHER', 'SHARK', 'YFT']
modelStr = 'Classifier'
ROWS = 224
COLS = 224
BatchSize = 64
LearningRate = 1e-4
le = LabelEncoder()
le.fit(FISH_CLASSES)
le.transform(FISH_CLASSES)
Out[3]:
array([0, 1, 2, 3, 4, 5, 6, 7])
In [4]:
def deg_angle_between(x1,y1,x2,y2):
from math import atan2, degrees, pi
dx = x2 - x1
dy = y2 - y1
rads = atan2(dy,dx)
#rads %= 2*pi
degs = degrees(rads)
return(degs)
def get_rotated_cropped_fish(image,x1,y1,x2,y2):
(h,w) = image.shape[:2]
#calculate center and angle
center = ( (x1+x2) / 2,(y1+y2) / 2)
angle = np.floor(deg_angle_between(x1,y1,x2,y2))
#print('angle=' +str(angle) + ' ')
#print('center=' +str(center))
M = cv2.getRotationMatrix2D(center, angle, 1.0)
rotated = cv2.warpAffine(image, M, (w, h))
fish_length = np.sqrt((x1-x2)**2+(y1-y2)**2)
    # numpy slice indices must be integers; crop a square of side ~1.1 * fish_length around the center
    half = fish_length / 1.8
    cropped = rotated[int(max(center[1] - half, 0)):int(center[1] + half),
                      int(max(center[0] - half, 0)):int(center[0] + half)]
#imshow(image)
#imshow(rotated)
#imshow(cropped)
resized = resize(cropped,(ROWS,COLS))
return(resized)
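Before generating the full cropped training set, it helps to eyeball the rotate-and-crop on a single annotated image. The cell below is an optional sanity-check sketch (not part of the original run); it assumes the ALB annotation file and image paths used in the loading cell further down exist, and that the first entry has both head and tail points.
In [ ]:
# sanity check (sketch): rotate-and-crop one annotated ALB image and compare with the original
labels = pd.read_json('../data/annotation/alb_labels.json')
sample = labels.iloc[0]
pts = pd.DataFrame(sample.annotations)  # the two head/tail annotation points
img = imread(TRAIN_DIR + 'ALB/' + sample.filename)
crop_img = get_rotated_cropped_fish(img,
                                    np.floor(pts.iloc[0, 1]), np.floor(pts.iloc[0, 2]),
                                    np.floor(pts.iloc[1, 1]), np.floor(pts.iloc[1, 2]))
plt.figure(figsize=(10, 4))
plt.subplot(1, 2, 1); plt.imshow(img); plt.title('original')
plt.subplot(1, 2, 2); plt.imshow(crop_img); plt.title('rotated + cropped')
plt.show()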
In [5]:
#Loading data
import pickle
if os.path.exists('../data/data_train_annoCrop_{}_{}.pickle'.format(ROWS, COLS)):
    print('Found data_train_annoCrop_{}_{}.pickle; loading data from file.'.format(ROWS, COLS))
with open('../data/data_train_annoCrop_{}_{}.pickle'.format(ROWS, COLS), 'rb') as f:
data_train = pickle.load(f)
X_train = data_train['X_train']
y_train = data_train['y_train']
del data_train
else:
print ('Loading data from original images. Generating data_train_annoCrop_{}_{}.pickle.'.format(ROWS, COLS))
images = []
y_train = []
crop_classes=FISH_CLASSES[:]
crop_classes.remove('NoF')
crop_classes
for c in crop_classes:
labels = pd.read_json('../data/annotation/'+c.lower()+'_labels.json')
for i in range(len(labels)):
try:
img_filename = labels.iloc[i,2]
print(img_filename)
l1 = pd.DataFrame((labels[labels.filename==img_filename].annotations).iloc[0])
image = imread(TRAIN_DIR+c+'/'+img_filename)
images.append(get_rotated_cropped_fish(image,np.floor(l1.iloc[0,1]),np.floor(l1.iloc[0,2]),np.floor(l1.iloc[1,1]),np.floor(l1.iloc[1,2])))
print('success')
y_train.append(c)
            except Exception:
                # skip images whose annotations are missing or malformed
                pass
X_train = np.asarray(images, dtype=np.float32)
# One Hot Encoding Labels
y_train = le.transform(y_train)
y_train = np_utils.to_categorical(y_train)
#save data to file
data_train = {'X_train': X_train,'y_train': y_train }
with open('../data/data_train_annoCrop_{}_{}.pickle'.format(ROWS, COLS), 'wb') as f:
pickle.dump(data_train, f)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size=0.2, random_state=None, stratify=y_train)
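Since the crops are built only from annotated fish, the NoF class has no samples here; a quick look at the per-class counts after the split confirms both that and the stratification. This is an optional check, not in the original notebook; it assumes all eight label indices appear, so the one-hot matrix has eight columns matching le.classes_.
In [ ]:
# per-class sample counts in the train/validation splits (the NoF column should be all zeros)
print('train:', X_train.shape, ' valid:', X_valid.shape)
print(pd.DataFrame({'train': y_train.sum(axis=0).astype(int),
                    'valid': y_valid.sum(axis=0).astype(int)},
                   index=le.classes_))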
In [7]:
#data preprocessing
train_datagen = ImageDataGenerator(
#featurewise_center=True,
#featurewise_std_normalization=True,
#rescale=1./255,
#rotation_range=180,
#shear_range=np.pi/6.,
#zoom_range=[1,1.1],
#width_shift_range=0.1,
#height_shift_range=0.1,
#horizontal_flip=True,
vertical_flip=True)
#train_datagen.fit(X_train)
train_generator = train_datagen.flow(X_train, y_train, batch_size=BatchSize, shuffle=True, seed=None)
valid_datagen = ImageDataGenerator()
valid_generator = valid_datagen.flow(X_valid, y_valid, batch_size=BatchSize, shuffle=True, seed=None)
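It is worth pulling one batch from the generator to confirm the value range and the vertical-flip augmentation before committing to a long training run; the cell below is a small sketch of such a check.
In [ ]:
# inspect one augmented batch (sketch): value range and a few example crops with their labels
# label index maps back to FISH_CLASSES because the list is already in LabelEncoder's sorted order
x_batch, y_batch = next(train_generator)
print('batch shape:', x_batch.shape, ' value range: [{:.3f}, {:.3f}]'.format(x_batch.min(), x_batch.max()))
plt.figure(figsize=(12, 3))
for j in range(4):
    plt.subplot(1, 4, j + 1)
    plt.imshow(x_batch[j])
    plt.title(FISH_CLASSES[int(np.argmax(y_batch[j]))])
plt.show()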
In [8]:
#callbacks
early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=1, mode='auto')
model_checkpoint = ModelCheckpoint(filepath='./checkpoints/weights.{epoch:03d}-{val_loss:.4f}.hdf5', monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='auto')
learningrate_schedule = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5, verbose=1, mode='auto', epsilon=0.001, cooldown=0, min_lr=0)
tensorboard = TensorBoard(log_dir='./logs', histogram_freq=0, write_graph=True, write_images=True)
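One practical note: ModelCheckpoint and TensorBoard write into ./checkpoints and ./logs, and ModelCheckpoint will fail at save time if the checkpoint directory does not exist, so it is worth creating both before training starts.
In [ ]:
# create the output directories used by the callbacks if they are missing
for d in ['./checkpoints', './logs']:
    if not os.path.exists(d):
        os.makedirs(d)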
In [9]:
#stg1 training
from keras.applications.vgg16 import VGG16
optimizer = Adam(lr=LearningRate)
base_model = VGG16(weights='imagenet', include_top=False)
x = base_model.output
x = GlobalAveragePooling2D()(x)
x = Dense(256, init='glorot_normal', activation='relu')(x)
#x = Dropout(0.5)(x)
x = Dense(256, init='glorot_normal', activation='relu')(x)
#x = Dropout(0.5)(x)
predictions = Dense(len(FISH_CLASSES), init='glorot_normal', activation='softmax')(x)
# this is the model we will train
model = Model(input=base_model.input, output=predictions)
# first: train only the top layers (which were randomly initialized)
# i.e. freeze all convolutional VGG16 layers
for layer in base_model.layers:
layer.trainable = False
# compile the model (should be done *after* setting layers to non-trainable)
model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
# train the model on the new data for a few epochs
hist = model.fit_generator(train_generator, samples_per_epoch=len(X_train), nb_epoch=300, verbose=1,
callbacks=[early_stopping, model_checkpoint, learningrate_schedule, tensorboard],
validation_data=valid_generator, nb_val_samples=len(X_valid), nb_worker=3, pickle_safe=True)
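Before unfreezing any convolutional layers, it can be useful to record the stage-1 validation log loss as a baseline; the sketch below evaluates the current model on the held-out split (the validation images are already scaled to [0, 1] by the skimage resize, matching what the generator feeds the network).
In [ ]:
# baseline check (sketch): stage-1 validation log loss before fine-tuning
valid_preds = model.predict(X_valid, batch_size=BatchSize, verbose=1)
print('stage-1 validation log loss: {:.4f}'.format(log_loss(y_valid, valid_preds)))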
In [ ]:
#stg2 training: fine-tune the top of the VGG16 base together with the dense head
optimizer = Adam(lr=LearningRate)
# at this point the dense head is well trained, so we can start fine-tuning some of the
# convolutional layers of the VGG16 base: freeze the bottom layers and train the rest.
# visualize layer names and layer indices to decide how many layers to freeze:
for i, layer in enumerate(model.layers):
    print(i, layer.name)
# freeze the first 14 layers (input through block4_conv3) and unfreeze the rest,
# i.e. fine-tune block5 together with the Dense head:
for layer in model.layers[:14]:
    layer.trainable = False
for layer in model.layers[14:]:
    layer.trainable = True
# we need to recompile the model for these modifications to take effect
model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
# train again, this time fine-tuning the last VGG16 block alongside the top Dense layers
hist = model.fit_generator(train_generator, samples_per_epoch=len(X_train), nb_epoch=300, verbose=1,
callbacks=[early_stopping, model_checkpoint, learningrate_schedule, tensorboard],
validation_data=valid_generator, nb_val_samples=len(X_valid), nb_worker=3, pickle_safe=True)
In [ ]:
#resume training
files = glob.glob('./checkpoints/*')
val_losses = [float(f.split('-')[-1][:-5]) for f in files]
index = val_losses.index(min(val_losses))
print('Loading model from checkpoints file ' + files[index])
model = load_model(files[index])
hist = model.fit_generator(train_generator, samples_per_epoch=len(X_train), nb_epoch=300, verbose=1,
callbacks=[early_stopping, model_checkpoint, learningrate_schedule, tensorboard],
validation_data=valid_generator, nb_val_samples=len(X_valid), nb_worker=3, pickle_safe=True)
In [ ]:
#test submission
import datetime
if os.path.exists('../data/data_test_{}_{}.pickle'.format(ROWS, COLS)):
    print('Found data_test_{}_{}.pickle; loading test data from file.'.format(ROWS, COLS))
with open('../data/data_test_{}_{}.pickle'.format(ROWS, COLS), 'rb') as f:
data_test = pickle.load(f)
X_test = data_test['X_test']
test_files = data_test['test_files']
else:
print ('Loading test data from original images. Generating data_test_{}_{}.pickle.'.format(ROWS, COLS))
    test_files = sorted(os.listdir(TEST_DIR))
    X_test = np.ndarray((len(test_files), ROWS, COLS, 3), dtype=np.uint8)
    for i, im in enumerate(test_files):
        # read_image was not defined anywhere in this notebook; as a stand-in, read the image
        # with skimage and resize it with cv2, which keeps the uint8 range expected above
        img = imread(TEST_DIR + im)
        X_test[i] = cv2.resize(img, (COLS, ROWS), interpolation=cv2.INTER_CUBIC)
        if i % 300 == 0:
            print('Processed {} of {}'.format(i, len(test_files)))
data_test = {'X_test': X_test,'test_files': test_files }
with open('../data/data_test_{}_{}.pickle'.format(ROWS, COLS), 'wb') as f:
pickle.dump(data_test, f)
X_test = X_test / 255.
files = glob.glob('./checkpoints/*')
val_losses = [float(f.split('-')[-1][:-5]) for f in files]
index = val_losses.index(min(val_losses))
model = load_model(files[index])
test_preds = model.predict(X_test, batch_size=BatchSize, verbose=1)
#test_preds= test_preds / np.sum(test_preds,axis=1,keepdims=True)
submission = pd.DataFrame(test_preds, columns=FISH_CLASSES)
#submission.loc[:, 'image'] = pd.Series(test_files, index=submission.index)
submission.insert(0, 'image', test_files)
now = datetime.datetime.now()
info = modelStr + '{:.4f}'.format(min(val_losses))
sub_file = 'submission_' + info + '_' + str(now.strftime("%Y-%m-%d-%H-%M")) + '.csv'
submission.to_csv(sub_file, index=False)
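A quick check of the submission frame helps catch header or ordering mistakes before uploading: every row should be a probability vector over the eight classes, with the image column first. This is an optional sketch, not part of the original run.
In [ ]:
# sanity check (sketch): submission header/ordering and per-row probability sums
print(submission.head())
row_sums = submission[FISH_CLASSES].sum(axis=1)
print('row probability sums: min={:.4f}, max={:.4f}'.format(row_sums.min(), row_sums.max()))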
In [ ]:
# summarize history for accuracy
plt.figure(figsize=(15, 5))
plt.subplot(1, 2, 1)
plt.plot(hist.history['acc']); plt.plot(hist.history['val_acc']);
plt.title('model accuracy'); plt.ylabel('accuracy');
plt.xlabel('epoch'); plt.legend(['train', 'valid'], loc='upper left');
# summarize history for loss
plt.subplot(1, 2, 2)
plt.plot(hist.history['loss']); plt.plot(hist.history['val_loss']);
plt.title('model loss'); plt.ylabel('loss');
plt.xlabel('epoch'); plt.legend(['train', 'valid'], loc='upper left');
plt.show()