In [10]:
# Resolve the project home from the current working directory and point
# DATA_HOME_DIR at the Dogs-vs-Cats redux dataset beneath it.
import sys, os

current_dir = os.getcwd()
HOME_DIR = current_dir
DATA_HOME_DIR = '{}/data/redux/'.format(current_dir)
print(current_dir, DATA_HOME_DIR)
In [14]:
from utils import *
%matplotlib inline
batch_size = 64
no_of_epochs = 3
path = DATA_HOME_DIR
model_path = path+'model'
In [1]:
import resnet50
from resnet50 import Resnet50
In [3]:
# Build ResNet50 without its top dense layer so a custom head can be
# attached directly — no need to call model.pop() before fine-tuning.
rn0_wrapper = Resnet50(include_top=False)
rn0 = rn0_wrapper.model
# Inspect the convolutional output shape the new head must accept.
rn0.output_shape[1:]
Out[3]:
# Copied from utils.py for reference.
def get_classes(path):
    """Return (val classes, train classes, val one-hot labels, train one-hot
    labels, val filenames, train filenames, test filenames) for the dataset
    rooted at `path`. batch_size=1 with shuffle=False keeps everything in
    directory order."""
    trn_batches = get_batches(path + 'train', shuffle=False, batch_size=1)
    val_batches = get_batches(path + 'valid', shuffle=False, batch_size=1)
    test_batches = get_batches(path + 'test', shuffle=False, batch_size=1)
    return (val_batches.classes,
            trn_batches.classes,
            onehot(val_batches.classes),
            onehot(trn_batches.classes),
            val_batches.filenames,
            trn_batches.filenames,
            test_batches.filenames)
In [11]:
# Iterate images in a fixed directory order (shuffle=False) so extracted
# features line up index-for-index with the labels/filenames below.
batches = get_batches(path+'train', shuffle=False, batch_size=batch_size)
val_batches = get_batches(path+'valid', batch_size=batch_size*2, shuffle=False)
# labels are the one-hot encoded version of classes
(val_classes, trn_classes, val_labels, trn_labels, val_filenames, filenames,
test_filenames) = get_classes(path)
In [12]:
# One full pass over the validation set to extract rn0 conv features
# (second argument is the sample count in Keras 1's predict_generator API).
val_features = rn0.predict_generator(val_batches, val_batches.nb_sample)
In [13]:
# Same extraction over the training set.
trn_features = rn0.predict_generator(batches, batches.nb_sample)
def save_array(fname, arr):
    """Persist `arr` to disk as a bcolz carray rooted at `fname`."""
    carr = bcolz.carray(arr, rootdir=fname, mode='w')
    carr.flush()
In [15]:
# Cache the rn0 conv features so the expensive extraction isn't repeated.
# NOTE(review): model_path has no trailing '/', so these land in `path`
# with a bare 'model' filename prefix — confirm that's intended.
save_array(model_path + 'trn_rn0_conv.bc', trn_features)
save_array(model_path + 'val_rn0_conv.bc', val_features)
In [18]:
# Reload the cached rn0 features (checkpoint for restarted sessions).
trn_features = load_array(model_path + 'trn_rn0_conv.bc')
val_features = load_array(model_path + 'val_rn0_conv.bc')
In [19]:
def get_fc_layers(p):
    """Fully-connected classifier head (dropout rate `p`) sized for rn0's
    convolutional output; ends in a 2-way softmax (cat vs dog)."""
    layers = [BatchNormalization(axis=1, input_shape=rn0.output_shape[1:]),
              Flatten()]
    # Two identical dense stages, the second with half the dropout.
    for rate in (p, p/2):
        layers.append(Dropout(rate))
        layers.append(Dense(1024, activation='relu'))
        layers.append(BatchNormalization())
    layers.append(Dropout(p))
    layers.append(Dense(2, activation='softmax'))
    return layers
In [20]:
# Assemble the fully-connected head (50% dropout) over the cached features.
model = Sequential(get_fc_layers(.5))
In [21]:
# Adam with default settings; categorical crossentropy matches the softmax output.
model.compile(optimizer=Adam(), loss='categorical_crossentropy', metrics=['accuracy'])
In [23]:
# Train only the head on precomputed conv features — fast, since no
# convolutional forward passes are needed.
model.fit(trn_features, trn_labels, nb_epoch=2,
batch_size=batch_size, validation_data=(val_features, val_labels))
Out[23]:
In [24]:
def get_ap_layers(p):
    """Minimal head for rn0 features: global average pooling feeds a 2-way
    softmax directly, with dropout rate `p` in between."""
    pool = GlobalAveragePooling2D(input_shape=rn0.output_shape[1:])
    drop = Dropout(p)
    clf = Dense(2, activation='softmax')
    return [pool, drop, clf]
In [25]:
# Swap in the lighter average-pooling head (20% dropout).
model = Sequential(get_ap_layers(0.2))
In [26]:
# Same optimizer/loss setup as the fully-connected head.
model.compile(optimizer=Adam(), loss='categorical_crossentropy', metrics=['accuracy'])
In [27]:
# Train the average-pooling head; far fewer parameters than the FC head.
model.fit(trn_features, trn_labels, nb_epoch=3,
batch_size=batch_size, validation_data=(val_features, val_labels))
Out[27]:
In [29]:
# Second ResNet50 feature extractor taking 400x400 inputs; the conv output's
# spatial dimensions grow with the input size.
rn1 = Resnet50(include_top=False, size=(400, 400)).model
rn1.output_shape[1:]
Out[29]:
In [32]:
# Rebuild the generators at 400x400 to match rn1's input size; shuffle=False
# keeps features aligned with labels/filenames.
batches = get_batches(path+'train', shuffle=False, batch_size=batch_size,
target_size=(400, 400))
val_batches = get_batches(path+'valid', batch_size=batch_size*2, shuffle=False,
target_size=(400, 400))
# labels are the one-hot encoded version of classes
# (get_classes re-reads the directories at its own default size, which is
# fine here: class labels and filenames don't depend on image size)
(val_classes, trn_classes, val_labels, trn_labels, val_filenames, filenames,
test_filenames) = get_classes(path)
In [33]:
# Extract rn1 conv features, one pass per dataset split.
val_features = rn1.predict_generator(val_batches, val_batches.nb_sample)
# Fix: training features must come from the *training* generator — the
# original passed val_batches here (copy-paste error), silently reusing
# validation data as "training" features.
trn_features = rn1.predict_generator(batches, batches.nb_sample)
In [34]:
# Cache the 400x400 features alongside the rn0 ones.
save_array(model_path + 'trn_rn1_conv.bc', trn_features)
save_array(model_path + 'val_rn1_conv.bc', val_features)
In [35]:
# Reload cached rn1 features (checkpoint for restarted sessions).
trn_features = load_array(model_path + 'trn_rn1_conv.bc')
val_features = load_array(model_path + 'val_rn1_conv.bc')
In [ ]:
def get_ap_layers(p):
    """Average-pooling head sized for rn1's (400x400-input) conv features.
    NOTE(review): this shadows the earlier get_ap_layers defined for rn0."""
    return [GlobalAveragePooling2D(input_shape=rn1.output_shape[1:]),
            Dropout(p),
            Dense(2, activation='softmax')]
In [36]:
# Fix: rebuild and compile the head for rn1's feature shape before fitting.
# The original reused `model`, which was built against rn0's output shape,
# so fitting it on rn1-shaped features would fail Keras's input-shape check;
# the redefined get_ap_layers above was never actually used.
model = Sequential(get_ap_layers(0.2))
model.compile(optimizer=Adam(), loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(trn_features, trn_labels, nb_epoch=3,
          batch_size=batch_size, validation_data=(val_features, val_labels))
In [ ]: