In [1]:
import tensorflow as tf

from models.alexnet import AlexNet
# from models.vgg import VGG
# from models.vgg_slim import VGGslim
# from models.inception_v3 import InceptionV3

from helper.imageloader import load_image_paths_by_subfolder
from helper.retrainer import Retrainer

In [2]:
# Set Model
# Architecture to fine-tune; swap in one of the commented-out imports above to change it.
model_def = AlexNet

# Input settings
image_dir = '../../datasets/testing'
validation_ratio = 10 # every Nth image goes to validation: 10 -> 1/10 = 0.1 = 10%
skip_folder = ['yiwen']  # subfolders (class labels) to exclude from loading

# Learning/Network params
learning_rate = 0.005
num_epochs = 20
batch_size = 32
dropout_keep_prop = 1.0 # keep probability: 1.0 disables dropout; 0.5 is the classic AlexNet value
finetune_layers = ['fc6', 'fc7', 'fc8']  # only these layers are trained; earlier conv layers are restored frozen

# Hardware usage
device = '/cpu:0'  # TF device string, e.g. '/gpu:0' for GPU training
memory_usage = 1.0  # fraction of GPU memory to allocate (ignored on CPU)

In [3]:
# Load image paths, split into training/validation sets by validation_ratio.
# Labels are derived from subfolder names (see cell output below).
image_paths = load_image_paths_by_subfolder(
    image_dir,
    validation_ratio,
    skip_folder,
    use_subfolder=True
)

# Make sure we have enough images to fill at least one training/validation batch.
# NOTE: this only warns — training below will still be attempted; consider raising
# instead if an underfilled batch is fatal for the Retrainer.
# Parenthesized print: identical output under Python 2, also valid Python 3.
if image_paths['training_image_count'] < batch_size:
    print('Not enough training images in \'%s\'' % image_dir)

if image_paths['validation_image_count'] < batch_size:
    print('Not enough validation images in \'%s\'' % image_dir)


Looking for images in bad
=> Found 500 images
  => Training: 450
  => Validation 50
  => Labeling them with: bad (0)
Looking for images in good
=> Found 500 images
  => Training: 450
  => Validation 50
  => Labeling them with: good (1)

In [4]:
# Retrain: fine-tune the selected model on the loaded image set.
trainer = Retrainer(model_def, image_paths)

# Keyword options gathered in one place for readability.
run_options = {
    'memory_usage': memory_usage,
    'device': device,
    'show_misclassified': True,       # report misclassified validation images
    'validate_on_each_epoch': True,   # run a validation pass after every epoch
}

trainer.run(
    finetune_layers,
    num_epochs,
    learning_rate,
    batch_size,
    dropout_keep_prop,
    **run_options
)


=> Will Restore:
  => <tf.Variable 'conv1/weights:0' shape=(11, 11, 3, 96) dtype=float32_ref>
  => <tf.Variable 'conv1/biases:0' shape=(96,) dtype=float32_ref>
  => <tf.Variable 'conv2/weights:0' shape=(5, 5, 48, 256) dtype=float32_ref>
  => <tf.Variable 'conv2/biases:0' shape=(256,) dtype=float32_ref>
  => <tf.Variable 'conv3/weights:0' shape=(3, 3, 256, 384) dtype=float32_ref>
  => <tf.Variable 'conv3/biases:0' shape=(384,) dtype=float32_ref>
  => <tf.Variable 'conv4/weights:0' shape=(3, 3, 192, 384) dtype=float32_ref>
  => <tf.Variable 'conv4/biases:0' shape=(384,) dtype=float32_ref>
  => <tf.Variable 'conv5/weights:0' shape=(3, 3, 192, 256) dtype=float32_ref>
  => <tf.Variable 'conv5/biases:0' shape=(256,) dtype=float32_ref>
=> Will train:
  => <tf.Variable 'fc6/weights:0' shape=(9216, 4096) dtype=float32_ref>
  => <tf.Variable 'fc6/biases:0' shape=(4096,) dtype=float32_ref>
  => <tf.Variable 'fc7/weights:0' shape=(4096, 4096) dtype=float32_ref>
  => <tf.Variable 'fc7/biases:0' shape=(4096,) dtype=float32_ref>
  => <tf.Variable 'fc8/weights:0' shape=(4096, 2) dtype=float32_ref>
  => <tf.Variable 'fc8/biases:0' shape=(2,) dtype=float32_ref>
=> Learningrate: 0.0050
=> Batchsize: 32
=> Dropout: 0.0000
##################################
=> Restoring weights from numpy file: ./weights/bvlc_alexnet.npy
2017-10-03 22:20:53.361642 Epoch number: 1
2017-10-03 22:20:53.362101 Start training...
2017-10-03 22:21:31.689822 Start validation...
2017-10-03 22:21:35.010682 Validation Accuracy = 0.4791666667
2017-10-03 22:21:35.017060 Epoch number: 2
2017-10-03 22:21:35.017124 Start training...
2017-10-03 22:22:13.719673 Start validation...
2017-10-03 22:22:17.075630 Validation Accuracy = 0.4791666667
2017-10-03 22:22:17.081772 Epoch number: 3
2017-10-03 22:22:17.081848 Start training...
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-4-17e47fa005b9> in <module>()
     10     device=device,
     11     show_misclassified=True,
---> 12     validate_on_each_epoch=True
     13 )

/Users/philipp/Uni/Masterarbeit/code/finetuneAlexVGG/helper/retrainer.py in run(self, finetune_layers, epochs, learning_rate, batch_size, keep_prob, memory_usage, device, show_misclassified, validate_on_each_epoch, ckpt_file)
    236                     epoch,
    237                     summary_op,
--> 238                     writer
    239                 )
    240 

/Users/philipp/Uni/Masterarbeit/code/finetuneAlexVGG/helper/retrainer.py in run_training(self, sess, train_op, iterator_op, get_next_batch_op, ph_images, ph_labels, ph_keep_prob, keep_prob, batches, epoch, summary_op, writer)
    285             sess.run(
    286                 train_op,
--> 287                 feed_dict={ph_images: img_batch, ph_labels: label_batch, ph_keep_prob: keep_prob}
    288             )
    289 

/Users/philipp/Envs/tf/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    893     try:
    894       result = self._run(None, fetches, feed_dict, options_ptr,
--> 895                          run_metadata_ptr)
    896       if run_metadata:
    897         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/Users/philipp/Envs/tf/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
   1122     if final_fetches or final_targets or (handle and feed_dict_tensor):
   1123       results = self._do_run(handle, final_targets, final_fetches,
-> 1124                              feed_dict_tensor, options, run_metadata)
   1125     else:
   1126       results = []

/Users/philipp/Envs/tf/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1319     if handle is None:
   1320       return self._do_call(_run_fn, self._session, feeds, fetches, targets,
-> 1321                            options, run_metadata)
   1322     else:
   1323       return self._do_call(_prun_fn, self._session, handle, feeds, fetches)

/Users/philipp/Envs/tf/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_call(self, fn, *args)
   1325   def _do_call(self, fn, *args):
   1326     try:
-> 1327       return fn(*args)
   1328     except errors.OpError as e:
   1329       message = compat.as_text(e.message)

/Users/philipp/Envs/tf/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1304           return tf_session.TF_Run(session, options,
   1305                                    feed_dict, fetch_list, target_list,
-> 1306                                    status, run_metadata)
   1307 
   1308     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [ ]: