IMBD Dataset


In [20]:
from keras.callbacks import TensorBoard
from datetime import datetime

logdir = "logs\scalars\" + datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = TensorBoard(log_dir=logdir)


  File "<ipython-input-20-f516a64e3be7>", line 4
    logdir = "logs\scalars\" + datetime.now().strftime("%Y%m%d-%H%M%S")
                                                               ^
SyntaxError: invalid syntax

In [16]:
%load_ext tensorboard
%tensorboard --logdir logs/scalars


The tensorboard extension is already loaded. To reload it, use:
  %reload_ext tensorboard
ERROR: Could not find `tensorboard`. Please ensure that your PATH
contains an executable `tensorboard` program, or explicitly specify
the path to a TensorBoard binary by setting the `TENSORBOARD_BINARY`
environment variable.

In [10]:
from keras.datasets import imdb
from keras.preprocessing import sequence

max_features = 15000
maxlen = 500
batch_size = 32

print('Loading data...')
(input_train, y_train), (input_test, y_test) = imdb.load_data(
     num_words=max_features)
print(len(input_train), 'train sequences')
print(len(input_test), 'test sequences')

print('Pad sequences (samples x time)')
input_train = sequence.pad_sequences(input_train, maxlen=maxlen)
input_test = sequence.pad_sequences(input_test, maxlen=maxlen)
print('input_train shape:', input_train.shape)
print('input_test shape:', input_test.shape)


Loading data...
25000 train sequences
25000 test sequences
Pad sequences (samples x time)
input_train shape: (25000, 500)
input_test shape: (25000, 500)

In [17]:
from keras import Sequential
from keras.layers import LSTM, Embedding, Dense
from keras.optimizers import RMSprop

# import tensorflow as tf
# strategy = tf.distribute.MirroredStrategy()
# with strategy.scope():
    
model = Sequential()
model.add(Embedding(max_features, 32))
model.add(LSTM(32, dropout=0.0, recurrent_dropout=0.7, return_sequences=True))
model.add(LSTM(32, dropout=0.0, recurrent_dropout=0.3, return_sequences=True))
model.add(LSTM(32, dropout=0.0, recurrent_dropout=0.0))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer=RMSprop(), loss='binary_crossentropy', metrics=['acc'])
model.summary()


Model: "sequential_3"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
embedding_3 (Embedding)      (None, None, 32)          480000    
_________________________________________________________________
lstm_7 (LSTM)                (None, None, 32)          8320      
_________________________________________________________________
lstm_8 (LSTM)                (None, None, 32)          8320      
_________________________________________________________________
lstm_9 (LSTM)                (None, 32)                8320      
_________________________________________________________________
dense_3 (Dense)              (None, 1)                 33        
=================================================================
Total params: 504,993
Trainable params: 504,993
Non-trainable params: 0
_________________________________________________________________

In [19]:
history = model.fit(
    input_train, 
    y_train, 
    epochs=10, 
    batch_size=1024, 
    validation_split=0.2, 
    callbacks=[tensorboard_callback]
)


Train on 20000 samples, validate on 5000 samples
---------------------------------------------------------------------------
NotFoundError                             Traceback (most recent call last)
<ipython-input-19-00288825cb15> in <module>
      5     batch_size=1024,
      6     validation_split=0.2,
----> 7     callbacks=[tensorboard_callback]
      8 )

c:\users\mcama\appdata\local\programs\python\python36\lib\site-packages\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
   1237                                         steps_per_epoch=steps_per_epoch,
   1238                                         validation_steps=validation_steps,
-> 1239                                         validation_freq=validation_freq)
   1240 
   1241     def evaluate(self,

c:\users\mcama\appdata\local\programs\python\python36\lib\site-packages\keras\engine\training_arrays.py in fit_loop(model, fit_function, fit_inputs, out_labels, batch_size, epochs, verbose, callbacks, val_function, val_inputs, shuffle, initial_epoch, steps_per_epoch, validation_steps, validation_freq)
    117         callback_metrics += ['val_' + n for n in model.metrics_names]
    118 
--> 119     callbacks.set_model(callback_model)
    120     callbacks.set_params({
    121         'batch_size': batch_size,

c:\users\mcama\appdata\local\programs\python\python36\lib\site-packages\keras\callbacks\callbacks.py in set_model(self, model)
     66         self.model = model
     67         for callback in self.callbacks:
---> 68             callback.set_model(model)
     69 
     70     def _call_batch_hook(self, mode, hook, batch, logs=None):

c:\users\mcama\appdata\local\programs\python\python36\lib\site-packages\keras\callbacks\tensorboard_v2.py in set_model(self, model)
    114         """Sets Keras model and writes graph if specified."""
    115         model.run_eagerly = False
--> 116         super(TensorBoard, self).set_model(model)

~\AppData\Roaming\Python\Python36\site-packages\tensorflow_core\python\keras\callbacks.py in set_model(self, model)
   1496       self._close_writers()
   1497       if self.write_graph:
-> 1498         with self._get_writer(self._train_run_name).as_default():
   1499           with summary_ops_v2.always_record_summaries():
   1500             if not model.run_eagerly:

~\AppData\Roaming\Python\Python36\site-packages\tensorflow_core\python\keras\callbacks.py in _get_writer(self, writer_name)
   1574     if writer_name not in self._writers:
   1575       path = os.path.join(self.log_dir, writer_name)
-> 1576       writer = summary_ops_v2.create_file_writer_v2(path)
   1577       self._writers[writer_name] = writer
   1578     return self._writers[writer_name]

~\AppData\Roaming\Python\Python36\site-packages\tensorflow_core\python\ops\summary_ops_v2.py in create_file_writer_v2(logdir, max_queue, flush_millis, filename_suffix, name)
    389               filename_suffix=filename_suffix),
    390           name=name,
--> 391           v2=True)
    392 
    393 

~\AppData\Roaming\Python\Python36\site-packages\tensorflow_core\python\ops\summary_ops_v2.py in __init__(self, shared_name, init_op_fn, name, v2)
    211     # TODO(nickfelt): cache other constructed ops in graph mode
    212     self._init_op_fn = init_op_fn
--> 213     self._init_op = init_op_fn(self._resource)
    214     self._v2 = v2
    215     self._closed = False

~\AppData\Roaming\Python\Python36\site-packages\tensorflow_core\python\ops\gen_summary_ops.py in create_summary_file_writer(writer, logdir, max_queue, flush_millis, filename_suffix, name)
    190       else:
    191         message = e.message
--> 192       _six.raise_from(_core._status_to_exception(e.code, message), None)
    193   # Add nodes to the TensorFlow graph.
    194   _, _, _op = _op_def_lib._apply_op_helper(

c:\users\mcama\appdata\local\programs\python\python36\lib\site-packages\six.py in raise_from(value, from_value)

NotFoundError: Failed to create a directory: logs/scalars/20200120-145447\train; No such file or directory [Op:CreateSummaryFileWriter]

In [4]:
# from keras import Sequential
# from keras.layers import LSTM, Embedding, Dense
# from keras.utils import multi_gpu_model

# model = Sequential()
# parallel_model = multi_gpu_model(model, gpus=2)

# parallel_model.add(Embedding(max_features, 32))
# parallel_model.add(LSTM(32))
# parallel_model.add(Dense(1, activation='sigmoid'))

# parallel_model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
# history = parallel_model.fit(input_train, y_train, epochs=10, batch_size=128, validation_split=0.2)

In [5]:
import matplotlib.pyplot as plt

acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']

epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()

plt.figure()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()

plt.show()



In [ ]: