In [20]:
import keras
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D

import numpy as np
import os
import wandb
from wandb.keras import WandbCallback
import matplotlib.pyplot as plt

In [44]:
wandb.init(project="cifar")
config = wandb.config
config.dropout = 0.25
config.dense_layer_nodes = 100
config.learn_rate = 0.08
config.batch_size = 256
config.epochs = 50

class_names = ['airplane','automobile','bird','cat','deer',
               'dog','frog','horse','ship','truck']
num_classes = len(class_names)

(X_train, y_train), (X_test, y_test) = cifar10.load_data()


W&B Run: https://app.wandb.ai/l2k2/cifar/runs/swfj8tg9
Call `%%wandb` in the cell containing your training loop to display live results.

In [22]:
plt.imshow(X_train[10])


Out[22]:
<matplotlib.image.AxesImage at 0x7f18f49893c8>

In [45]:
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

In [60]:
X_train = X_train.astype('float32') / 255.
X_test = X_test.astype('float32') / 255.

In [62]:
model = Sequential()
model.add(Conv2D(32, (3, 3), padding='same',
                 input_shape=X_train.shape[1:], activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(config.dropout))

model.add(Flatten())
model.add(Dense(config.dense_layer_nodes, activation='relu'))
model.add(Dropout(config.dropout))
model.add(Dense(num_classes, activation='softmax'))

In [63]:
model.compile(loss='categorical_crossentropy',
              optimizer="adam",
              metrics=['accuracy'])

In [58]:
wandb.init()
model.fit(X_train,y_train,batch_size=config.batch_size,
                        epochs=10,
                        validation_data=(X_test, y_test),
                        callbacks=[WandbCallback()]
)


W&B Run: https://app.wandb.ai/l2k2/cifar/runs/1iangnps
Call `%%wandb` in the cell containing your training loop to display live results.
Train on 50000 samples, validate on 10000 samples
Epoch 1/10
41984/50000 [========================>.....] - ETA: 0s - loss: 14.5041 - acc: 0.1001
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-58-4fa842e9ddb1> in <module>
      3                         epochs=10,
      4                         validation_data=(X_test, y_test),
----> 5                         callbacks=[WandbCallback()]
      6 )

/usr/local/lib/python3.6/dist-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)
   1037                                         initial_epoch=initial_epoch,
   1038                                         steps_per_epoch=steps_per_epoch,
-> 1039                                         validation_steps=validation_steps)
   1040 
   1041     def evaluate(self, x=None, y=None,

/usr/local/lib/python3.6/dist-packages/keras/engine/training_arrays.py in fit_loop(model, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)
    197                     ins_batch[i] = ins_batch[i].toarray()
    198 
--> 199                 outs = f(ins_batch)
    200                 outs = to_list(outs)
    201                 for l, o in zip(out_labels, outs):

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in __call__(self, inputs)
   2713                 return self._legacy_call(inputs)
   2714 
-> 2715             return self._call(inputs)
   2716         else:
   2717             if py_any(is_tensor(x) for x in inputs):

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in _call(self, inputs)
   2673             fetched = self._callable_fn(*array_vals, run_metadata=self.run_metadata)
   2674         else:
-> 2675             fetched = self._callable_fn(*array_vals)
   2676         return fetched[:len(self.outputs)]
   2677 

/usr/local/lib/python3.6/dist-packages/tensorflow/python/client/session.py in __call__(self, *args, **kwargs)
   1397           ret = tf_session.TF_SessionRunCallable(
   1398               self._session._session, self._handle, args, status,
-> 1399               run_metadata_ptr)
   1400         if run_metadata:
   1401           proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

KeyboardInterrupt: 

In [54]:
datagen = ImageDataGenerator()
datagen.fit(X_train)

In [64]:
wandb.init()
model.fit_generator(datagen.flow(X_train, y_train, batch_size=config.batch_size),
                        steps_per_epoch=X_train.shape[0] // config.batch_size,
                        epochs=config.epochs,
                        validation_data=(X_test, y_test),
                        callbacks=[WandbCallback()]
)


W&B Run: https://app.wandb.ai/l2k2/cifar/runs/ttmv2et0
Call `%%wandb` in the cell containing your training loop to display live results.
Epoch 1/50
195/195 [==============================] - 4s 22ms/step - loss: 1.7466 - acc: 0.3694 - val_loss: 1.4266 - val_acc: 0.5002
Epoch 2/50
195/195 [==============================] - 3s 18ms/step - loss: 1.4290 - acc: 0.4907 - val_loss: 1.2862 - val_acc: 0.5508
Epoch 3/50
195/195 [==============================] - 3s 17ms/step - loss: 1.3255 - acc: 0.5274 - val_loss: 1.2441 - val_acc: 0.5552
Epoch 4/50
195/195 [==============================] - 3s 18ms/step - loss: 1.2632 - acc: 0.5495 - val_loss: 1.1661 - val_acc: 0.5911
Epoch 5/50
195/195 [==============================] - 3s 17ms/step - loss: 1.2098 - acc: 0.5684 - val_loss: 1.1425 - val_acc: 0.5955
Epoch 6/50
195/195 [==============================] - 3s 17ms/step - loss: 1.1777 - acc: 0.5800 - val_loss: 1.1234 - val_acc: 0.6016
Epoch 7/50
195/195 [==============================] - 3s 17ms/step - loss: 1.1357 - acc: 0.5948 - val_loss: 1.1244 - val_acc: 0.6012
Epoch 8/50
195/195 [==============================] - 3s 17ms/step - loss: 1.1116 - acc: 0.6030 - val_loss: 1.0755 - val_acc: 0.6265
Epoch 9/50
195/195 [==============================] - 3s 18ms/step - loss: 1.0836 - acc: 0.6140 - val_loss: 1.0408 - val_acc: 0.6315
Epoch 10/50
195/195 [==============================] - 3s 17ms/step - loss: 1.0663 - acc: 0.6224 - val_loss: 1.0404 - val_acc: 0.6367
Epoch 11/50
195/195 [==============================] - 3s 17ms/step - loss: 1.0481 - acc: 0.6263 - val_loss: 1.0312 - val_acc: 0.6403
Epoch 12/50
195/195 [==============================] - 3s 17ms/step - loss: 1.0154 - acc: 0.6388 - val_loss: 1.0163 - val_acc: 0.6375
Epoch 13/50
195/195 [==============================] - 3s 17ms/step - loss: 0.9962 - acc: 0.6449 - val_loss: 1.0065 - val_acc: 0.6457
Epoch 14/50
195/195 [==============================] - 4s 18ms/step - loss: 0.9798 - acc: 0.6500 - val_loss: 1.0053 - val_acc: 0.6460
Epoch 15/50
195/195 [==============================] - 3s 18ms/step - loss: 0.9743 - acc: 0.6540 - val_loss: 0.9830 - val_acc: 0.6546
Epoch 16/50
195/195 [==============================] - 3s 18ms/step - loss: 0.9507 - acc: 0.6588 - val_loss: 1.0070 - val_acc: 0.6395
Epoch 17/50
195/195 [==============================] - 3s 18ms/step - loss: 0.9458 - acc: 0.6613 - val_loss: 0.9766 - val_acc: 0.6567
Epoch 18/50
195/195 [==============================] - 4s 18ms/step - loss: 0.9214 - acc: 0.6697 - val_loss: 0.9626 - val_acc: 0.6630
Epoch 19/50
195/195 [==============================] - 3s 18ms/step - loss: 0.9122 - acc: 0.6772 - val_loss: 0.9596 - val_acc: 0.6625
Epoch 20/50
195/195 [==============================] - 3s 17ms/step - loss: 0.9073 - acc: 0.6739 - val_loss: 0.9812 - val_acc: 0.6543
Epoch 21/50
195/195 [==============================] - 3s 17ms/step - loss: 0.8874 - acc: 0.6823 - val_loss: 0.9688 - val_acc: 0.6629
Epoch 22/50
195/195 [==============================] - 3s 17ms/step - loss: 0.8755 - acc: 0.6893 - val_loss: 0.9704 - val_acc: 0.6570
Epoch 23/50
195/195 [==============================] - 3s 18ms/step - loss: 0.8568 - acc: 0.6952 - val_loss: 0.9970 - val_acc: 0.6525
Epoch 24/50
195/195 [==============================] - 3s 18ms/step - loss: 0.8484 - acc: 0.6968 - val_loss: 0.9792 - val_acc: 0.6565
Epoch 25/50
195/195 [==============================] - 3s 17ms/step - loss: 0.8400 - acc: 0.7015 - val_loss: 0.9684 - val_acc: 0.6647
Epoch 26/50
195/195 [==============================] - 3s 17ms/step - loss: 0.8340 - acc: 0.7016 - val_loss: 0.9556 - val_acc: 0.6619
Epoch 27/50
195/195 [==============================] - 3s 16ms/step - loss: 0.8222 - acc: 0.7035 - val_loss: 0.9479 - val_acc: 0.6706
Epoch 28/50
195/195 [==============================] - 3s 17ms/step - loss: 0.8132 - acc: 0.7087 - val_loss: 0.9512 - val_acc: 0.6710
Epoch 29/50
195/195 [==============================] - 3s 17ms/step - loss: 0.8041 - acc: 0.7099 - val_loss: 0.9546 - val_acc: 0.6711
Epoch 30/50
195/195 [==============================] - 3s 16ms/step - loss: 0.7969 - acc: 0.7142 - val_loss: 0.9447 - val_acc: 0.6724
Epoch 31/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7842 - acc: 0.7170 - val_loss: 1.0046 - val_acc: 0.6560
Epoch 32/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7933 - acc: 0.7160 - val_loss: 0.9762 - val_acc: 0.6594
Epoch 33/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7717 - acc: 0.7225 - val_loss: 0.9661 - val_acc: 0.6697
Epoch 34/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7638 - acc: 0.7248 - val_loss: 0.9567 - val_acc: 0.6737
Epoch 35/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7560 - acc: 0.7234 - val_loss: 0.9898 - val_acc: 0.6602
Epoch 36/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7489 - acc: 0.7293 - val_loss: 0.9654 - val_acc: 0.6684
Epoch 37/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7462 - acc: 0.7285 - val_loss: 0.9508 - val_acc: 0.6713
Epoch 38/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7364 - acc: 0.7324 - val_loss: 0.9599 - val_acc: 0.6682
Epoch 39/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7245 - acc: 0.7385 - val_loss: 0.9781 - val_acc: 0.6616
Epoch 40/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7239 - acc: 0.7361 - val_loss: 0.9493 - val_acc: 0.6766
Epoch 41/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7065 - acc: 0.7435 - val_loss: 0.9581 - val_acc: 0.6720
Epoch 42/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7043 - acc: 0.7448 - val_loss: 0.9820 - val_acc: 0.6713
Epoch 43/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7043 - acc: 0.7426 - val_loss: 0.9995 - val_acc: 0.6645
Epoch 44/50
195/195 [==============================] - 3s 17ms/step - loss: 0.7063 - acc: 0.7418 - val_loss: 0.9682 - val_acc: 0.6693
Epoch 45/50
195/195 [==============================] - 3s 17ms/step - loss: 0.6937 - acc: 0.7451 - val_loss: 1.0024 - val_acc: 0.6666
Epoch 46/50
195/195 [==============================] - 3s 17ms/step - loss: 0.6950 - acc: 0.7441 - val_loss: 0.9858 - val_acc: 0.6715
Epoch 47/50
195/195 [==============================] - 3s 17ms/step - loss: 0.6841 - acc: 0.7514 - val_loss: 0.9757 - val_acc: 0.6730
Epoch 48/50
195/195 [==============================] - 3s 17ms/step - loss: 0.6798 - acc: 0.7506 - val_loss: 0.9863 - val_acc: 0.6691
Epoch 49/50
195/195 [==============================] - 3s 17ms/step - loss: 0.6793 - acc: 0.7521 - val_loss: 0.9945 - val_acc: 0.6681
Epoch 50/50
195/195 [==============================] - 3s 17ms/step - loss: 0.6705 - acc: 0.7561 - val_loss: 0.9881 - val_acc: 0.6748
Out[64]:
<keras.callbacks.History at 0x7f186557fc50>

In [43]:



Out[43]:
array([[[1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        ...,
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.]],

       [[1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        ...,
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [0., 1., 0., ..., 0., 0., 0.]],

       [[1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        ...,
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [0., 1., 0., ..., 0., 0., 0.]],

       ...,

       [[1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        ...,
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [0., 1., 0., ..., 0., 0., 0.]],

       [[1., 0., 0., ..., 0., 0., 0.],
        [0., 1., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        ...,
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.]],

       [[1., 0., 0., ..., 0., 0., 0.],
        [0., 1., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        ...,
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.],
        [1., 0., 0., ..., 0., 0., 0.]]], dtype=float32)

In [41]:


In [42]:



---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-42-8088862005ba> in <module>
----> 1 e[0]

TypeError: 'method' object is not subscriptable

In [ ]: