In [1]:
from keras.datasets import imdb
# Keep only the 10,000 most frequent words in the training data
(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)
In [2]:
train_data[0]
Out[2]:
In [3]:
train_labels[0]
Out[3]:
In [4]:
# Confirm that no word index exceeds 9,999 (a consequence of num_words=10000)
max([max(sequence) for sequence in train_data])
Out[4]:
In [5]:
word_index = imdb.get_word_index()
In [6]:
# Invert the mapping: word_index maps word -> integer index, this maps index -> word
reverse_word_index = dict([(value, key) for (key, value) in word_index.items()])
In [7]:
# Indices are offset by 3 because 0, 1 and 2 are reserved for 'padding', 'start of sequence' and 'unknown'
decoded_review = ' '.join([reverse_word_index.get(i - 3, '?') for i in train_data[0]])
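As a quick sanity check, the decoded string can be printed; the slice length below is arbitrary:
In [ ]:
# Show the beginning of the reconstructed review text
print(decoded_review[:200])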
In [8]:
import numpy as np

def vectorize_sequences(sequences, dimension=10000):
    # All-zero matrix of shape (len(sequences), dimension)
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        # Set the indices of the words present in this review to 1 (multi-hot encoding)
        results[i, sequence] = 1
    return results

x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)
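A brief check of the multi-hot encoding: each review becomes a 10,000-dimensional vector, and the number of ones should match the number of distinct word indices in the original sequence (a sketch, assuming the vectorization above):
In [ ]:
# Shape check and a spot check on the first review
print(x_train.shape)  # (25000, 10000)
print(int(x_train[0].sum()), len(set(train_data[0])))  # ones in the vector vs. distinct indices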
In [9]:
x_train[0]
Out[9]:
In [10]:
# Labels become float32 arrays of 0s (negative) and 1s (positive)
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')
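The IMDB split is balanced between positive and negative reviews, so the mean of each label vector should come out at 0.5:
In [ ]:
# Fraction of positive labels in each split
print(y_train.mean(), y_test.mean())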
In [11]:
y_train
Out[11]:
In [12]:
from keras import models
from keras import layers
In [25]:
model = models.Sequential()
In [26]:
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))  # hidden layer 1
model.add(layers.Dense(16, activation='relu'))                        # hidden layer 2
model.add(layers.Dense(1, activation='sigmoid'))                      # output: probability the review is positive
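An optional look at the architecture; summary() lists each layer's output shape and parameter count (the first Dense layer alone has 10,000 × 16 + 16 = 160,016 parameters):
In [ ]:
# Inspect layer shapes and parameter counts
model.summary()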
In [27]:
from keras import losses, metrics, optimizers
In [28]:
model.compile(optimizer=optimizers.RMSprop(learning_rate=0.001),  # 'lr' on older Keras versions
              loss=losses.binary_crossentropy,
              metrics=[metrics.binary_accuracy])
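Because the optimizer, loss and metric are all standard here, the same configuration can also be written with string shortcuts. It is left commented out, since the metric would then be logged under a different history key ('accuracy' or 'acc', depending on the Keras version) than the one used by the plots below:
In [ ]:
# Equivalent compilation with string shortcuts
# model.compile(optimizer='rmsprop',
#               loss='binary_crossentropy',
#               metrics=['accuracy'])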
In [29]:
x_val = x_train[:10000]            # first 10,000 reviews held out for validation
partial_x_train = x_train[10000:]  # remaining 15,000 used for training
y_val = y_train[:10000]
partial_y_train = y_train[10000:]
In [30]:
history = model.fit(partial_x_train, partial_y_train,
                    epochs=20, batch_size=512,
                    validation_data=(x_val, y_val))
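Instead of training for a fixed 20 epochs and reading the stopping point off the plots, an EarlyStopping callback can halt training once validation loss stops improving. A sketch, left commented out so the full 20-epoch history used below is preserved:
In [ ]:
# from keras.callbacks import EarlyStopping
# early_stop = EarlyStopping(monitor='val_loss', patience=2, restore_best_weights=True)
# history = model.fit(partial_x_train, partial_y_train,
#                     epochs=20, batch_size=512,
#                     validation_data=(x_val, y_val),
#                     callbacks=[early_stop])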
In [31]:
history_dict = history.history
In [32]:
history_dict.keys()
Out[32]:
In [33]:
import matplotlib.pyplot as plt

acc = history_dict['binary_accuracy']
loss_values = history_dict['loss']
val_loss_values = history_dict['val_loss']
epochs = range(1, len(acc) + 1)

plt.plot(epochs, loss_values, 'bo', label='Training loss')       # 'bo' = blue dots
plt.plot(epochs, val_loss_values, 'b', label='Validation loss')  # 'b' = solid blue line
plt.title('Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
In [35]:
plt.clf()  # clear the previous figure
acc = history_dict['binary_accuracy']
val_acc = history_dict['val_binary_accuracy']
epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, 'bo', label='Training accuracy')
plt.plot(epochs, val_acc, 'b', label='Validation accuracy')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
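Both plots show the model overfitting: training loss keeps falling while validation loss bottoms out after a few epochs. Rather than eyeballing the curve, the best epoch can be read off the history directly (a small helper using the arrays defined above):
In [ ]:
# Epoch with the lowest validation loss (epochs are 1-indexed, as in the plots)
best_epoch = int(np.argmin(val_loss_values)) + 1
print(best_epoch, min(val_loss_values))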
In [36]:
results = model.evaluate(x_test, y_test)  # returns [test loss, test binary accuracy]
In [37]:
results
Out[37]:
In [38]:
model.predict(x_test)
Out[38]:
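predict returns the sigmoid output, i.e. the model's estimated probability that each review is positive. To get hard class labels, a common convention is to threshold at 0.5 (the name predicted_classes is introduced here for illustration):
In [ ]:
# Convert probabilities to 0/1 labels and compare the first few against the ground truth
predicted_classes = (model.predict(x_test) > 0.5).astype('int32')
print(predicted_classes[:10].ravel())
print(y_test[:10])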
In [ ]: