In [1]:
# Imports
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.utils import np_utils
from sklearn.cross_validation import train_test_split
import matplotlib.pyplot as plt
%matplotlib inline
In [2]:
# Experiment configuration
N = 10000              # number of synthetic samples to generate
batch_size = N // 100  # minibatch size: 1% of the data
nb_epoch = 15          # training epochs
nb_dense = 512         # units per hidden Dense layer
nb_hidden = 1          # the number of hidden layers to use
p_dropout = 0.5        # dropout rate applied after each hidden layer
In [3]:
# Synthetic data: y = 3x - x^2 plus unit-variance Gaussian noise.
# NOTE: the RNG calls happen in the same order as before (uniform,
# then normal, then the split), so the generated data is unchanged.
x = np.random.uniform(-5, 5, N)
X = np.column_stack((x, x ** 2))  # feature matrix: columns are [x, x^2]
noise = np.random.normal(loc=0, scale=1, size=x.size)
y = 3 * X[:, 0] - X[:, 1] + noise
plt.scatter(x, y, alpha=0.1)
Y = y[:, np.newaxis]  # reshape target to 2-D (N, 1)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y)
print(X_train.shape, X_test.shape, Y_train.shape, Y_test.shape)
In [4]:
print('Building model...')
# All activations are 'linear', so the network is linear in its inputs;
# that is adequate here because x and x^2 are both supplied as features,
# making the true function linear in feature space.
model = Sequential()
model.add(Dense(nb_dense, input_shape=(X.shape[1],)))
model.add(Activation('linear'))
model.add(Dropout(p_dropout))
# Extra hidden layers (with nb_hidden = 1 this loop body never runs).
for _ in range(nb_hidden - 1):
    model.add(Dense(nb_dense))
    model.add(Activation('linear'))
    model.add(Dropout(p_dropout))
model.add(Dense(1))  # single regression output
model.add(Activation('linear'))
# Fix: 'accuracy' is a classification metric and is meaningless for
# mean-squared-error regression, so no extra metrics are requested.
model.compile(loss='mean_squared_error',
              optimizer='adam')
In [11]:
import time

t1 = time.time()
# Bug fix: the original call trained on the full (X, Y), silently leaking
# the held-out test split into training and never using the split at all.
# Train on X_train/Y_train and validate on the explicit held-out split.
result = model.fit(X_train, Y_train,
                   nb_epoch=nb_epoch, batch_size=batch_size,
                   verbose=1, validation_data=(X_test, Y_test))
t2 = time.time()
print('Model training took {:.2g} minutes'.format((t2 - t1) / 60))
In [16]:
# Compare the fitted model against the true function over the data range.
xplot = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100)
Xplot = np.column_stack((xplot, xplot ** 2))  # same [x, x^2] feature layout as training
Yplot = model.predict(Xplot)
plt.scatter(x, y, alpha=0.03, label='data')
plt.plot(xplot, Yplot[:, 0], 'r--', lw=3, label='model prediction')
plt.plot(xplot, 3 * xplot - xplot ** 2, 'g-.', lw=2, label='true: $3x - x^2$')
# Labels and a legend so the figure stands alone when skimmed.
plt.xlabel('x')
plt.ylabel('y')
plt.legend();
Out[16]:
In [10]:
Out[10]:
In [ ]: