In [0]:
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
import tensorflow as tf
import matplotlib.pyplot as plt
from numpy.random import seed
seed(1)
tf.random.set_seed(2)  # tf.set_random_seed / from tensorflow import set_random_seed was removed in TensorFlow 2.x
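# Training data: 10,000 angles in degrees; target y = 1 + sin(x), which lies in [0, 2]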
x = np.random.uniform(low=0, high=360, size=10000)
y = 1+np.sin(np.deg2rad(x))
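# Small fully connected regression network: 1 -> 4 -> 60 -> 1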
model = Sequential()
model.add(Dense(4, input_shape=(1,), kernel_initializer='uniform', activation='relu'))
model.add(Dense(60, kernel_initializer='uniform', activation='relu'))
## Changing the output activation to anything other than 'linear' causes the model to not converge; why?
## (See the note and sketch after the prediction plot below.)
model.add(Dense(1, kernel_initializer='uniform', activation='linear'))
model.compile(loss='mean_squared_error', optimizer='adam')
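# Train on the full dataset and keep the loss history for plotting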
history = model.fit(x, y, epochs=100, batch_size=32, verbose=0)
plt.plot(history.history['loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.show()
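# Evaluate the mean squared error on the training data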
# model.summary()
loss_and_metrics = model.evaluate(x, y)
print(loss_and_metrics)
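# Predict on the training inputs and compare against the target curve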
y1 = model.predict(x)
plt.scatter(x, y, label='training data')
plt.scatter(x, y1, label='predicted')
plt.legend()
plt.show()
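Regarding the question in the comment above: the target y = 1 + sin(x) lies in [0, 2], so a bounded output activation such as sigmoid (range (0, 1)) or tanh (range (-1, 1)) cannot reach all of the target values and the MSE loss plateaus at a nonzero floor; a relu output can also stall if its pre-activation goes negative early in training. The cell below is a minimal sketch of one way to check this, not a definitive explanation; build_model and y_scaled are names introduced here for illustration. It trains the same architecture with a sigmoid output twice, once on the raw targets and once on targets rescaled into (0, 1), to separate the range-mismatch effect from any optimizer issue.

In [1]:
# Sketch (assumption: range mismatch is the main cause): compare a sigmoid
# output on the raw targets in [0, 2] versus targets rescaled into (0, 1).
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

x = np.random.uniform(low=0, high=360, size=10000)
y = 1 + np.sin(np.deg2rad(x))
y_scaled = y / 2.0  # now in [0, 1], reachable by a sigmoid output

def build_model(output_activation):
    # Same architecture as above, with a configurable output activation (illustrative helper)
    m = Sequential()
    m.add(Dense(4, input_shape=(1,), kernel_initializer='uniform', activation='relu'))
    m.add(Dense(60, kernel_initializer='uniform', activation='relu'))
    m.add(Dense(1, kernel_initializer='uniform', activation=output_activation))
    m.compile(loss='mean_squared_error', optimizer='adam')
    return m

# Sigmoid output on the raw targets cannot represent values above 1 ...
sigmoid_raw = build_model('sigmoid').fit(x, y, epochs=100, batch_size=32, verbose=0)
# ... but on the rescaled targets it has a chance to converge.
sigmoid_scaled = build_model('sigmoid').fit(x, y_scaled, epochs=100, batch_size=32, verbose=0)
print('final loss, raw targets:   ', sigmoid_raw.history['loss'][-1])
print('final loss, scaled targets:', sigmoid_scaled.history['loss'][-1])

If the rescaled version fits while the raw version plateaus, that points to the output range rather than the optimizer, which is why a linear output (or rescaled targets) is the usual choice for this kind of regression.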