In [ ]:
import numpy as np
import seaborn as sns
import pandas as pd
from matplotlib import pyplot as plt, animation

from time import sleep

%matplotlib notebook

sns.set_context("paper")

In [ ]:
import tensorflow as tf

In [ ]:
# pin the input constants to the first GPU; matmul placement is logged below
with tf.device('/gpu:0'):
  a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')
  b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')
c = tf.matmul(a, b)
# Creates a session with log_device_placement set to True.
sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
# Runs the op.
print(sess.run(c))
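
If the placement log is noisy, an added alternative check (TF 1.x utility, not part of the original cell) is to list the devices TensorFlow can see directly:

In [ ]:
# list the compute devices visible to TensorFlow (CPU, GPU, ...)
from tensorflow.python.client import device_lib
print([d.name for d in device_lib.list_local_devices()])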

Unit Models

Exploration of models for the basic constituent units of a neural network.

Perceptron

Starting with the perceptron, an early unit model based on binary inputs and a step activation function.


In [ ]:
# weights
W = np.array([-2,-2])
# bias
b = 3
# threshold. Can be discarded using the bias instead (bias=-threshold)
#threshold = 3

In [ ]:
# perceptron firing rule: output 1 if the weighted input plus bias is positive
perceptron = lambda x: 1 if np.dot(x, W) + b > 0 else 0

In [ ]:
# input array
X = np.array([1,1])
# compute perceptron output
perceptron(X)
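
With these particular weights and bias the perceptron implements a NAND gate; a quick added check over all four binary inputs (not in the original) confirms it:

In [ ]:
# with W=[-2,-2] and b=3 the perceptron fires for every input except [1,1],
# i.e. it computes NAND
for x in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    print(x, '->', perceptron(np.array(x)))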

Sigmoid Neuron

Allowing real-valued inputs and producing outputs in the range 0-1, using a non-linear (sigmoid) activation function.


In [ ]:
# sigmoid neuron firing rule: sigma(w.x + b)
neuron = lambda x: 1 / (1 + np.exp(-np.dot(x, W) - b))

In [ ]:
# input array
X = np.array([1,1])
# compute neuron output
neuron(X)
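
To see how the sigmoid softens the perceptron's hard threshold, a small added sketch sweeps the first input while holding the second fixed at 1:

In [ ]:
# compare the step output of the perceptron with the smooth sigmoid output
# as the first input varies (second input fixed at 1)
xs = np.linspace(0, 1, 100)
plt.plot(xs, [perceptron(np.array([x1, 1])) for x1 in xs], label='perceptron')
plt.plot(xs, [neuron(np.array([x1, 1])) for x1 in xs], label='sigmoid neuron')
plt.legend()
plt.show()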

Neural Network for Linear Regression

Solve a line-fitting problem using a vanilla single-unit neural network, first with raw TensorFlow and then with Keras.

Setup Data


In [ ]:
# line function
def line(intercept, slope, x):
    return x*slope + intercept

# sine function (noisy samples of it are generated below)
def sin_line(x):
    return np.sin(x)

In [ ]:
# create our random data (line)
n = 1000
slope = 1.5
intercept = 5.
x = np.random.random(n)
y = line(intercept, slope, x)

In [ ]:
# create our random data (sin line)
n = 1000
x_data = np.linspace(-10., 10., n)
y_data = sin_line(x_data) + np.random.uniform(-0.5, 0.5, n)

In [ ]:
# plot data
sns.regplot(x=x_data, y=y_data)
plt.show()

Train with TensorFlow


In [ ]:
import tensorflow as tf

In [ ]:
# Network parameters
X = tf.placeholder(tf.float32, name='X')
y = tf.placeholder(tf.float32, name='Y')
W = tf.Variable(tf.random_normal([1], dtype=tf.float32, stddev=0.1), name='weight')
b = tf.Variable(tf.constant([0], dtype=tf.float32), name='bias')

# computation
y_pred = W*X+b
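
As a quick added check of the graph wiring (not in the original notebook), the untrained y_pred can be evaluated in a throwaway session:

In [ ]:
# evaluate the untrained model on a few inputs; the placeholder X is fed
# at run time via feed_dict
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(y_pred, feed_dict={X: [0., 1., 2.]}))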

In [ ]:
# cost definition: mean absolute error (L1)
def cost_fun(y, y_pred):
    return tf.abs(y - y_pred)

cost = tf.reduce_mean(cost_fun(y, y_pred))
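
For reference (an addition, not used below), the more common squared-error cost would look like this; it penalizes large residuals more heavily than the absolute error above:

In [ ]:
# alternative cost: squared error (L2); more sensitive to outliers
# than the absolute error used above
def l2_cost_fun(y, y_pred):
    return tf.square(y - y_pred)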

In [ ]:
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

In [ ]:
n_iters = 10000

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    for i in range(n_iters):
        sess.run(optimizer, feed_dict={X: x_data, y: y_data})

        if i % 100 == 0:
            training_cost = sess.run(cost, feed_dict={X: x_data, y: y_data})
            print(training_cost)

    ys_pred = y_pred.eval(feed_dict={X: x_data}, session=sess)
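
As an added sanity check on the learned line, np.polyfit gives the closed-form least-squares fit; note it minimizes squared error while the network above minimized absolute error, so the two lines may differ slightly:

In [ ]:
# closed-form least-squares line for comparison with the learned W and b
w_ls, b_ls = np.polyfit(x_data, y_data, 1)
print('least squares: slope={:.3f}, intercept={:.3f}'.format(w_ls, b_ls))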

In [ ]:
fig, ax = plt.subplots(1, 1)
sns.regplot(x=x_data, y=y_data, fit_reg=False, ax=ax)
sns.regplot(x=x_data, y=ys_pred, fit_reg=False, ax=ax)
plt.show()

Train with Keras


In [ ]:
from keras import models
from keras import layers

In [ ]:
# Create neural network: a single dense unit computing y = w*x + b
nn = models.Sequential()
nn.add(layers.Dense(1, input_dim=1))
nn.compile(optimizer='sgd', loss='mse')
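
A quick added check: summary() confirms the model has exactly two trainable parameters, the slope and the intercept.

In [ ]:
# the single Dense(1) layer holds 2 parameters: one weight and one bias
nn.summary()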

In [ ]:
# train model
# dummy way of training, for the sake of retrieving the weights at each single training step
theta_history = []  # (slope, intercept) at each step
loss_history = []
for i in range(1000):
    loss_history.append(nn.fit(x, y, epochs=1, verbose=0).history['loss'][0])
    theta_history.append((nn.layers[0].get_weights()[0][0][0], nn.layers[0].get_weights()[1][0]))
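
A less brute-force alternative (a sketch, assuming keras.callbacks.LambdaCallback is available) records the same per-epoch weight history from a single fit call; it uses a fresh model, since nn above is already trained:

In [ ]:
# same training, but recording (slope, intercept) with an end-of-epoch
# callback instead of re-entering fit() 1000 times
from keras.callbacks import LambdaCallback

nn2 = models.Sequential()
nn2.add(layers.Dense(1, input_dim=1))
nn2.compile(optimizer='sgd', loss='mse')

theta_history2 = []
recorder = LambdaCallback(on_epoch_end=lambda epoch, logs: theta_history2.append(
    (nn2.layers[0].get_weights()[0][0][0], nn2.layers[0].get_weights()[1][0])))
history = nn2.fit(x, y, epochs=1000, verbose=0, callbacks=[recorder])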

In [ ]:
# Plot SGD animation
fig = plt.figure(dpi=100, figsize=(5, 4))
# original data
sns.regplot(x=x, y=y, fit_reg=False)
# initial parameters
init_slope, init_intercept = theta_history[0]
# use a name distinct from the line() helper so the function isn't shadowed
fit_line, = plt.plot([0, 1.0], [init_intercept, line(init_intercept, init_slope, 1.0)], 'k-')
epoch_text = plt.text(0, 0, "Epoch 0")
plt.show()

def animate(i):
    current_slope, current_intercept = theta_history[i]
    fit_line.set_ydata([current_intercept, line(current_intercept, current_slope, 1.0)])
    epoch_text.set_text("Epoch {}, cost {:.3f}".format(i, loss_history[i]))
    return fit_line,

ani = animation.FuncAnimation(fig, animate, np.arange(0, len(theta_history)), interval=10)
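
To view or keep the result outside the interactive backend (an added extra; both the inline video and the save require ffmpeg to be installed), the animation can be rendered inline or written to disk:

In [ ]:
# embed the animation as an HTML5 video, or write it to disk
from IPython.display import HTML
HTML(ani.to_html5_video())
# ani.save('sgd_line_fit.mp4')  # hypothetical filename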
