In [1]:
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt
import math
from tqdm import tqdm_notebook
from IPython.display import clear_output
%matplotlib inline
# --- Problem / experiment configuration ---
k = 1  # frequency of the target function f(x) = cos(k*x)
a = 0  # left end of the approximation interval
b = math.pi*2  # right end of the approximation interval: [0, 2*pi]
sigmoid_ammount = 9  # number of hidden sigmoid units in the network
n = 400  # number of evaluation points on the dense grid x_np
n_in_set = 32  # mini-batch size drawn per gradient-descent step
dim = 1  # input dimensionality of the network
# Dense evaluation grid over [a, b], shaped (n, 1) to match the net's input.
x_np = np.linspace(a, b, n).reshape(n, 1)
def get_x_set(a, b, n):
    """Draw ``n`` uniform random points from ``[a, b]`` as an (n, 1) column.

    The upper bound is nudged by ``1e-5`` because ``np.random.uniform``
    excludes its ``high`` endpoint, so values at (or just below) ``b``
    remain reachable.
    """
    batch = np.random.uniform(a, b + 1e-5, size=(n, 1))
    return batch
In [ ]:
In [ ]:
In [2]:
def rand_weights(maxval, nrows, ncols):
    """Return a (nrows, ncols) matrix of uniform samples from (-maxval, maxval)."""
    unit_uniform = np.random.rand(nrows, ncols)  # values in [0, 1)
    return maxval * (2.0 * unit_uniform - 1.0)   # rescaled to (-maxval, maxval)
def get_nn_expression(dim, nsigms, maxval, sess='no'):
    """Build a one-hidden-layer sigmoid network N(x) = sigmoid(x·V + B1)·Wᵀ + B2.

    Parameters
    ----------
    dim : input dimensionality (columns of the placeholder input).
    nsigms : number of hidden sigmoid units.
    maxval : weights are initialized uniformly in (-maxval, maxval).
    sess : optional tf.Session; anything other than the sentinel string
        'no' triggers variable initialization via that session.

    Returns
    -------
    (input placeholder, output tensor, list of the four weight variables).
    """
    x_tf = tf.placeholder(dtype=tf.float64)
    out_w = tf.Variable(rand_weights(maxval, 1, nsigms), dtype=tf.float64)    # W: (1, nsigms)
    hid_w = tf.Variable(rand_weights(maxval, dim, nsigms), dtype=tf.float64)  # V: (dim, nsigms)
    hid_b = tf.Variable(rand_weights(maxval, 1, nsigms), dtype=tf.float64)    # B1: hidden bias
    out_b = tf.Variable(rand_weights(maxval, 1, 1), dtype=tf.float64)         # B2: output bias
    hidden = tf.sigmoid(tf.matmul(x_tf, hid_w) + hid_b)
    approximator = tf.matmul(hidden, tf.transpose(out_w)) + out_b
    if sess != 'no':
        # Initialize the freshly created variables in the caller's session.
        sess.run(tf.global_variables_initializer())
    return x_tf, approximator, [out_w, hid_w, hid_b, out_b]
def plot_nn(x):
    """Evaluate the module-level network ``nn`` at points ``x`` and plot it.

    Relies on the globals ``sess``, ``nn`` and ``x_tf`` defined elsewhere
    in the notebook.
    """
    net_out = sess.run(nn, {x_tf: x})
    plt.figure(figsize=(12, 8))
    plt.grid(which='both', axis='both')
    plt.title('Neural net output', fontsize=20)
    plt.xlabel('x', fontsize=20)
    plt.ylabel('N=N(x,w)', fontsize=20)
    plt.plot(x, net_out)
In [3]:
# Create the (TF1) session and build the network, initializing its variables.
sess = tf.Session()
# NOTE: despite its name, W here is the full list [W, V, B1, B2] returned by
# get_nn_expression, not just the output-layer weights.
x_tf, nn, W = get_nn_expression(dim, sigmoid_ammount, 5, sess)
# Visualize the untrained network over the dense grid.
plot_nn(x_np)
In [4]:
# Target function to approximate: f(x) = cos(k*x).
# (A multi-dimensional variant would be
#  tf.cos(k*tf.sqrt(tf.reduce_sum(tf.square(x_tf), axis=-1))).)
f = tf.cos(k*x_tf)
# Mean-squared-error cost. The original normalized reduce_sum by the fixed
# grid size n (=400) even though training feeds batches of n_in_set (=32)
# points, mis-scaling the cost; reduce_mean divides by the actual batch size.
J = tf.reduce_mean(tf.square(f - nn))
# Learning rate as a variable so grad_descent can change it between runs.
alpha = tf.Variable(1e-3, dtype = tf.float64)
# Gradients of the cost w.r.t. every weight tensor, plus one in-place
# SGD update op per weight: w <- w - alpha * dJ/dw.
grads_weights = tf.gradients(J, W)
updates = [w.assign_sub(alpha * g) for w, g in zip(W, grads_weights)]
In [5]:
def grad_descent(N, learn_rate):
    """Run ``N`` SGD steps on random mini-batches of the target function.

    Parameters
    ----------
    N : number of iterations (converted to int; may arrive as a float like 1e6).
    learn_rate : learning rate loaded into the graph variable ``alpha``.

    Returns
    -------
    (list of step indices, list of 1/J values per step) for plotting.
    """
    N = int(N)
    print(N)
    # Load the learning rate directly into the existing variable. The original
    # built a brand-new tf.assign op on every call, permanently growing the
    # graph each time grad_descent was invoked.
    alpha.load(learn_rate, sess)
    Err = []
    I = []
    j = None  # last observed cost; stays None if N == 0
    for i in range(N):
        I.append(i)
        # One update step on a fresh random batch, fetching the cost alongside.
        _, j = sess.run([updates, J], {x_tf: get_x_set(a, b, n_in_set)})
        Err.append(1/j)
        if (i % 10000 == 0):
            clear_output()
            print('Current J(w) value: ', j, '; ', i*100.0/N, '%')
    # Guard: the original unconditionally printed j, raising NameError for N == 0.
    if j is not None:
        print('Res J: ', j)
    return I, Err
In [6]:
# Train for one million steps and plot 1/J so convergence shows as growth.
n_steps = 1e6
start_lr = 1e-4
step_idx, inv_cost = grad_descent(n_steps, start_lr)
fig = plt.figure(figsize=(12, 8))
plt.grid(which='both', axis='both')
plt.title('Inverted value of the cost function', fontsize=20)
plt.xlabel('Number of iterations', fontsize=20)
plt.ylabel('1/J', fontsize=20)
plt.plot(step_idx, inv_cost)
Out[6]:
In [7]:
# Configure LaTeX rendering through plt.rcParams: the bare name `matplotlib`
# was never imported (only `from matplotlib import pyplot as plt`), so the
# original `matplotlib.rcParams[...]` lines raised a NameError.
plt.rcParams['text.usetex'] = True
# 'text.latex.unicode' was deprecated and then removed in matplotlib >= 3.0,
# and 'text.latex.preamble' now takes a single string, not a list.
plt.rcParams['text.latex.preamble'] = r'\usepackage{amsmath}'
# Compare the trained network with the true target on the dense grid.
nn_val = sess.run(nn, {x_tf : x_np})
f_val = np.cos(k*x_np)  # matches the training target f = cos(k*x); k == 1 here
fig = plt.figure(figsize = (12,8))
plt.title(r'Neural net approximation of $\cos(x)$', fontsize=25)
plt.grid(True)
plt.xlabel('x', fontsize=25)
plt.plot(x_np, nn_val, 'g', label= 'Neural net solution')
plt.plot(x_np, f_val, 'b', label = r'$\cos(x)$')
plt.plot(x_np, f_val - nn_val, 'r--', label = 'Error')
plt.legend(loc=1, prop={'size': 20})