In [1]:
import tensorflow as tf
import numpy as np
import math
from matplotlib import pyplot as plt
from tqdm import tqdm_notebook
from IPython.display import clear_output
%matplotlib inline
m = 60                                 # number of training points
tTr = np.linspace(0, 3*math.pi, m)     # training grid: t in [0, 3*pi]
om2 = tf.constant(1.0)                 # omega^2, squared angular frequency of the oscillator
In [2]:
sess = tf.Session()
In [3]:
# Trainable parameters of the ansatz x(t) = A*sin(B*t + C)
time = tf.placeholder(tf.float32)      # time points fed in at run time
A = tf.get_variable("A", initializer=tf.random_uniform(shape=[], minval=-1, maxval=1))
B = tf.get_variable("B", initializer=tf.random_uniform(shape=[], minval=-1, maxval=1))
C = tf.get_variable("C", initializer=tf.random_uniform(shape=[], minval=-1, maxval=1))
#A = tf.get_variable("A", initializer = -1.0)
#B = tf.get_variable("B", initializer = -1.0)
#C = tf.get_variable("C", initializer = 0.0)
In [4]:
init = tf.global_variables_initializer()
sess.run(init)
In [5]:
# FORWARD: trial solution x(t) = A*sin(B*t + C)
approximator = A * tf.sin(B*time + C)
# COST: ODE residual of x'' + om2*x = 0 plus penalties on the initial conditions x(0) = 0, x'(0) = v0
alpha = tf.get_variable("Learning_Rate", initializer=0.01)
v0 = tf.constant(1.0)                        # target initial velocity x'(0)
num = tf.constant(m, dtype=tf.float32)       # number of training points
t_0 = tf.constant(0.0)                       # initial time
vel = tf.gradients(approximator, time)       # dx/dt  (tf.gradients returns a list)
acc = tf.gradients(vel, time)                # d^2x/dt^2
x0_app = A * tf.sin(B*t_0 + C)               # x(0) of the ansatz
v0_app = A * B * tf.cos(B*t_0 + C)           # x'(0) of the ansatz
eq = acc + tf.multiply(approximator, om2)    # ODE residual x'' + om2*x
J = tf.square(tf.reduce_sum(eq))/num + tf.square(x0_app) + tf.square(v0_app - v0)
# Manual gradient descent: gradients of J w.r.t. each parameter and the corresponding update ops
Ag = tf.gradients(J, A)
Bg = tf.gradients(J, B)
Cg = tf.gradients(J, C)
update_A = tf.assign_sub(A, alpha*tf.reshape(Ag, []))
update_B = tf.assign_sub(B, alpha*tf.reshape(Bg, []))
update_C = tf.assign_sub(C, alpha*tf.reshape(Cg, []))
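For reference, the cost implemented above combines the summed ODE residual with penalties on the two initial conditions; with the ansatz x(t) = A*sin(B*t + C) it reads

$$ J = \frac{1}{m}\left(\sum_{i=1}^{m}\big(\ddot{x}(t_i) + \omega^2 x(t_i)\big)\right)^{2} + x(0)^2 + \big(\dot{x}(0) - v_0\big)^2 $$

Note that, as written, the residual is summed over the grid before squaring (tf.square(tf.reduce_sum(eq))), so residuals of opposite sign can cancel; a mean of squared residuals would penalize every grid point individually.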
In [6]:
Err = []   # cost history
I = []     # iteration indices

def grad_descent(N, learn_rate):
    # N steps of manual gradient descent; A, B, C are updated one after another,
    # so each assign_sub already sees the previous parameter's new value.
    sess.run(tf.assign(alpha, learn_rate))
    for i in tqdm_notebook(range(N)):
        I.append(i)
        j = sess.run(J, {time: tTr})
        Err.append(j)
        sess.run(update_A, {time: tTr})
        sess.run(update_B, {time: tTr})
        sess.run(update_C, {time: tTr})
        if i % 400 == 0:
            clear_output()
            print(j)

def showGrad():
    # Current gradients of J w.r.t. A, B, C on the training grid
    grads = []
    grads.append(sess.run(Ag, {time: tTr}))
    grads.append(sess.run(Bg, {time: tTr}))
    grads.append(sess.run(Cg, {time: tTr}))
    return grads
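As an aside, the three manual assign_sub updates could also be expressed as a single built-in optimizer op. A minimal sketch, assuming the graph (J, A, B, C, alpha, time) and session defined above; note that it updates A, B, C jointly rather than one after another:

# Sketch only: optimizer-based training step (not used in the runs below)
train_step = tf.train.GradientDescentOptimizer(learning_rate=alpha).minimize(J, var_list=[A, B, C])

def grad_descent_opt(N, learn_rate):
    sess.run(tf.assign(alpha, learn_rate))
    for i in range(N):
        _, j = sess.run([train_step, J], {time: tTr})
        if i % 400 == 0:
            print(j)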
In [7]:
grad_descent(2000, 0.05)
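The cost history collected in I and Err is not plotted in the original cells; a minimal sketch to inspect convergence, using only the objects defined above:

# Sketch only: training curve from the lists filled by grad_descent()
plt.plot(I, Err)
plt.xlabel("iteration")
plt.ylabel("cost J")
plt.yscale("log")   # the cost usually spans several orders of magnitude
plt.show()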
In [8]:
print(showGrad())                               # current gradients of J w.r.t. A, B, C
sess.run(tf.square(v0_app - v0), {time: tTr})   # squared error in the initial velocity
#print(sess.run(approximator, {time: 1}))
Out[8]:
In [9]:
r = np.sin(tTr)                                 # analytic solution x(t) = sin(t) for om2 = 1, x(0) = 0, x'(0) = 1
result = sess.run(approximator, {time: tTr})    # learned approximation
plt.plot(tTr, result, 'red')                    # approximation
plt.plot(tTr, r, 'green')                       # exact solution
plt.plot(tTr, result - r)                       # pointwise error
Out[9]:
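To put a number on the fit shown above, a short sketch using the result and r arrays from the previous cell:

# Sketch only: error of the learned approximation against sin(t)
err = result - r
print("max abs error:", np.max(np.abs(err)))
print("RMS error:", np.sqrt(np.mean(err**2)))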
In [10]:
# Disabled sanity check: set (A, B, C) = (1, 1, 0) and compare the autodiff
# acceleration with the analytic -A*B*B*sin(B*t + C).
'''
true_init = [tf.assign(A, 1),
tf.assign(B, 1),
tf.assign(C, 0)]
for assignment in true_init:
sess.run(assignment)
x_res = sess.run(approximator, {time: tTr})
acc_res = sess.run(acc, {time: tTr})
acc_th = -A*B*B*tf.sin(B*time+C)
plt.plot(tTr, x_res)
plt.plot(tTr, acc_res[0])
plt.plot(tTr, sess.run(acc_th, {time: tTr}) - acc_res[0])
'''
print(1)   # trailing statement so the disabled block above is not echoed as cell output