In [32]:
# Required imports.
import sys
sys.path.append('../physlearn/')
sys.path.append('../source')
import numpy as np
from numpy import linalg as LA
import tensorflow as tf
from matplotlib import pylab as plt
import numpy.random as rand
from physlearn.NeuralNet.NeuralNet import NeuralNet
from physlearn.Optimizer.NelderMead.NelderMead import NelderMead
import math_util
import math

from mpl_toolkits.mplot3d.axes3d import Axes3D
%matplotlib notebook

def make_data(a, b, m):
    # Build an m x m grid on [a, b] x [a, b]:
    # x_train packs all (x, y) pairs into a 2 x m^2 array fed to the network,
    # xgrid/ygrid are the meshgrids used for the surface plots.
    x = np.linspace(a, b, m)
    y = np.linspace(a, b, m)
    x_list = []
    y_list = []
    for cur_x in x:
        for cur_y in y:
            x_list.append(cur_x)
            y_list.append(cur_y)
    x_train = np.array([x_list, y_list])
    xgrid, ygrid = np.meshgrid(x, y)
    return x_train, xgrid, ygrid

a = -1.5 * math.pi        # lower bound of the domain (both axes)
b = 1.5 * math.pi         # upper bound of the domain (both axes)
k = 1                     # wave number of the target cos(k * r)
sigmoid_ammount = 25      # number of sigmoid units in the hidden layer
m_1 = 10                  # grid points per axis
space_dim = 2             # input dimension (x, y)
iterations = int(5e5)     # Nelder-Mead iteration limit
max_eps = 1e-12           # stopping tolerance

xy, xgrid, ygrid = make_data(a, b, m_1)
m = xy[0].size            # total number of training points (m_1 ** 2)
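The nested loops in make_data simply enumerate every (x, y) pair of the grid. For reference, the same 2 x m^2 training array can be built without Python loops; make_data_vectorized below is an illustrative sketch and not part of the original code:

def make_data_vectorized(a, b, m):
    # Equivalent to make_data, but vectorized with meshgrid.
    x = np.linspace(a, b, m)
    y = np.linspace(a, b, m)
    # indexing='ij' reproduces the loop order of make_data (x varies slowest, y fastest).
    xg, yg = np.meshgrid(x, y, indexing='ij')
    x_train = np.vstack((xg.ravel(), yg.ravel()))
    # Plotting grids keep the default 'xy' indexing, exactly as in make_data.
    xgrid, ygrid = np.meshgrid(x, y)
    return x_train, xgrid, ygrid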

In [33]:
# Two-input network: one hidden layer of sigmoid units, one linear output.
net = NeuralNet(-2, 2)
net.add_input_layer(space_dim)
net.add(sigmoid_ammount, tf.sigmoid)
net.add_output_layer(1, net.linear)
net.compile()
net.set_random_matrixes()
net_out = net.return_graph()
sess = net.return_session()
dim = net.return_unroll_dim()   # number of unrolled network parameters

# Target function f(x, y) = cos(k * sqrt(x^2 + y^2)) and the mean squared error J.
f = tf.cos(k * tf.sqrt(tf.reduce_sum(tf.square(net.x), axis=0)))
J = tf.reduce_sum(tf.square(f - tf.reduce_sum(net_out, axis=0))) * (1 / m)


def COST(params):
    # Load the unrolled parameter vector into the network and evaluate J on the training grid.
    net.roll_matrixes(params)
    res = net.calc(J, {net.x: xy})
    return res


opt_nm = NelderMead(-2.5, 2.5, progress_bar='tqdm')
opt_nm.set_epsilon_and_sd(0.3, 100)

def opt(cost_func, dim, n_it, eps):
    # Thin wrapper around the Nelder-Mead optimizer.
    optimisation_result = opt_nm.optimize(cost_func, dim, n_it, eps)
    return optimisation_result
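Here J is the mean squared deviation of the network output from the target f(x, y) = cos(k * sqrt(x^2 + y^2)) over all m training points. A plain NumPy version of the same cost can be handy for checking a parameter vector outside the TensorFlow graph; cost_numpy is an illustrative name, not part of physlearn:

def cost_numpy(prediction, x_data):
    # prediction: network output for each training point, shape (m,)
    # x_data: 2 x m array of (x, y) training points
    r = np.sqrt(np.sum(np.square(x_data), axis=0))
    target = np.cos(k * r)
    return np.mean(np.square(target - prediction))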

In [34]:
# Surface of the (still untrained) network output on the grid.
NN_grid = net.run(xy).reshape((m_1, m_1))
fig = plt.figure()
ax = Axes3D(fig)
ax.plot_surface(xgrid, ygrid, NN_grid)


Out[34]:
<mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x2028c8f6278>
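The surface above is produced by the randomly initialised network, so it is not expected to resemble the target yet. Purely for readability, the surface plots in this notebook could be given axis labels and a colour scale with standard matplotlib calls; this is a cosmetic sketch, not part of the original cells:

fig = plt.figure()
ax = Axes3D(fig)
surf = ax.plot_surface(xgrid, ygrid, NN_grid, cmap='viridis')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('NN(x, y)')
fig.colorbar(surf, shrink=0.6)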

In [35]:
optimisation_result = opt(COST, dim, iterations, max_eps)
print("J after optimisation: ", COST(optimisation_result.x))
print("Information: ", optimisation_result)


100%|████████████████████████████████████████████████████████████████████████| 500000/500000 [05:34<00:00, 1496.03it/s]
J after optimisation:  0.008105981081380203
Information:  Is converge: False
Amount of iterations: 500000
Total time: 334.22 s
Reached function value: 0.008105981081380203
Reason of break: Maximum iterations reached


In [36]:
# Surface of the network output after optimisation.
NN_grid = net.run(xy).reshape((m_1, m_1))
fig = plt.figure()
ax = Axes3D(fig)
ax.plot_surface(xgrid, ygrid, NN_grid)


Out[36]:
<mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x20287ca4240>

In [37]:
# Exact target values on the same grid, for comparison.
f_true = net.calc(f, {net.x: xy}).reshape((m_1, m_1))
fig = plt.figure()
ax = Axes3D(fig)
ax.plot_surface(xgrid, ygrid, f_true)


Out[37]:
<mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x2028917f3c8>

In [38]:
# Pointwise approximation error on the grid.
error = NN_grid - f_true
fig = plt.figure()
ax = Axes3D(fig)
ax.plot_surface(xgrid, ygrid, error)


Out[38]:
<mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x20287fe6be0>
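With only a 10 x 10 grid, the 3D error surface can be hard to read; a flat heatmap of the same array is sometimes clearer. A minimal matplotlib sketch, not part of the original notebook:

fig = plt.figure()
# error[i, j] corresponds to (x[i], y[j]), so transpose to put x on the horizontal axis.
plt.imshow(error.T, origin='lower', extent=[a, b, a, b])
plt.colorbar(label='NN - f')
plt.xlabel('x')
plt.ylabel('y')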

In [39]:
J_fin = optimisation_result.cost_function   # final cost value reported by the optimizer
mse = math_util.MSE(error)
std_err = math_util.std_err(error)
print(J_fin, ' ', mse, ' ', std_err)


0.008105981081380203   0.008105981081380203   0.09048679283830222
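math_util comes from the project's ../source directory, so its exact definitions are not shown here. The printed values are consistent with the following plain NumPy reconstruction (an assumption about math_util, not its actual source): MSE as the mean of the squared errors, and std_err as sqrt(sum(error^2) / (n - 1)).

n = error.size
mse_np = np.mean(np.square(error))                    # matches the printed MSE (and J)
std_np = np.sqrt(np.sum(np.square(error)) / (n - 1))  # matches the printed std_err
print(mse_np, ' ', std_np)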

In [40]:
COST(optimisation_result.x)


Out[40]:
0.008105981081380203