In [61]:
# Required imports.
import sys
sys.path.append('../physlearn/')
sys.path.append('../source')
import numpy as np
from numpy import linalg as LA
import tensorflow as tf
from matplotlib import pylab as plt
import numpy.random as rand
from physlearn.NeuralNet.NeuralNet import NeuralNet
from physlearn.Optimizer.NelderMead.NelderMead import NelderMead
import math_util
import math

from mpl_toolkits.mplot3d.axes3d import Axes3D
%matplotlib notebook


def grid_vect(a, b, N):
    # Build an N x N grid on [a, b] x [a, b] and return it as an (N*N, 2) array of points.
    x = np.linspace(a, b, N, endpoint=True)
    h = x[1] - x[0]
    return np.mgrid[a:b+h:h, a:b+h:h].reshape(2, -1).T

def get_meshgrid(a, b, N):
    # Same grid, but as 2-D meshes for surface plotting.
    x = np.linspace(a, b, N, endpoint=True)
    return np.meshgrid(x, x)


a = -2*math.pi           # left edge of the square domain
b = 2*math.pi            # right edge of the square domain
k = 1                    # wave number of the target cos(k*r)
sigmoid_amount = 35      # number of sigmoid units in the hidden layer
m_1 = 75                 # grid points per axis
space_dim = 2            # input dimension (x, y)
iterations = int(5e5)    # iteration limit for Nelder-Mead
max_eps = 1e-10          # target accuracy for the optimizer

xy = grid_vect(a, b, m_1)    # (m_1*m_1, 2) array of grid points
x_np = np.transpose(xy)      # (2, m_1*m_1) layout expected by the net

m = xy[:, 0].size            # total number of grid points

net = NeuralNet(-5, 5)
net.add_input_layer(space_dim)
net.add(sigmoid_amount, tf.sigmoid)    # single hidden layer of sigmoid units
net.add_output_layer(1, net.linear)
net.compile()
net.set_random_matrixes()
net_out = net.return_graph()
sess = net.return_session()
dim = net.return_unroll_dim()          # number of unrolled (trainable) parameters

# Target f(x, y) = cos(k*sqrt(x^2 + y^2)) and the mean squared error over the grid.
f = tf.cos(k*tf.sqrt(tf.reduce_sum(tf.square(net.x), axis=0)))
J = tf.reduce_sum(tf.square(f - tf.reduce_sum(net_out, axis=0)))*(1/m)

def COST(params):
    # Roll the flat parameter vector back into the net and evaluate the loss J on the grid.
    net.roll_matrixes(params)
    res = net.calc(J, {net.x: x_np})
    return res


opt_nm = NelderMead(-2.5, 2.5, progress_bar='tqdm')
opt_nm.set_epsilon_and_sd(0.3, 100)

def opt(cost_fn, dim, n_it, eps):
    # Thin wrapper around the Nelder-Mead optimizer.
    optimisation_result = opt_nm.optimize(cost_fn, dim, n_it, eps)
    return optimisation_result
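For reference, the target surface and the loss that J encodes can also be written in plain NumPy. The sketch below is not part of the original pipeline; f_numpy and mse_numpy are hypothetical helpers, useful only as a shape and value sanity check.

# Hedged sketch (hypothetical helpers, not used anywhere else in the notebook):
# the target f(x, y) = cos(k*sqrt(x^2 + y^2)) and the mean squared error in NumPy.
def f_numpy(points):
    # points: (n_points, 2) array of (x, y) pairs, e.g. the rows of xy
    r = np.sqrt(np.sum(points**2, axis=1))
    return np.cos(k*r)

def mse_numpy(pred, target):
    # mean squared error over all grid points, matching the definition of J above
    return np.mean((pred - target)**2)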

In [62]:
# Output of the (not yet optimised) net on the grid.
nn_val = net.run(x_np)
nn_val = nn_val.reshape(1, nn_val.size)
print(nn_val.shape)
fig = plt.figure(figsize=(14, 6))
ax = fig.add_subplot(1, 2, 1, projection='3d')
ax.plot_wireframe(xy[:, 0], xy[:, 1], nn_val)


(1, 5625)
Out[62]:
<mpl_toolkits.mplot3d.art3d.Line3DCollection at 0x1bdc1254f28>
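With the flattened 1-D arrays, plot_wireframe broadcasts all inputs to shape (1, m) and typically renders a single line through the points rather than a surface grid. A hedged alternative, not the author's plotting code: reshape the grid back to (m_1, m_1) meshes before plotting.

# Hedged sketch: reshape the flattened grid to 2-D meshes for a proper wireframe.
X = xy[:, 0].reshape(m_1, m_1)
Y = xy[:, 1].reshape(m_1, m_1)
Z = nn_val.reshape(m_1, m_1)
fig = plt.figure(figsize=(7, 6))
ax = fig.add_subplot(1, 1, 1, projection='3d')
ax.plot_wireframe(X, Y, Z)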

In [63]:
optimisation_result = opt(COST, dim, iterations, max_eps)
print("J after optimisation: ", COST(optimisation_result.x))
print("Info: ", optimisation_result)


100%|█████████████████████████████████████████████████████████████████████████| 500000/500000 [19:09<00:00, 434.95it/s]
J after optimisation:  0.04660165995258442
Info:  Is converge: False
Amount of iterations: 500000
Total time: 1149.57 s
Reached function value: 0.04660165995258442
Reason of break: Maximum iterations reached
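The run above stopped at the iteration cap without converging. One cheap follow-up, assuming each opt_nm.optimize call starts from a fresh simplex, is to keep the best of a few independent restarts; the sketch below uses only calls that already appear in this notebook.

# Hedged sketch (assumes opt_nm.optimize restarts from a fresh simplex on each call):
# keep the best of several independent Nelder-Mead runs.
best_result = None
for _ in range(3):
    result = opt(COST, dim, iterations, max_eps)
    if best_result is None or result.cost_function < best_result.cost_function:
        best_result = result
net.roll_matrixes(best_result.x)   # load the best parameters back into the net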


In [64]:
# Output of the optimised net on the grid (the best parameters were rolled in by the last COST call).
nn_val = net.run(x_np)
nn_val = nn_val.reshape(1, nn_val.size)
print(nn_val.shape)
fig = plt.figure(figsize=(14, 6))
ax = fig.add_subplot(1, 2, 1, projection='3d')
ax.plot_wireframe(xy[:, 0], xy[:, 1], nn_val)


(1, 5625)
Out[64]:
<mpl_toolkits.mplot3d.art3d.Line3DCollection at 0x1bdbcbf3160>

In [65]:
# Exact target values on the grid.
f_true = net.calc(f, {net.x: x_np})
f_true = f_true.reshape(1, f_true.size)
print(f_true.shape)
fig = plt.figure(figsize=(14, 6))
ax = fig.add_subplot(1, 2, 1, projection='3d')
ax.plot_wireframe(xy[:, 0], xy[:, 1], f_true)


(1, 5625)
Out[65]:
<mpl_toolkits.mplot3d.art3d.Line3DCollection at 0x1bdc2105fd0>
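If the hypothetical f_numpy helper from the sketch after the setup cell is defined, the TensorFlow evaluation of the target can be cross-checked against the closed form.

# Hedged cross-check against the NumPy closed form (depends on the f_numpy sketch above).
print(np.max(np.abs(f_true - f_numpy(xy))))   # expected ~0 up to float precision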

In [66]:
# Pointwise approximation error of the net against the target.
error = nn_val - f_true
print(error.shape)
fig = plt.figure(figsize=(14, 6))
ax = fig.add_subplot(1, 2, 1, projection='3d')
ax.plot_wireframe(xy[:, 0], xy[:, 1], error)


(1, 5625)
Out[66]:
<mpl_toolkits.mplot3d.art3d.Line3DCollection at 0x1bdbccb1a58>
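A numeric complement to the wireframe: the largest pointwise deviation and the grid point where it occurs (a hedged sketch, not in the original notebook).

# Hedged sketch: largest absolute error and the (x, y) grid point where it occurs.
idx = np.argmax(np.abs(error))     # flat index into the m_1*m_1 grid
print(np.abs(error).max(), xy[idx])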

In [67]:
# Final cost reported by the optimizer alongside math_util's MSE and std_err of the pointwise error.
J_fin = optimisation_result.cost_function
mse = math_util.MSE(error)
std_err = math_util.std_err(error)
print(J_fin, ' ', mse, ' ', std_err)


0.04660165995258442   0.04660165995258441   0.21589336758047017
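The first two printed numbers agree because J was defined as the mean squared deviation over the grid, which is exactly the MSE of error. A direct check in NumPy (a hedged sketch, assuming math_util.MSE is a plain mean of squares):

# Hedged check: recompute the MSE of the pointwise error directly.
print(np.mean(error**2))   # expected to match J_fin up to floating-point rounding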

In [68]:
# Re-evaluate the cost at the returned optimum; this should reproduce the value above.
COST(optimisation_result.x)


Out[68]:
0.04660165995258442