In [ ]:
# Required imports.
import sys
sys.path.append('../physlearn/')
sys.path.append('../source')
import numpy as np
from numpy import linalg as LA
import tensorflow as tf
from matplotlib import pylab as plt
import numpy.random as rand
from physlearn.NeuralNet.NeuralNet import NeuralNet
from physlearn.Optimizer.NelderMead.NelderMead import NelderMead
import math_util
import math
from mpl_toolkits.mplot3d.axes3d import Axes3D
%matplotlib notebook
def make_all_variants(A):
    # Build every combination of the per-axis grid points:
    # A has shape (dim, m); the result has shape (dim, m**dim) and
    # enumerates the full Cartesian product of the rows of A.
    dim = A.shape[0]
    m = A.shape[1]
    res = np.zeros((dim, m ** dim))
    for i in range(dim):
        for index, item in enumerate(A[i]):
            # Each value of row i fills a block of m**(dim - i - 1) columns,
            # and the block pattern of all m values repeats m**i times.
            for j in range(m ** i):
                left_break = (m ** (dim - i - 1)) * index + j * m ** (dim - i)
                right_break = (m ** (dim - i - 1)) * (index + 1) + j * m ** (dim - i)
                res[i, left_break:right_break] = item
    return res
def make_data(a, b, m_l, dim):
    # Sample m_l points uniformly on [a, b] along each of the dim axes
    # and return every combination as a (dim, m_l**dim) training array.
    x = np.empty((dim, m_l))
    for i in range(dim):
        x[i] = np.linspace(a, b, m_l)
    x_train = make_all_variants(x)
    return x_train
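# Hedged sanity check (added for illustration; assumes make_all_variants is meant
# to enumerate the full Cartesian product of the per-axis grids): for a 2 x 2
# input the columns should be the four distinct pairs.
_tiny = make_all_variants(np.array([[0.0, 1.0], [0.0, 1.0]]))
assert np.array_equal(_tiny, [[0.0, 0.0, 1.0, 1.0], [0.0, 1.0, 0.0, 1.0]])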
# Problem setup: approximate f(x) = cos(k*||x||) on [a, b]^space_dim.
a = 0
b = 2 * math.pi
k = 1
sigmoid_ammount = 25      # hidden-layer size (number of sigmoid units)
m_1 = 10                  # grid points per axis
space_dim = 3             # input dimension
iterations = int(5e5)     # Nelder-Mead iteration limit
max_eps = 1e-12           # convergence tolerance
x = make_data(a, b, m_1, space_dim)
m = x[0].size             # total number of grid points, m_1**space_dim
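# Quick shape check (added for illustration): the training set should hold
# m_1**space_dim columns, each a point of the space_dim-dimensional box.
print("x shape:", x.shape, " total points m =", m)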
In [ ]:
net = NeuralNet(-2, 2)
net.add_input_layer(space_dim)
net.add(sigmoid_ammount, tf.sigmoid)
net.add_output_layer(1, net.linear)
net.compile()
net.set_random_matrixes()
net_out = net.return_graph()
sess = net.return_session()
dim = net.return_unroll_dim()  # length of the unrolled parameter vector
# Target f(x) = cos(k*||x||) and the mean-squared cost J over the m grid points.
f = tf.cos(k * tf.sqrt(tf.reduce_sum(tf.square(net.x), axis=0)))
J = tf.reduce_sum(tf.square(f - tf.reduce_sum(net_out, axis=0))) / m
def COST(params):
    # Roll the flat parameter vector back into the network matrices
    # and evaluate the cost J on the training grid.
    net.roll_matrixes(params)
    res = net.calc(J, {net.x: x})
    return res
opt_nm = NelderMead(-2.5, 2.5, progress_bar='tqdm')
opt_nm.set_epsilon_and_sd(0.3, 100)
def opt(J, dim, n_it, eps):
    optimisation_result = opt_nm.optimize(J, dim, n_it, eps)
    return optimisation_result
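In [ ]:
# Optional sanity check (an assumption, not part of the original workflow):
# evaluate COST once at a random parameter vector to confirm that rolling the
# parameters and computing J works before the long Nelder-Mead run.
test_params = rand.uniform(-2, 2, dim)
print("J at random parameters:", COST(test_params))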
In [ ]:
optimisation_result = opt(COST, dim, iterations, max_eps)
print("J after optimisation: ", COST(optimisation_result.x))
print("Информация: ", optimisation_result)
In [ ]:
NN_grid = net.run(x).reshape(m)              # network output on the training grid
f_true = net.calc(f, {net.x: x}).reshape(m)  # exact values of cos(k*||x||)
error = NN_grid - f_true                     # pointwise approximation error
In [ ]:
J_fin = optimisation_result.cost_function  # final cost reported by the optimizer
mse = math_util.MSE(error)
std_err = math_util.std_err(error)
print("J_fin:", J_fin, " MSE:", mse, " std_err:", std_err)
In [ ]:
COST(optimisation_result.x)
In [ ]:
NN_grid
In [ ]:
f_true
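In [ ]:
# Optional visualization (an assumption, not in the original notebook): compare
# the network output with the exact values of cos(k*||x||) over the flattened
# grid index, and plot the pointwise error.
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(8, 6))
ax1.plot(f_true, label='cos(k*||x||)')
ax1.plot(NN_grid, '--', label='NeuralNet output')
ax1.legend()
ax2.plot(error)
ax2.set_title('pointwise error')
plt.show()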