In [1]:
# Required import statements.
import sys
#import os
sys.path.append('../physlearn/')
sys.path.append('../source')
import numpy as np
from numpy import linalg as LA
import tensorflow as tf
from matplotlib import pyplot as plt
from IPython.display import clear_output
from physlearn.NeuralNet.NeuralNet import NeuralNet
from physlearn.Optimizer.NelderMead.NelderMead import NelderMead
from CostFunction import CostFunction
import d1_osc
import ann_constructor
import math_util
from visualiser import Visualiser
# Model Parameters
n_hid1 = 12  # size of the first hidden layer
n_hid2 = 12  # size of the second hidden layer
m = 350      # number of points in the training (approximation) grid
M = 4        # number of output neurons (basis functions)
a = -6       # left boundary of the approximation interval
b = 6        # right boundary of the approximation interval
%matplotlib inline
In [2]:
# ANN
net, net_output, net_sum, sess = ann_constructor.return_separated_deep_net_expressions(M, n_hid1, n_hid2)
# Expression defining the image of the network outputs under the action of the Hamiltonian. Task-dependent.
dim = net.return_unroll_dim()
print(dim)
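# dim is the total number of trainable network parameters unrolled into a single flat vector;
# this is the dimensionality the derivative-free Nelder-Mead search below works with
# (the optimize call further down uses dim + 1 coordinates).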
In [3]:
cost_function = CostFunction(net, 'gaus')
cost_function.define_approximation_grid(a, b, m)
cost_function.define_linearity_grid(M)
cost_function.compile()
J = cost_function.get_cost_func()
# Optimization: derivative-free Nelder-Mead search over the flat parameter vector.
opt_nm = NelderMead(-2.5, 2.5)
opt_nm.set_epsilon_and_sd(0.3, 100)
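# Optional sanity check (a minimal sketch, assuming J accepts a flat NumPy vector of
# length dim + 1 and returns a scalar cost value, as in the print call further below):
x0 = np.random.uniform(-2.5, 2.5, dim + 1)
print("J at a random point:", J(x0))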
In [4]:
optimisation_result = opt_nm.optimize(J, dim+1, 10**6, 10**(-2))
print("J after optimisation: ", J(optimisation_result.x))
print("Ебала: ", optimisation_result)
In [9]:
from visualiser import Visualiser2
In [10]:
vis = Visualiser2(net, cost_function.trial_func, M)
vis.plot_four(np.linspace(-5, 5, m, endpoint=True).reshape(1, m))
In [ ]:
# Hypothetical manual re-plot of the M basis functions (the original cell left y1 blank;
# net.run(x) below is an assumed evaluation call, not a documented physlearn API).
x = np.linspace(-5, 5, m, endpoint=True).reshape(1, m)
y1 = net.run(x)  # assumed: returns an (M, m) array of basis-function values on the grid
for i in range(M):
    func_i = y1[i, :]
    plt.plot(x[0], func_i)