In [2]:
# Required imports.
import sys, os
sys.path.append(os.path.join(sys.path[0], '../source/'))
sys.path.append(os.path.join(sys.path[0], '../../'))
import numpy as np
from numpy import linalg as LA
import tensorflow as tf
from matplotlib import pylab as plt
from tqdm import tqdm_notebook
from IPython.display import clear_output
import numpy.random as rand
from physlearn.NeuralNet.NeuralNetPro import NeuralNetPro
from physlearn.DifferentialEvolution import DifferentialEvolution
from visualiser import Visualiser
import d1_osc
import ann_constructor
import math_util
In [3]:
# Load the sampled output matrix and inspect the two Gram-like products.
# np.loadtxt accepts a filename directly and manages the file handle itself,
# so the descriptor cannot leak if parsing raises (the manual open/close
# pattern left the file open on error).
A = np.loadtxt("outs_matrix_with_variance.txt", delimiter=' ')
B = np.matmul(A, np.transpose(A))  # row-by-row inner products
C = np.matmul(np.transpose(A), A)  # column-by-column inner products
print('Shape of B:', B.shape, ' Shape of C: ', C.shape)
print('Norm of B:', math_util.norm(B), ' Norm of C: ', math_util.norm(C))
print('Cond of B:', math_util.cond(B), ' Cond of C: ', math_util.cond(C))
print('Determinant of B:', np.linalg.det(B), ' Determinant of C: ', np.linalg.det(C))
In [4]:
# Gram matrix of the rows of A (pairwise inner products of the sampled functions).
gram_matrix = np.matmul(A, np.transpose(A))
print('Gram matrix: \n', gram_matrix)
M = np.diag(gram_matrix).size  # number of functions (rows of A)
# Diagonal part of the Gram matrix as a full matrix.  The original built
# np.eye(M) and patched its flat-strided diagonal in place
# (eye + (-1 + d) on the diagonal == d); np.diag of the diagonal vector
# produces the identical matrix directly and readably.
diag_gram_matrix = np.diag(np.diag(gram_matrix))
print('Diag of gram matrix: \n', diag_gram_matrix)
In [5]:
# Normalize the Gram matrix by its diagonal part: D^{-1} G.
# Solving the linear system is numerically preferable to forming the
# explicit inverse and multiplying (avoids the extra rounding of inv).
normal_gram_matrix = LA.solve(diag_gram_matrix, gram_matrix)
print('Normal gram matrix: \n', normal_gram_matrix)
# Zero the diagonal so only the cross terms (overlaps) remain.
np.fill_diagonal(normal_gram_matrix, 0)
print('Normal gram matrix - diag: \n', normal_gram_matrix)
# Norm of the off-diagonal part: how far the function system is from orthogonal.
nonortho_factor = math_util.norm(normal_gram_matrix)
print('Measure of nonorthogonality: \n', nonortho_factor)
In [6]:
# Evaluation grid: m points on [-10, 10], shaped (1, m) for the network input.
# NOTE(review): m and x are reused by a later cell — their names must stay.
m = 1000
x = np.linspace(-10, 10, num=m, endpoint=True)[np.newaxis, :]
# Show the first analytic oscillator wavefunctions on this grid.
Visualiser.show_wf_system(2, x)
In [7]:
def values(i):
    """Pointwise ratio (H psi)/psi for the i-th sampled function.

    H is the 1-D oscillator-style operator -d^2/dx^2 + x^2, discretized with
    np.gradient on a 500-point grid over [-7, 7].  If psi were an exact
    eigenfunction the ratio would be constant (the energy), so the spread of
    this ratio measures eigen-quality.
    """
    psi = A[i, :]
    d_psi = np.gradient(psi)  # first derivative
    x_grid = np.linspace(-7, 7, 500, endpoint=True)
    # -psi'' + x^2 * psi  (second derivative via repeated np.gradient)
    h_psi = -np.gradient(d_psi) + np.square(x_grid) * psi.reshape(500,)
    return h_psi / psi

for i in range(4):
    ratios = values(i)  # compute once per state instead of twice per iteration
    print('Mean of ', i, ' HN/N distr: ', math_util.mean(ratios))
    print('Dispersion of ', i, ' H N / N distr: ', math_util.variance(ratios))
In [8]:
# Sanity check of math_util.mean/variance against a known normal sample.
s = np.random.normal(1000, 500, 15000)
# matplotlib removed the `normed` kwarg (deprecated in 2.1, removed in 3.1);
# `density=True` is the supported spelling for a normalized histogram.
count, bins, ignored = plt.hist(s, 50, density=True)
sigma = np.sqrt(math_util.variance(s))
mu = math_util.mean(s)
# Overlay the analytic normal pdf using the estimated mu and sigma.
plt.plot(bins, 1.0/(sigma*np.sqrt(2*np.pi))*np.exp(-(bins - mu)**2/(2 * sigma**2)),
         linewidth=2, color='r')
plt.show()
In [9]:
# Build the deep-network expression system and plot the first outputs.
M = 5            # number of network outputs (wavefunctions)
num_sig = 500    # sigmoid-layer width
num_h2 = 10000   # second hidden-layer width
a = -30          # grid interval [a, b]
b = 30
# Previously this cell silently inherited `m` from a much earlier cell
# (hidden kernel state); bind it locally with the same value so the cell
# survives Restart & Run All reordering.
m = 1000         # grid resolution
x = np.linspace(a, b, m, endpoint=True).reshape(1, m)
net, net_output, net_sum, sess = ann_constructor.return_deep_net_expressions(M, num_sig, num_h2)
vis = Visualiser(net, net_output, net_sum, M)
vis.plot_four(x)
In [10]:
#vis.plot_four(x)
In [11]:
# Overlay the first three sampled functions on a 500-point grid over [-10, 10].
x = np.linspace(-10, 10, 500, endpoint=True).reshape(1, 500)
for i, y in enumerate(A[:3]):
    plt.plot(x[0], y)
In [ ]:
In [ ]:
In [ ]: