In [10]:
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
matplotlib.style.use('ggplot')
from mpl_toolkits.mplot3d import Axes3D
import IPython.html.widgets as widg
from IPython.display import clear_output
import sys
%matplotlib inline

In [11]:
class Network:
    def __init__(self, shape):
        """The base network class. This defines a simple feed-forward network with appropriate weights and biases.
        
        Arguments:
        shape (list-like): This defines the # of layers and # of neurons per layer in your network.
                           Each element of the array or list adds a new layer with the number neurons specified by the element.
        Variables:
        self.shape: see shape.
        self.weights: A list of numpy arrays containing the weights corresponding to each channel between neurons.
        self.biases: A list of numpy arrays containing the biases corresponding to each neuron.
        self.errors: A list of numpy arrays containing the error of each neurons in any iteration of the training process.
        self.eta: A float representing the learning rate.
        self.lam: A scale factor used in L2 regularization
        """
        
        self.shape = np.array(shape) #shape is array-like, i.e. (2,3,4) is a 2 input, 3 hidden node, 4 output network
        self.weights = [np.random.ranf((self.shape[i],self.shape[i-1]))*.1 for i in range(1,len(self.shape))]
        self.biases = [np.random.ranf((self.shape[i],))*.1 for i in range(1,len(self.shape))]
        self.errors = [np.random.ranf((self.shape[i],)) for i in range(1,len(self.shape))]
        self.eta = .1
        self.lam = .01
    def sigmoid(self, inputs):
        """Computes the sigmoid function of some input.
        
        Arguments:
        inputs (float or numpy array): The input or inputs to be fed through the sigmoid function.
        """
        
        return 1/(1+np.exp(-inputs))
    def feedforward(self, inputs):
        """Feeds inputs through the network and returns the output.
        
        Arguments:
        inputs (numpy array): The inputs to the network, must be the same size as the first(input) layer.
        
        Variables:
        self.activation: A list of numpy arrays corresponding to the output of each neuron in your network.
        """
        
        assert inputs.shape==self.shape[0] #inputs must feed directly into the first layer.
        self.activation = [np.zeros((self.shape[i],)) for i in range(len(self.shape))]
        self.activation[0] = inputs
        for i in range(1,len(self.shape)):
            self.activation[i]=self.sigmoid(np.dot(self.weights[i-1],self.activation[i-1])+self.biases[i-1])
        return self.activation[-1]
    def comp_error(self, answer):
        """Computes the errors of each neuron.(Typically called Back Propagation)
        
        Arguments:
        answers (numpy array): The expected output from the network.
        """
        if (self.activation[-1]-answer).any>.15:
            self.eta = .005
        else: 
            self.eta = .5
        
        assert answer.shape==self.activation[-1].shape
        self.errors[-1] = np.pi*np.tan(np.pi/2*(self.activation[-1]-answer))*1/np.cos(np.pi/2*(self.activation[-1]-answer))**2*np.exp(np.dot(self.weights[-1],self.activation[-2])+self.biases[-1])/(np.exp(np.dot(self.weights[-1],self.activation[-2])+self.biases[-1])+1)**2
        for i in range(len(self.shape)-2, 0, -1):
            self.errors[i-1] = self.weights[i].transpose().dot(self.errors[i])*np.exp(np.dot(self.weights[i-1],self.activation[i-1])+self.biases[i-1])/(np.exp(np.dot(self.weights[i-1],self.activation[i-1])+self.biases[i-1])+1)**2
    def grad_descent(self):
        """Changes each variable based on the gradient descent algorithm."""
        
        #for i in range(len(self.biases)):
         #   self.biases[i]=self.biases[i]-self.eta*self.errors[i]
        for i in range(len(self.weights)):
            self.biases[i]=self.biases[i]-self.eta*self.errors[i]
            for j in range(self.weights[i].shape[0]):
                for k in range(self.weights[i].shape[1]):
                    self.weights[i][j,k] = (1-self.eta*self.lam/1000)*self.weights[i][j,k] - self.eta*self.activation[i][k]*self.errors[i][j]
    def train(self, inputs, answer):
        """Trains the network.
        
        Arguments:
        inputs (numpy array): The inputs to the network, must be the same size as the first(input) layer.
        answers (numpy array): The expected output from the network, must be the same size as the last(output) layer.
        """
        
        self.feedforward(inputs)
        self.comp_error(answer)
        self.grad_descent()

In [12]:
n1 = Network([2,15,1])
print n1.feedforward(np.array([1,2]))
for i in range(1000):
    n1.train(np.array([1,200]), np.array([.5]))
print n1.feedforward(np.array([1,2]))


[ 0.59949582]
[ 0.50761429]

In [13]:
from sklearn.datasets import load_digits
digits = load_digits()
print(digits.data[0]*.01)


[ 0.    0.    0.05  0.13  0.09  0.01  0.    0.    0.    0.    0.13  0.15
  0.1   0.15  0.05  0.    0.    0.03  0.15  0.02  0.    0.11  0.08  0.    0.
  0.04  0.12  0.    0.    0.08  0.08  0.    0.    0.05  0.08  0.    0.
  0.09  0.08  0.    0.    0.04  0.11  0.    0.01  0.12  0.07  0.    0.
  0.02  0.14  0.05  0.1   0.12  0.    0.    0.    0.    0.06  0.13  0.1   0.
  0.    0.  ]

In [14]:
num = [0,0,0,0,0,0,0]

num[0] = Network([64, 7, 10])
num[1] = Network([64, 7, 7, 10])
num[2] = Network([64, 7, 7, 7, 10])
num[3] = Network([64, 7, 7, 7, 7, 10])
num[4] = Network([64, 7, 7, 7, 7, 7, 10])
num[5] = Network([64, 7, 7, 7, 7, 7, 7,  10])
num[6] = Network([64, 7, 7, 7, 7, 7, 7, 7, 10])

In [15]:
# %timeit num.feedforward(digits.data[89]*.01)
# %timeit num.comp_error(np.eye(10)[digits.target[89]])
# %timeit num.grad_descent()

In [16]:
def Train_it(num, itera):
    iden = np.eye(10)
    acc = np.zeros((itera,))
    trainer = zip(digits.data,digits.target)
    perm = np.random.permutation(trainer)
    trains = perm[:1000]
    test = perm[1001:]
    #num = Network([64, 14, 10])
    print num.feedforward(digits.data[89]*.01)
    for i in range(itera):
        print(float(100*i/(itera*1.0)))
        for dig, ans in trains:
            num.train(dig*.01,iden[ans])
        cor = 0
        tot = 0
        for dig, ans in test:
            if num.feedforward(dig*.01).argmax()==ans:
                cor += 1
            tot += 1
        acc[i] = cor/float(tot)
    return acc

In [14]:
acc = Train_it(num[0], 100)
print(acc)


[ 0.08919598  0.08919598  0.08919598  0.08919598  0.08919598  0.08919598
  0.08919598  0.08919598  0.08919598  0.08919598  0.08919598  0.08919598
  0.13944724  0.18341709  0.22110553  0.28140704  0.38693467  0.49748744
  0.55904523  0.60050251  0.61557789  0.64070352  0.65954774  0.66582915
  0.68341709  0.70100503  0.71231156  0.72361809  0.74371859  0.75753769
  0.7638191   0.77135678  0.77889447  0.78768844  0.79899497  0.80527638
  0.80778894  0.81155779  0.81407035  0.81909548  0.8241206   0.83040201
  0.83417085  0.83417085  0.83919598  0.84296482  0.84296482  0.84170854
  0.84422111  0.84296482  0.85050251  0.85301508  0.85301508  0.85427136
  0.85929648  0.86055276  0.86055276  0.86432161  0.86809045  0.86683417
  0.86809045  0.87060302  0.87311558  0.8781407   0.88190955  0.88190955
  0.88944724  0.89572864  0.89698492  0.89824121  0.90075377  0.90201005
  0.90452261  0.90577889  0.90703518  0.9120603   0.91457286  0.91457286
  0.91582915  0.91959799  0.92085427  0.91959799  0.91959799  0.91959799
  0.92211055  0.92336683  0.92336683  0.92336683  0.92336683  0.92462312
  0.92713568  0.92713568  0.92839196  0.92839196  0.92713568  0.92839196
  0.92839196  0.92839196  0.92839196  0.92839196]

In [18]:
accu = np.zeros((7,100))
for i in range(7):
    accu[i] = Train_it(num[i], 100)
print(accu)


[ 0.34157564  0.37212167  0.34858148  0.33094004  0.33793419  0.29777233
  0.30086883  0.3516532   0.3642872   0.29426764]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.55444263  0.57031692  0.57347298  0.56247551  0.58210168  0.56938949
  0.56729647  0.55387477  0.57910735  0.58457915]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.56306564  0.57901227  0.56633152  0.5626682   0.58773779  0.55222001
  0.56224344  0.55236107  0.54867688  0.53381503]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.55233082  0.57927085  0.56821732  0.54947315  0.55208474  0.54321856
  0.556371    0.56208907  0.56195113  0.57763715]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.56185295  0.56164312  0.57686976  0.56828812  0.55248237  0.53443616
  0.55695595  0.56189509  0.54466348  0.57180388]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.5697638   0.56430745  0.56874735  0.55306099  0.59471299  0.57702524
  0.5733623   0.58429974  0.57122682  0.56593367]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.5402707   0.55481801  0.5597271   0.58036972  0.54105585  0.56344833
  0.55227688  0.56862798  0.57865171  0.56845782]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[[ 0.09045226  0.11557789  0.12688442  0.12939698  0.13944724  0.2160804
   0.26507538  0.30025126  0.31658291  0.32160804  0.38190955  0.49748744
   0.57788945  0.61306533  0.64949749  0.6959799   0.72613065  0.75125628
   0.76005025  0.76758794  0.77763819  0.78015075  0.78266332  0.78894472
   0.79145729  0.79145729  0.79396985  0.79522613  0.79396985  0.80025126
   0.80527638  0.80904523  0.81658291  0.82160804  0.82537688  0.82788945
   0.83040201  0.83417085  0.83542714  0.83919598  0.84170854  0.84798995
   0.85050251  0.85552764  0.8580402   0.86180905  0.86683417  0.86934673
   0.8718593   0.87437186  0.87562814  0.8781407   0.88190955  0.88316583
   0.88442211  0.88567839  0.88819095  0.88944724  0.88944724  0.89070352
   0.8919598   0.89321608  0.89447236  0.89572864  0.89447236  0.89447236
   0.89447236  0.89698492  0.89949749  0.90201005  0.90326633  0.90326633
   0.90703518  0.90954774  0.90954774  0.90954774  0.9120603   0.9120603
   0.91457286  0.91331658  0.91331658  0.91457286  0.91457286  0.91457286
   0.91457286  0.91457286  0.91457286  0.91582915  0.91708543  0.91708543
   0.91708543  0.91959799  0.91959799  0.91959799  0.92085427  0.92211055
   0.92211055  0.92211055  0.92211055  0.92211055]
 [ 0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945  0.07788945  0.07788945
   0.07788945  0.07788945  0.07788945  0.07788945]
 [ 0.1080402   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714  0.08542714  0.08542714
   0.08542714  0.08542714  0.08542714  0.08542714]
 [ 0.10050251  0.10050251  0.10050251  0.10050251  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226  0.09045226  0.09045226
   0.09045226  0.09045226  0.09045226  0.09045226]
 [ 0.09547739  0.09547739  0.09547739  0.09547739  0.09547739  0.09547739
   0.09547739  0.09547739  0.09547739  0.09547739  0.09547739  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854  0.09170854  0.09170854
   0.09170854  0.09170854  0.09170854  0.09170854]
 [ 0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.09798995
   0.09798995  0.09798995  0.09798995  0.09798995]
 [ 0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.10050251
   0.10050251  0.10050251  0.10050251  0.10050251]]

In [19]:
for i in range(7):
    plt.figure(figsize=(15,10))
    plt.plot(np.linspace(0,100,100),accu[i])



In [20]:
np.savetxt("Accuracy_Data_run_8.dat", accu)

In [25]:
def plot_epochs(az_angle, eleva):
    fig = plt.figure(figsize=(15, 10))
    ax = fig.add_subplot(111, projection='3d')
    X, Y = np.meshgrid(np.linspace(0,100,100), np.linspace(0,7, 7))
    ax.plot_surface(X, Y, accu)
    ax.view_init(elev=eleva, azim=az_angle)

In [26]:
widg.interact(plot_epochs, az_angle=(0, 360, 1), eleva=(0,20,1))


Out[26]:
<function __main__.plot_epochs>

In [ ]: