In [1]:
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
matplotlib.style.use('ggplot')
from mpl_toolkits.mplot3d import Axes3D
import IPython.html.widgets as widg
from IPython.display import clear_output
import sys
import scipy.stats as sts
%matplotlib inline


:0: FutureWarning: IPython widgets are experimental and may change in the future.

In [2]:
class Network:
    def __init__(self, shape):
        """The base network class. This defines a simple feed-forward network with appropriate weights and biases.
        
        Arguments:
        shape (list-like): This defines the # of layers and # of neurons per layer in your network.
                           Each element of the array or list adds a new layer with the number neurons specified by the element.
        Variables:
        self.shape: see shape.
        self.weights: A list of numpy arrays containing the weights corresponding to each channel between neurons.
        self.biases: A list of numpy arrays containing the biases corresponding to each neuron.
        self.errors: A list of numpy arrays containing the error of each neurons in any iteration of the training process.
        self.eta: A float representing the learning rate.
        self.lam: A scale factor used in L2 regularization
        """
        
        self.shape = np.array(shape) #shape is array-like, i.e. (2,3,4) is a 2 input, 3 hidden node, 4 output network
        self.weights = [np.random.ranf((self.shape[i],self.shape[i-1]))*.1 for i in range(1,len(self.shape))]
        self.biases = [np.random.ranf((self.shape[i],))*.1 for i in range(1,len(self.shape))]
        self.errors = [np.random.ranf((self.shape[i],)) for i in range(1,len(self.shape))]
        self.eta = .1
        self.lam = .01
        self.wrong = 0
        self.total = 0
    def sigmoid(self, inputs):
        """Computes the sigmoid function of some input.
        
        Arguments:
        inputs (float or numpy array): The input or inputs to be fed through the sigmoid function.
        """
        
        return 1/(1+np.exp(-inputs))
    def feedforward(self, inputs):
        """Feeds inputs through the network and returns the output.
        
        Arguments:
        inputs (numpy array): The inputs to the network, must be the same size as the first(input) layer.
        
        Variables:
        self.activation: A list of numpy arrays corresponding to the output of each neuron in your network.
        """
        
        assert inputs.shape==self.shape[0] #inputs must feed directly into the first layer.
        self.activation = [np.zeros((self.shape[i],)) for i in range(len(self.shape))]
        self.activation[0] = inputs
        for i in range(1,len(self.shape)):
            self.activation[i]=self.sigmoid(np.dot(self.weights[i-1],self.activation[i-1])+self.biases[i-1])
        return self.activation[-1]
    def calc_learning_rate(self,grad):
        if grad>.85:
            self.eta=.1/grad**.1*1/(.25*(2*np.pi)**.5)*np.exp(-(grad)**2/(2*(.25)**2))
        else:
            self.eta=.1/grad**.6*1/(.4*(2*np.pi)**.5)*np.exp(-(grad)**2/(2*(.4)**2))*(grad+.08)
        self.wrong+=grad
        self.total+=grad
    def comp_error(self, answer):
        """Computes the errors of each neuron.(Typically called Back Propagation)
        
        Arguments:
        answers (numpy array): The expected output from the network.
        """
#         if (self.activation[-1]-answer).any>.15:
#             self.eta = .005
#         else: 
#             self.eta = .5
        self.calc_learning_rate(np.amax(np.abs((self.activation[-1]-answer))))
        #print(np.amax(np.abs((self.activation[-1]-answer))))
        assert answer.shape==self.activation[-1].shape
        self.errors[-1] = np.pi*np.tan(np.pi/2*(self.activation[-1]-answer))*1/np.cos(np.pi/2*(self.activation[-1]-answer))**2*np.exp(np.dot(self.weights[-1],self.activation[-2])+self.biases[-1])/(np.exp(np.dot(self.weights[-1],self.activation[-2])+self.biases[-1])+1)**2
        for i in range(len(self.shape)-2, 0, -1):
            self.errors[i-1] = self.weights[i].transpose().dot(self.errors[i])*np.exp(np.dot(self.weights[i-1],self.activation[i-1])+self.biases[i-1])/(np.exp(np.dot(self.weights[i-1],self.activation[i-1])+self.biases[i-1])+1)**2
    def grad_descent(self):
        """Changes each variable based on the gradient descent algorithm."""
        
        #for i in range(len(self.biases)):
         #   self.biases[i]=self.biases[i]-self.eta*self.errors[i]
        for i in range(len(self.weights)):
            self.biases[i]=self.biases[i]-self.eta*self.errors[i]
            for j in range(self.weights[i].shape[0]):
                for k in range(self.weights[i].shape[1]):
                    self.weights[i][j,k] = (1-self.eta*self.lam/1000)*self.weights[i][j,k] - self.eta*self.activation[i][k]*self.errors[i][j]
    def train(self, inputs, answer):
        """Trains the network.
        
        Arguments:
        inputs (numpy array): The inputs to the network, must be the same size as the first(input) layer.
        answers (numpy array): The expected output from the network, must be the same size as the last(output) layer.
        """
        
        self.feedforward(inputs)
        self.comp_error(answer)
        self.grad_descent()
    def get_fractional_err(self):
        return(self.wrong)

add piecewise def for learning rate


In [3]:
n1 = Network([2,15,1])
print n1.feedforward(np.array([1,2]))
for i in range(1000):
    n1.train(np.array([1,2]), np.array([.5]))
print n1.feedforward(np.array([1,2]))


[ 0.61116021]
[ 0.49975914]

In [4]:
from sklearn.datasets import load_digits
digits = load_digits()
print(digits.data[0]*.01)


[ 0.    0.    0.05  0.13  0.09  0.01  0.    0.    0.    0.    0.13  0.15
  0.1   0.15  0.05  0.    0.    0.03  0.15  0.02  0.    0.11  0.08  0.    0.
  0.04  0.12  0.    0.    0.08  0.08  0.    0.    0.05  0.08  0.    0.
  0.09  0.08  0.    0.    0.04  0.11  0.    0.01  0.12  0.07  0.    0.
  0.02  0.14  0.05  0.1   0.12  0.    0.    0.    0.    0.06  0.13  0.1   0.
  0.    0.  ]

In [5]:
num = []
for i in range(0,51):
    num.append(Network([64,7,10]))

In [6]:
# %timeit num.feedforward(digits.data[89]*.01)
# %timeit num.comp_error(np.eye(10)[digits.target[89]])
# %timeit num.grad_descent()

In [7]:
def Train_it(num, itera):
    iden = np.eye(10)
    acc = np.zeros((itera,))
    frac_err = np.zeros((itera,))
    trainer = zip(digits.data,digits.target)
    perm = np.random.permutation(trainer)
    trains = perm[:1000]
    test = perm[1001:]
    #num = Network([64, 14, 10])
    print num.feedforward(digits.data[89]*.01)
    for i in range(itera):
        print(float(100*i/(itera*1.0)))
        for dig, ans in trains:
            num.train(dig*.01,iden[ans])
        cor = 0
        tot = 0
        for dig, ans in test:
            if num.feedforward(dig*.01).argmax()==ans:
                cor += 1
            tot += 1
        acc[i] = cor/float(tot)
        frac_err[i] = num.get_fractional_err()
    return acc, frac_err

In [8]:
acc, frac_err = Train_it(num[8], 100)
print(acc)
print(frac_err)


[ 0.55407673  0.54586645  0.57549904  0.56780356  0.55255945  0.5532844
  0.56258799  0.57374516  0.54198566  0.57454814]
0.0
1.0
2.0
3.0
4.0
5.0
6.0
7.0
8.0
9.0
10.0
11.0
12.0
13.0
14.0
15.0
16.0
17.0
18.0
19.0
20.0
21.0
22.0
23.0
24.0
25.0
26.0
27.0
28.0
29.0
30.0
31.0
32.0
33.0
34.0
35.0
36.0
37.0
38.0
39.0
40.0
41.0
42.0
43.0
44.0
45.0
46.0
47.0
48.0
49.0
50.0
51.0
52.0
53.0
54.0
55.0
56.0
57.0
58.0
59.0
60.0
61.0
62.0
63.0
64.0
65.0
66.0
67.0
68.0
69.0
70.0
71.0
72.0
73.0
74.0
75.0
76.0
77.0
78.0
79.0
80.0
81.0
82.0
83.0
84.0
85.0
86.0
87.0
88.0
89.0
90.0
91.0
92.0
93.0
94.0
95.0
96.0
97.0
98.0
99.0
[ 0.10050251  0.09673367  0.10301508  0.18969849  0.19346734  0.37562814
  0.55527638  0.57537688  0.65954774  0.74623116  0.79020101  0.81532663
  0.85678392  0.87437186  0.88944724  0.90326633  0.90954774  0.91708543
  0.92211055  0.92211055  0.9258794   0.92839196  0.9321608   0.93592965
  0.93969849  0.94095477  0.94346734  0.94472362  0.9459799   0.94849246
  0.94849246  0.94849246  0.94849246  0.94849246  0.94723618  0.9459799
  0.94346734  0.94346734  0.94346734  0.94346734  0.94221106  0.94221106
  0.94221106  0.94346734  0.94346734  0.94221106  0.94095477  0.94095477
  0.94221106  0.94221106  0.94221106  0.94472362  0.94472362  0.94723618
  0.94849246  0.94849246  0.94849246  0.94849246  0.94849246  0.94849246
  0.94974874  0.94974874  0.94974874  0.95100503  0.95226131  0.95226131
  0.95226131  0.95100503  0.95100503  0.95100503  0.95100503  0.95100503
  0.95100503  0.94974874  0.94974874  0.94974874  0.94974874  0.94974874
  0.95100503  0.95100503  0.95100503  0.95100503  0.95100503  0.95100503
  0.95226131  0.95226131  0.95226131  0.95226131  0.95226131  0.95226131
  0.95226131  0.95226131  0.95226131  0.95226131  0.95226131  0.95226131
  0.95226131  0.95226131  0.95226131  0.95226131]
[   667.38719431   1335.31890816   2002.72549375   2668.55376847
   3326.35006241   3958.25468359   4556.73480492   5117.79896892
   5647.03611929   6150.67323731   6630.7079858    7089.67890698
   7528.87308089   7948.29481213   8347.72398947   8728.59229546
   9092.96145239   9444.20138504   9782.9190312   10110.23553179
  10429.0980865   10738.7362804   11041.34759453  11336.60941635
  11625.12951124  11907.38256361  12183.7981995   12454.82380552
  12720.83364692  12982.15964729  13239.07907556  13491.82927421
  13740.60851609  13985.61962406  14227.03280999  14464.99763751
  14699.65575465  14931.14547583  15159.58146047  15385.06553781
  15607.70342871  15827.57156441  16044.73281147  16259.25553982
  16471.19163578  16680.58521977  16887.47782354  17091.90701441
  17293.91118182  17493.5231476   17690.78966849  17885.73618858
  18078.39054199  18268.80405991  18457.01867804  18643.07556654
  18827.01964057  19008.88634975  19188.71238931  19366.55485475
  19542.46904164  19716.50239008  19888.70108319  20059.1039228
  20227.75161976  20394.68527095  20559.96185559  20723.63674202
  20885.75114089  21046.34403779  21205.45248037  21363.10945385
  21519.348914    21674.20029037  21827.69386916  21979.86811548
  22130.75620573  22280.38764589  22428.78966553  22575.99440014
  22722.02810472  22866.91531398  23010.68164976  23153.3503652
  23294.945666    23435.48985597  23575.00560802  23713.51597816
  23851.04043019  23987.59830372  24123.20906645  24257.8942614
  24391.67693146  24524.57530005  24656.60587914  24787.78355983
  24918.12552431  25047.64907753  25176.36917114  25304.29966781]

In [9]:
plt.figure(figsize=(15,10))
plt.plot(np.linspace(0,100, 100), frac_err)


Out[9]:
[<matplotlib.lines.Line2D at 0x7faa9cda8110>]

In [8]:
accu = np.zeros((50,20))
fracerr = np.zeros((50,20))
for i in range(50):
    print(i)
    accu[i], fracerr[i] = Train_it(num[i], 20)
print(accu)


0
[ 0.57023738  0.58561444  0.57752682  0.5679815   0.55223581  0.55855147
  0.53911845  0.57953125  0.56688274  0.5645422 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
1
[ 0.55208109  0.55339109  0.59193848  0.58277356  0.5787217   0.5628522
  0.55540352  0.53830783  0.54191251  0.56070854]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
2
[ 0.57046846  0.57895235  0.52238646  0.57831986  0.56718349  0.5625669
  0.57064598  0.58026207  0.56221166  0.54076506]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
3
[ 0.56504598  0.56039564  0.55283869  0.57181518  0.53877943  0.57128824
  0.56596843  0.56629316  0.57795036  0.56134488]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
4
[ 0.55754585  0.52776455  0.55960392  0.56342933  0.54871733  0.56820596
  0.56143231  0.57647292  0.55818138  0.58211627]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
5
[ 0.55007834  0.55713738  0.55381209  0.57102469  0.53350441  0.5677679
  0.55802914  0.53868598  0.56406038  0.56311666]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
6
[ 0.56593884  0.53667819  0.57113096  0.54697908  0.54204458  0.56761131
  0.54220064  0.54384897  0.57823332  0.5633489 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
7
[ 0.57174343  0.57734679  0.56515744  0.54990072  0.57861661  0.5219704
  0.55870763  0.56803887  0.56563853  0.54580722]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
8
[ 0.56052099  0.5786427   0.58538337  0.54056037  0.57609083  0.57269717
  0.55360277  0.5463438   0.55689802  0.56185527]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
9
[ 0.5662583   0.54219054  0.55017563  0.54143847  0.56447502  0.56304934
  0.55541     0.55869023  0.544816    0.56398256]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
10
[ 0.58052973  0.56790123  0.55097663  0.58005666  0.54836448  0.54634335
  0.56765912  0.58342298  0.56488355  0.56275753]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
11
[ 0.54931207  0.56583961  0.56817233  0.56879108  0.55573449  0.56173662
  0.5635368   0.54238235  0.53507109  0.55814849]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
12
[ 0.55226868  0.57259699  0.55319204  0.54761531  0.57960684  0.56941135
  0.55521159  0.54694327  0.55363922  0.57197233]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
13
[ 0.58390665  0.56065787  0.56795362  0.53932085  0.56836005  0.57976902
  0.53811308  0.56914371  0.55863815  0.558632  ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
14
[ 0.55192904  0.55903685  0.5745293   0.56663145  0.55838904  0.55699521
  0.54963919  0.56177369  0.55310329  0.55688798]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
15
[ 0.55119092  0.55412855  0.56696509  0.54374442  0.54902939  0.56879494
  0.5819689   0.54675303  0.55719007  0.54840767]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
16
[ 0.57850916  0.55131752  0.5768905   0.57730652  0.58331159  0.55450334
  0.55071611  0.5544896   0.55585347  0.55718314]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
17
[ 0.5653029   0.55396858  0.57122872  0.56258219  0.56522702  0.55469452
  0.55636012  0.56102624  0.55858549  0.56365793]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
18
[ 0.56707549  0.5506591   0.54759498  0.55763313  0.55665801  0.55494638
  0.55085431  0.56383256  0.56640973  0.56179164]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
19
[ 0.55365111  0.54749003  0.559391    0.56685669  0.57234819  0.56102247
  0.55122953  0.58579988  0.57145611  0.54064985]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
20
[ 0.56376795  0.54755475  0.58287542  0.55765194  0.526907    0.56791444
  0.55254615  0.5319156   0.57380589  0.56671911]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
21
[ 0.56543366  0.58330566  0.5615732   0.55570316  0.56259054  0.57940963
  0.54017317  0.57687996  0.56397419  0.56006934]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
22
[ 0.57812886  0.56558329  0.56403662  0.5568754   0.54858829  0.54681902
  0.54655204  0.55723158  0.57469478  0.56917803]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
23
[ 0.55802466  0.56172898  0.57341427  0.56152238  0.56591588  0.58993023
  0.53305925  0.55082766  0.55201411  0.55678501]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
24
[ 0.54759688  0.53485616  0.54490891  0.55477048  0.5700275   0.55631295
  0.5355983   0.56239223  0.53239264  0.56667166]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
25
[ 0.56241986  0.54662535  0.56995235  0.55596962  0.53422767  0.55231769
  0.56377075  0.54679869  0.56234457  0.56954784]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
26
[ 0.54141076  0.5510447   0.55311884  0.55719543  0.55710451  0.54994294
  0.56151955  0.57411475  0.56762966  0.56287285]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
27
[ 0.54961349  0.55127     0.57088518  0.5407178   0.58404473  0.57310254
  0.57050698  0.56244342  0.56298576  0.56985595]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
28
[ 0.54988207  0.56716545  0.56784671  0.54383392  0.55389972  0.56271866
  0.56005207  0.57668062  0.56900956  0.5394869 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
29
[ 0.54437182  0.56809423  0.56205805  0.55159435  0.5700079   0.56189334
  0.57285849  0.5762984   0.54913982  0.55486771]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
30
[ 0.54663047  0.57819747  0.5595369   0.55549741  0.56591898  0.56485706
  0.56083891  0.53863381  0.54610279  0.56571368]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
31
[ 0.56535442  0.55903685  0.545525    0.55397201  0.54882904  0.56370611
  0.56305642  0.56193875  0.56168118  0.55216309]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
32
[ 0.5286921   0.57555667  0.54448007  0.56069233  0.54740154  0.53983909
  0.5783981   0.56840899  0.57291489  0.55903872]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
33
[ 0.5573607   0.56349974  0.55803134  0.55480697  0.57265543  0.55558621
  0.55322344  0.56957775  0.53944591  0.58819995]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
34
[ 0.56784703  0.55792379  0.57622652  0.5684123   0.57116626  0.56992794
  0.56288352  0.56225592  0.5485181   0.5746673 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
35
[ 0.56301848  0.55785468  0.571092    0.56613049  0.56130173  0.55168933
  0.572194    0.56377905  0.57083933  0.5547444 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
36
[ 0.56860392  0.5505137   0.53691309  0.5869054   0.5592157   0.54175308
  0.57921073  0.56775319  0.55610624  0.55786397]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
37
[ 0.57146993  0.55836868  0.54900734  0.55498533  0.57115011  0.59429865
  0.54350885  0.56183874  0.5738406   0.54737803]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
38
[ 0.56106342  0.54469995  0.57701969  0.57349619  0.58210873  0.55983696
  0.55957014  0.56084886  0.56968231  0.5537858 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
39
[ 0.55603093  0.56411509  0.55568616  0.56257489  0.54532535  0.53668594
  0.56870038  0.5427898   0.55533139  0.56602112]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
40
[ 0.53628592  0.55927125  0.5657461   0.54586115  0.58337656  0.58631499
  0.55261005  0.57473177  0.55632022  0.5717819 ]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
41
[ 0.57326328  0.55447042  0.54262206  0.55899088  0.56131203  0.53892738
  0.54310377  0.54082612  0.54674741  0.56170739]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
42
[ 0.53743618  0.57730379  0.58304913  0.59293354  0.5643221   0.58575344
  0.55045122  0.54584422  0.53859154  0.55781131]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
43
[ 0.56397045  0.54478377  0.58470451  0.57392576  0.55386998  0.57909956
  0.57623195  0.55622797  0.550484    0.55841628]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
44
[ 0.54398274  0.5545806   0.56525666  0.55265261  0.5423119   0.55627075
  0.54776632  0.56044607  0.55735265  0.56847388]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
45
[ 0.57076838  0.56693753  0.57987497  0.55165468  0.55210454  0.57091732
  0.55332083  0.54639121  0.54686093  0.56486941]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
46
[ 0.55072992  0.57137647  0.5441667   0.57653261  0.57601412  0.54182246
  0.54658162  0.57565649  0.56380565  0.55374473]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
47
[ 0.53912671  0.54285528  0.56129476  0.5596996   0.56096022  0.54681196
  0.56126366  0.561774    0.56111772  0.56620281]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
48
[ 0.57811487  0.55672863  0.56280642  0.55529597  0.55027131  0.56389898
  0.56521186  0.55902479  0.51736623  0.54204945]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
49
[ 0.57257518  0.55677852  0.55777724  0.54962409  0.55238656  0.56625495
  0.57900324  0.57907993  0.56620611  0.56855022]
0.0
5.0
10.0
15.0
20.0
25.0
30.0
35.0
40.0
45.0
50.0
55.0
60.0
65.0
70.0
75.0
80.0
85.0
90.0
95.0
[[ 0.09673367  0.09673367  0.09673367  0.27512563  0.34296482  0.46356784
   0.57286432  0.70728643  0.76758794  0.7839196   0.80778894  0.83291457
   0.85175879  0.87060302  0.88190955  0.88944724  0.90326633  0.9120603
   0.91708543  0.9258794 ]
 [ 0.08291457  0.08291457  0.08291457  0.09924623  0.3178392   0.44849246
   0.51758794  0.58040201  0.66582915  0.72613065  0.77386935  0.81030151
   0.83668342  0.86557789  0.88065327  0.88944724  0.89824121  0.90201005
   0.90954774  0.91457286]
 [ 0.09422111  0.09422111  0.09422111  0.09547739  0.37060302  0.42336683
   0.5         0.55025126  0.69974874  0.75        0.80778894  0.84547739
   0.86432161  0.87939698  0.88442211  0.89824121  0.90326633  0.90703518
   0.90954774  0.91080402]
 [ 0.0879397   0.0879397   0.0879397   0.0879397   0.13065327  0.35929648
   0.44221106  0.57788945  0.65075377  0.69849246  0.72738693  0.75251256
   0.78517588  0.80150754  0.81281407  0.82663317  0.83919598  0.85175879
   0.86055276  0.8718593 ]
 [ 0.10175879  0.10175879  0.10175879  0.16080402  0.41959799  0.62562814
   0.7160804   0.7298995   0.74874372  0.76884422  0.79145729  0.80778894
   0.81909548  0.83165829  0.84296482  0.85301508  0.87437186  0.89447236
   0.89824121  0.90577889]
 [ 0.08919598  0.08919598  0.08919598  0.08919598  0.20100503  0.41959799
   0.52638191  0.58668342  0.70351759  0.77386935  0.80778894  0.8379397
   0.85552764  0.87437186  0.89572864  0.89824121  0.90577889  0.91080402
   0.91331658  0.91708543]
 [ 0.10929648  0.10929648  0.10929648  0.15577889  0.23743719  0.27763819
   0.44472362  0.60175879  0.6758794   0.70979899  0.7361809   0.77638191
   0.81155779  0.84170854  0.86306533  0.88316583  0.89321608  0.90452261
   0.91331658  0.91331658]
 [ 0.11557789  0.1080402   0.1080402   0.11432161  0.32286432  0.45728643
   0.63442211  0.67964824  0.70351759  0.73492462  0.75502513  0.79522613
   0.82663317  0.84422111  0.86306533  0.87562814  0.88442211  0.89698492
   0.90326633  0.90829146]
 [ 0.09924623  0.09924623  0.09924623  0.20477387  0.38065327  0.43467337
   0.58165829  0.71984925  0.78517588  0.82286432  0.85301508  0.87311558
   0.89698492  0.91457286  0.91582915  0.92336683  0.92462312  0.92085427
   0.92211055  0.92336683]
 [ 0.08542714  0.08542714  0.09798995  0.18341709  0.20603015  0.31155779
   0.58040201  0.67211055  0.7298995   0.75125628  0.80527638  0.83542714
   0.86180905  0.87939698  0.89070352  0.90201005  0.90829146  0.91080402
   0.91457286  0.91959799]
 [ 0.10175879  0.14572864  0.19723618  0.19723618  0.25        0.25879397
   0.46984925  0.6419598   0.71482412  0.75753769  0.77889447  0.80025126
   0.81407035  0.84045226  0.8580402   0.86432161  0.87688442  0.88819095
   0.89321608  0.89698492]
 [ 0.09296482  0.09296482  0.09296482  0.09296482  0.18718593  0.47487437
   0.59296482  0.6218593   0.65829146  0.72110553  0.75125628  0.77386935
   0.78643216  0.8040201   0.83291457  0.86180905  0.87939698  0.89447236
   0.90326633  0.91457286]
 [ 0.08417085  0.08417085  0.08417085  0.15954774  0.18718593  0.46733668
   0.58542714  0.66582915  0.70603015  0.74371859  0.78140704  0.8241206
   0.84924623  0.87688442  0.89321608  0.89698492  0.90075377  0.90703518
   0.91080402  0.91457286]
 [ 0.10301508  0.10301508  0.10301508  0.13944724  0.34045226  0.53140704
   0.61055276  0.70226131  0.75502513  0.79522613  0.84296482  0.86934673
   0.88190955  0.89447236  0.89824121  0.91080402  0.9120603   0.91331658
   0.91457286  0.91708543]
 [ 0.09924623  0.09924623  0.09924623  0.12688442  0.20603015  0.42839196
   0.56532663  0.63065327  0.71356784  0.76633166  0.80276382  0.84045226
   0.86432161  0.88567839  0.89949749  0.90703518  0.91708543  0.92085427
   0.92336683  0.93341709]
 [ 0.10678392  0.10678392  0.10678392  0.10678392  0.29145729  0.46984925
   0.61683417  0.70100503  0.76758794  0.81281407  0.83668342  0.86055276
   0.8718593   0.88567839  0.89698492  0.90703518  0.91457286  0.91959799
   0.92462312  0.93090452]
 [ 0.09547739  0.09547739  0.09547739  0.09547739  0.1821608   0.28768844
   0.56658291  0.74371859  0.8241206   0.83668342  0.84798995  0.86306533
   0.87939698  0.88442211  0.89321608  0.89447236  0.90201005  0.89949749
   0.90577889  0.90703518]
 [ 0.09798995  0.09798995  0.18844221  0.27889447  0.38567839  0.54271357
   0.66331658  0.78140704  0.84547739  0.87060302  0.88065327  0.89070352
   0.89572864  0.90201005  0.91457286  0.91834171  0.92336683  0.92839196
   0.93090452  0.93341709]
 [ 0.09798995  0.09798995  0.09798995  0.20477387  0.3781407   0.4798995
   0.68341709  0.74371859  0.80276382  0.84422111  0.86055276  0.87311558
   0.88567839  0.89572864  0.89698492  0.90703518  0.90703518  0.91331658
   0.91708543  0.92085427]
 [ 0.09422111  0.11055276  0.11055276  0.23115578  0.27261307  0.44095477
   0.52638191  0.65201005  0.73994975  0.8241206   0.85552764  0.87688442
   0.89824121  0.90452261  0.90954774  0.9120603   0.91331658  0.91708543
   0.91708543  0.91959799]
 [ 0.10552764  0.10552764  0.10552764  0.10552764  0.19346734  0.47613065
   0.66457286  0.72361809  0.76507538  0.81407035  0.84673367  0.85427136
   0.86934673  0.87562814  0.88190955  0.88567839  0.88944724  0.89321608
   0.89447236  0.8919598 ]
 [ 0.1080402   0.1080402   0.18844221  0.19723618  0.22361809  0.41457286
   0.6281407   0.69723618  0.73366834  0.74371859  0.75753769  0.76758794
   0.78894472  0.80653266  0.83542714  0.8718593   0.88944724  0.89949749
   0.9120603   0.91959799]
 [ 0.10175879  0.10175879  0.10175879  0.10301508  0.29396985  0.34798995
   0.53643216  0.65452261  0.72864322  0.75879397  0.79773869  0.80527638
   0.81909548  0.83668342  0.84296482  0.85678392  0.86934673  0.88442211
   0.88944724  0.89949749]
 [ 0.09045226  0.09045226  0.09045226  0.09045226  0.28015075  0.5741206
   0.69974874  0.74120603  0.78894472  0.83040201  0.85427136  0.86557789
   0.88065327  0.89447236  0.90452261  0.91331658  0.91959799  0.92462312
   0.92839196  0.93090452]
 [ 0.1080402   0.1080402   0.1080402   0.1080402   0.15703518  0.37562814
   0.66959799  0.82537688  0.83165829  0.84296482  0.84924623  0.85678392
   0.86055276  0.87311558  0.88316583  0.89447236  0.90201005  0.91080402
   0.92085427  0.92713568]
 [ 0.10050251  0.10050251  0.10050251  0.10050251  0.10050251  0.21984925
   0.42964824  0.66708543  0.76633166  0.80653266  0.83417085  0.85427136
   0.86683417  0.87939698  0.89572864  0.90326633  0.91080402  0.91457286
   0.91708543  0.91959799]
 [ 0.10427136  0.10427136  0.10427136  0.11809045  0.28140704  0.33291457
   0.49497487  0.59422111  0.65703518  0.74748744  0.78140704  0.81030151
   0.83040201  0.84422111  0.86306533  0.87311558  0.87939698  0.89447236
   0.89572864  0.90326633]
 [ 0.09798995  0.09798995  0.09798995  0.09798995  0.09798995  0.23366834
   0.50251256  0.64824121  0.67964824  0.72361809  0.74623116  0.76884422
   0.79145729  0.81658291  0.84296482  0.85678392  0.87311558  0.87939698
   0.88065327  0.88316583]
 [ 0.09296482  0.09296482  0.09296482  0.09296482  0.20979899  0.28015075
   0.47110553  0.6218593   0.67462312  0.72613065  0.75502513  0.78266332
   0.79145729  0.80904523  0.83668342  0.85427136  0.89572864  0.89949749
   0.90326633  0.90954774]
 [ 0.08919598  0.08919598  0.08919598  0.17336683  0.30778894  0.47487437
   0.65201005  0.68718593  0.73115578  0.75376884  0.79396985  0.82788945
   0.85929648  0.87311558  0.88316583  0.89824121  0.91080402  0.91331658
   0.91708543  0.92085427]
 [ 0.09924623  0.09924623  0.09924623  0.09924623  0.26005025  0.51256281
   0.66080402  0.73115578  0.77512563  0.80150754  0.82160804  0.82788945
   0.83919598  0.85175879  0.86557789  0.87437186  0.88944724  0.89447236
   0.90326633  0.90954774]
 [ 0.09547739  0.09547739  0.09547739  0.09673367  0.33542714  0.41331658
   0.53894472  0.68467337  0.75879397  0.81281407  0.84170854  0.86055276
   0.87437186  0.88065327  0.88819095  0.89447236  0.89949749  0.89949749
   0.89949749  0.90201005]
 [ 0.10678392  0.10678392  0.09924623  0.09924623  0.3241206   0.37437186
   0.55527638  0.63190955  0.68467337  0.76256281  0.81030151  0.84924623
   0.86934673  0.8919598   0.90452261  0.91959799  0.92462312  0.92964824
   0.92964824  0.9321608 ]
 [ 0.0879397   0.0879397   0.0879397   0.0879397   0.1758794   0.3241206
   0.47110553  0.54648241  0.64949749  0.73241206  0.77512563  0.8040201
   0.83291457  0.84798995  0.85929648  0.8718593   0.88190955  0.89321608
   0.89447236  0.90075377]
 [ 0.09673367  0.09673367  0.18592965  0.18718593  0.25879397  0.36055276
   0.55025126  0.68467337  0.79522613  0.82788945  0.83919598  0.86180905
   0.87060302  0.8781407   0.88567839  0.88442211  0.8919598   0.89824121
   0.90201005  0.90075377]
 [ 0.10050251  0.10050251  0.10050251  0.10050251  0.14321608  0.25502513
   0.39447236  0.51005025  0.65075377  0.72361809  0.80653266  0.85050251
   0.86557789  0.88065327  0.88819095  0.89321608  0.90075377  0.90452261
   0.90326633  0.91331658]
 [ 0.08040201  0.08040201  0.13442211  0.25628141  0.34170854  0.40703518
   0.53894472  0.61934673  0.66080402  0.70100503  0.72487437  0.74497487
   0.77889447  0.80527638  0.83542714  0.86306533  0.88190955  0.89824121
   0.90954774  0.9120603 ]
 [ 0.10929648  0.10929648  0.10929648  0.10929648  0.16959799  0.4798995
   0.57663317  0.65577889  0.69974874  0.73366834  0.78517588  0.8379397
   0.86809045  0.88567839  0.89824121  0.90703518  0.90703518  0.90703518
   0.90326633  0.90703518]
 [ 0.10301508  0.10301508  0.10301508  0.10678392  0.31532663  0.43467337
   0.60427136  0.69974874  0.77512563  0.8178392   0.84924623  0.86432161
   0.89070352  0.90201005  0.91080402  0.91834171  0.91834171  0.92462312
   0.92964824  0.93341709]
 [ 0.10552764  0.10552764  0.10552764  0.10552764  0.15954774  0.32663317
   0.50251256  0.70226131  0.75        0.78894472  0.82035176  0.84673367
   0.85929648  0.86934673  0.87939698  0.8919598   0.8919598   0.89949749
   0.90452261  0.91080402]
 [ 0.09170854  0.09170854  0.09170854  0.09170854  0.18969849  0.24371859
   0.56407035  0.68969849  0.7638191   0.80527638  0.83417085  0.84924623
   0.86306533  0.87939698  0.88944724  0.89698492  0.90326633  0.90829146
   0.91708543  0.92336683]
 [ 0.09924623  0.09924623  0.09924623  0.11683417  0.18090452  0.26884422
   0.3781407   0.64070352  0.75251256  0.79899497  0.81281407  0.83040201
   0.84673367  0.86557789  0.8718593   0.88819095  0.89447236  0.89824121
   0.90075377  0.90577889]
 [ 0.10050251  0.10050251  0.13693467  0.18592965  0.41959799  0.50251256
   0.60427136  0.65452261  0.70728643  0.75        0.80276382  0.84673367
   0.8718593   0.88567839  0.89698492  0.90954774  0.91457286  0.91834171
   0.92839196  0.93090452]
 [ 0.09296482  0.18467337  0.17964824  0.18592965  0.20979899  0.36432161
   0.52763819  0.69723618  0.7638191   0.81909548  0.85678392  0.87060302
   0.88944724  0.89949749  0.91457286  0.91959799  0.9258794   0.93090452
   0.93467337  0.93467337]
 [ 0.09798995  0.17211055  0.18467337  0.18592965  0.3241206   0.51633166
   0.66331658  0.72613065  0.7701005   0.82537688  0.84924623  0.8580402
   0.86180905  0.86557789  0.86934673  0.8781407   0.88944724  0.89824121
   0.90326633  0.91080402]
 [ 0.08165829  0.10552764  0.10552764  0.18090452  0.27135678  0.33040201
   0.49748744  0.62939698  0.72613065  0.79522613  0.8178392   0.84045226
   0.85301508  0.87437186  0.89321608  0.90452261  0.91080402  0.9120603
   0.91331658  0.92085427]
 [ 0.09170854  0.09170854  0.09170854  0.09170854  0.10301508  0.33417085
   0.47110553  0.60678392  0.66708543  0.7298995   0.77386935  0.80778894
   0.83668342  0.85175879  0.86306533  0.87437186  0.87939698  0.88693467
   0.89447236  0.89949749]
 [ 0.08668342  0.08668342  0.08668342  0.08668342  0.08668342  0.23743719
   0.49497487  0.57663317  0.65452261  0.75125628  0.81155779  0.84296482
   0.86180905  0.87311558  0.8919598   0.89572864  0.89949749  0.89949749
   0.90075377  0.90577889]
 [ 0.0879397   0.0879397   0.0879397   0.0879397   0.24120603  0.40201005
   0.63190955  0.70979899  0.75502513  0.79020101  0.82286432  0.84924623
   0.86934673  0.88567839  0.91080402  0.92336683  0.9258794   0.9258794
   0.92336683  0.92462312]
 [ 0.10175879  0.10175879  0.10175879  0.10175879  0.11557789  0.20100503
   0.35929648  0.59170854  0.70603015  0.75753769  0.78517588  0.81658291
   0.83919598  0.85301508  0.85929648  0.87437186  0.88316583  0.89447236
   0.89949749  0.90075377]]

In [9]:
for i in range(50):
    plt.figure(figsize=(15,10))
    plt.plot(np.linspace(0,20,20),fracerr[i])


/usr/local/lib/python2.7/dist-packages/matplotlib/pyplot.py:424: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).
  max_open_warning, RuntimeWarning)