In [42]:
#Imports and model parameters

import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
#Note: the quadratic-function helpers defined below (func, generatecandidate4) are
#leftovers from an earlier toy experiment: given coefficients a, b, c, they draw
#random x-values and evaluate a*x^2 + b*x + c. They are unused in the MNIST runs.

import copy

alpha, hidden_dim, hidden_dim2 = (.001, 4, 4) #leftover toy-experiment parameters (unused below)

thresh = .1

cost_thresh = 1.0

# Parameters
learning_rate = 0.001
training_epochs = 15
batch_size = 100
display_step = 1

# Network Parameters
n_hidden_1 = 256 # 1st layer number of features
n_hidden_2 = 256 # 2nd layer number of features
n_input = 784 # MNIST input dimension (28*28 pixels)
n_classes = 10 # MNIST classes (digits 0-9)
#synapses = []
models = []

#Reference model for runs that start from the same initialization
#(multilayer_perceptron is defined in a later cell; this cell was re-run
#after that definition had been executed)
copy_model = multilayer_perceptron(ind=0)


Extracting /tmp/data/train-images-idx3-ubyte.gz
Extracting /tmp/data/train-labels-idx1-ubyte.gz
Extracting /tmp/data/t10k-images-idx3-ubyte.gz
Extracting /tmp/data/t10k-labels-idx1-ubyte.gz

In [54]:
#Function definitions

def func(x,a,b,c):
    return x*x*a + x*b + c

def generatecandidate4(a,b,c,tot):
    
    candidate = [[np.random.random() for x in xrange(1)] for y in xrange(tot)]
    candidatesolutions = [[func(x[0],a,b,c)] for x in candidate]
    
    return (candidate, candidatesolutions)

def synapse_interpolate(synapse1, synapse2, t):
    return (synapse2-synapse1)*t + synapse1

def model_interpolate(w1, b1, w2, b2, t):
    
    mwi = [synapse_interpolate(w1e, w2e, t) for w1e, w2e in zip(w1, w2)]
    mbi = [synapse_interpolate(b1e, b2e, t) for b1e, b2e in zip(b1, b2)]
    
    return mwi, mbi
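
#Sanity check (illustrative, not executed here): the interpolation returns the
#first endpoint at t=0, the second at t=1, and the midpoint at t=0.5, e.g.
#    synapse_interpolate(np.zeros((2, 2)), np.ones((2, 2)), 0.5)
#    # -> array([[ 0.5,  0.5],
#    #           [ 0.5,  0.5]])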

def InterpBeadError(w1, b1, w2, b2, write = False, name = "00"):
    #Evaluate training-batch error at 100 evenly spaced points along the straight
    #line in parameter space between model (w1, b1) and model (w2, b2)
    errors = []
    
    xdat,ydat = mnist.train.next_batch(batch_size)
    
    for tt in xrange(100):
        t = tt/100.
        thiserror = 0

        weights, biases = model_interpolate(w1,b1,w2,b2, t)
        interp_model = multilayer_perceptron(w=weights, b=biases)
        
        with interp_model.g.as_default():

            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = interp_model.predict(x)
            init = tf.initialize_all_variables()

            with tf.Session() as sess:
                sess.run(init)
                correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                thiserror = 1 - accuracy.eval({x: xdat, y: ydat})
                print "Error:", thiserror, "\t", tt, weights[0][1][0], weights[0][1][1]

        errors.append(thiserror)

    if write:
        with open("f" + str(name) + ".out",'w+') as f:
            for e in errors:
                f.write(str(e) + "\n")
    
    return max(errors), np.argmax(errors)
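
#Usage sketch (illustrative): the worst error along the segment joining two
#trained models, and the interpolation step where it occurs:
#    maxerr, t_worst = InterpBeadError(models[0].params[0], models[0].params[1],
#                                      models[1].params[0], models[1].params[1])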

In [44]:
#Class definitions

class multilayer_perceptron():
    
    
    def __init__(self, w=0, b=0, ind='00'):
        
        self.index = ind #used for reading values from file
        #See the filesystem convention below (is this really necessary?)
        #I'm going to eschew writing to file for now because I'll be generating too many files
        #Currently, the last value of the parameters is stored in self.params to be read
        
        learning_rate = 0.001
        training_epochs = 15
        batch_size = 100
        display_step = 1

        # Network Parameters
        n_hidden_1 = 256 # 1st layer number of features
        n_hidden_2 = 256 # 2nd layer number of features
        n_input = 784 # MNIST input dimension (28*28 pixels)
        n_classes = 10 # MNIST classes (digits 0-9)
        self.g = tf.Graph()
        
        
        self.params = []
        
        with self.g.as_default():
        
            #Note that by default, weights and biases will be initialized to random normal dists
            if w==0:
                
                self.weights = {
                    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
                    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
                    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
                }
                self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
                self.biases = {
                    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
                    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
                    'out': tf.Variable(tf.random_normal([n_classes]))
                }
                self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
                
            else:
                
                self.weights = {
                    'h1': tf.Variable(w[0]),
                    'h2': tf.Variable(w[1]),
                    'out': tf.Variable(w[2])
                }
                self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
                self.biases = {
                    'b1': tf.Variable(b[0]),
                    'b2': tf.Variable(b[1]),
                    'out': tf.Variable(b[2])
                }
                self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
            self.saver = tf.train.Saver()
    
    
    def UpdateWeights(self, w, b):
        with self.g.as_default():
            self.weights = {
                    'h1': tf.Variable(w[0]),
                    'h2': tf.Variable(w[1]),
                    'out': tf.Variable(w[2])
                }
            self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
            self.biases = {
                'b1': tf.Variable(b[0]),
                'b2': tf.Variable(b[1]),
                'out': tf.Variable(b[2])
            }
            self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
            

        
    def predict(self, x):
        
        with self.g.as_default():
            layer_1 = tf.add(tf.matmul(x, self.weights['h1']), self.biases['b1'])
            layer_1 = tf.nn.relu(layer_1)
            # Hidden layer with RELU activation
            layer_2 = tf.add(tf.matmul(layer_1, self.weights['h2']), self.biases['b2'])
            layer_2 = tf.nn.relu(layer_2)
            # Output layer with linear activation
            out_layer = tf.matmul(layer_2, self.weights['out']) + self.biases['out']
            return out_layer
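
    #Shapes for reference: x is [batch, n_input] = [batch, 784];
    #out_layer is [batch, n_classes] = [batch, 10] (unscaled logits)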
        
    def ReturnParamsAsList(self):
        
        with self.g.as_default():

            with tf.Session() as sess:
                # Restore variables from disk
                self.saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model"+str(self.index)+".ckpt")                
                return sess.run(self.weightslist), sess.run(self.biaseslist)

        
        
class WeightString:
    
    def __init__(self, w1, b1, w2, b2, numbeads, threshold):
        self.w1 = w1
        self.w2 = w2
        self.b1 = b1
        self.b2 = b2
        #self.w2, self.b2 = m2.params
        self.AllBeads = []

        self.threshold = threshold
        
        self.AllBeads.append([w1,b1])
        
        
        for n in xrange(numbeads):
            ws,bs = model_interpolate(w1,b1,w2,b2, (n + 1.)/(numbeads+1.))
            self.AllBeads.append([ws,bs])
            
        self.AllBeads.append([w2,b2])
        
        
        self.ConvergedList = [False for f in xrange(len(self.AllBeads))]
        self.ConvergedList[0] = True
        self.ConvergedList[-1] = True
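
    #AllBeads layout (for reference): [[w1,b1], bead_1, ..., bead_numbeads, [w2,b2]];
    #the two endpoints are marked converged from the start.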
    
    
    def SpringNorm(self, order):
        
        total = 0.
        
        #Energy between mobile beads
        for i,b in enumerate(self.AllBeads):
            if i < len(self.AllBeads)-1:
                #print "Tallying energy between bead " + str(i) + " and bead " + str(i+1)
                subtotal = 0.
                for j in xrange(len(b)):
                    subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][0][j],self.AllBeads[i+1][0][j]),ord=order)#/len(self.beads[0][j])
                for j in xrange(len(b)):
                    subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][1][j],self.AllBeads[i+1][1][j]),ord=order)#/len(self.beads[0][j])
                total+=subtotal
        
        return total#/len(self.beads)
        
    
    
    def SGDBead(self, bead, thresh, maxindex):
        
        finalerror = 0.
        

        # Parameters
        learning_rate = 0.01
        training_epochs = 15
        batch_size = 1000
        display_step = 1
        
        curWeights, curBiases = self.AllBeads[bead]
        test_model = multilayer_perceptron(w=curWeights, b=curBiases)

        with test_model.g.as_default():

            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = test_model.predict(x)
            cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
            optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
            init = tf.initialize_all_variables()
            stopcond = True

            with tf.Session() as sess:
                sess.run(init)
                xtest = mnist.test.images
                ytest = mnist.test.labels
                
                thiserror = 0.
                j = 0
                while stopcond:
                    for epoch in range(training_epochs):
                        avg_cost = 0.
                        total_batch = int(mnist.train.num_examples/batch_size)
                        #avg_cost was just reset, so this check passes on entry;
                        #training halts via stopcond once test error < thresh
                        if (avg_cost > thresh or avg_cost == 0.) and stopcond:
                            # Loop over all batches
                            for i in range(total_batch):
                                batch_x, batch_y = mnist.train.next_batch(batch_size)
                                # Run optimization op (backprop) and cost op (to get loss value)
                                _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                                              y: batch_y})
                                # Compute average loss
                                avg_cost += c / total_batch
                            correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                            # Calculate accuracy
                            accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                            #print "Accuracy:", accuracy.eval({x: xtest, y: ytest})
                            thiserror = 1 - accuracy.eval({x: xtest, y: ytest})
                            if thiserror < thresh:
                                stopcond = False
                    #print "Optimization Finished!"

                    # Test model
                    #correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                    #correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                    # Calculate accuracy
                    #accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                    #print "Accuracy:", accuracy.eval({x: xtest, y: ytest})

                    #if (j%5000) == 0:
                    #    print "Error after "+str(j)+" iterations:" + str(accuracy.eval({x: xtest, y: ytest}))

                    finalerror = 1 - accuracy.eval({x: xtest, y: ytest})
                    
                    if finalerror < thresh or not stopcond: #note: the maxindex argument is currently unused
                        stopcond = False
                        test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                        self.AllBeads[bead]=test_model.params
                        print "Final bead error: " + str(finalerror)
                        
                    j+=1

            return finalerror
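
#Usage sketch (illustrative, not executed here): one mobile bead between two
#trained models, relaxed by SGD, then the L2 path length along the string:
#    ws = WeightString(m1.params[0], m1.params[1], m2.params[0], m2.params[1], 1, thresh)
#    ws.SGDBead(1, .5*thresh, 20)
#    print ws.SpringNorm(2)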

In [45]:
#Model generation

for ii in xrange(3):


    # Construct model with different initial weights
    test_model = multilayer_perceptron(ind=ii)
    
    #Construct model with same initial weights
    #test_model = copy.copy(copy_model)
    #test_model.index = ii

    models.append(test_model)
    with test_model.g.as_default():

        x = tf.placeholder("float", [None, n_input])
        y = tf.placeholder("float", [None, n_classes])
        pred = test_model.predict(x)

        # Define loss and optimizer
        cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
        optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)

        # Initializing the variables
        init = tf.initialize_all_variables()


        stopcond = True

        with tf.Session() as sess:
            sess.run(init)
            xtest = mnist.test.images
            ytest = mnist.test.labels
            while stopcond:
                j = 0
                # Training cycle
                for epoch in range(training_epochs):
                    avg_cost = 0.
                    total_batch = int(10000/batch_size) #use a 10,000-example subset per epoch

                    if (avg_cost > thresh or avg_cost == 0.) and stopcond:
                        # Loop over all batches
                        for i in range(total_batch):
                            batch_x, batch_y = mnist.train.next_batch(batch_size)
                            # Run optimization op (backprop) and cost op (to get loss value)
                            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                                          y: batch_y})
                            # Compute average loss
                            avg_cost += c / total_batch
                        # Display logs per epoch step
                        if epoch % display_step == 0:
                            print "Epoch:", '%04d' % (epoch+1), "cost=", \
                                "{:.9f}".format(avg_cost)
                        
                        correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                        # Calculate accuracy
                        accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                        #print "Accuracy:", accuracy.eval({x: xtest, y: ytest})
                        thiserror = 1 - accuracy.eval({x: xtest, y: ytest})
                        if thiserror < thresh:
                            stopcond = False
                            
                print "Optimization Finished!"

                # Test model
                correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                # Calculate accuracy
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                print "Accuracy:", accuracy.eval({x: xtest, y: ytest})

                if (j%5000) == 0:
                    print "Accuracy after "+str(j)+" iterations: " + str(accuracy.eval({x: xtest, y: ytest}))

                if 1 - accuracy.eval({x: xtest, y: ytest}) < thresh or not stopcond:
                    stopcond = False
                    print "Final params:"
                    test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                    save_path = test_model.saver.save(sess,"/home/dfreeman/PythonFun/tmp/model" + str(ii) + ".ckpt")
                j+=1


Epoch: 0001 cost= 569.200964050
Epoch: 0002 cost= 162.350347366
Epoch: 0003 cost= 109.549932747
Epoch: 0004 cost= 83.252990379
Epoch: 0005 cost= 72.425635023
Epoch: 0006 cost= 53.994613857
Epoch: 0007 cost= 51.823056984
Epoch: 0008 cost= 44.821461678
Epoch: 0009 cost= 41.322505980
Epoch: 0010 cost= 38.339859755
Epoch: 0011 cost= 38.245632200
Epoch: 0012 cost= 30.248446031
Epoch: 0013 cost= 27.927715507
Optimization Finished!
Accuracy: 0.9003
Accuracy after 0 iterations: 0.9003
Final params:
Epoch: 0001 cost= 462.551841125
Epoch: 0002 cost= 138.978277740
Epoch: 0003 cost= 100.559575577
Epoch: 0004 cost= 74.754091358
Epoch: 0005 cost= 62.825597267
Epoch: 0006 cost= 54.829888697
Epoch: 0007 cost= 49.491547890
Epoch: 0008 cost= 44.055701201
Epoch: 0009 cost= 41.198764448
Epoch: 0010 cost= 33.823042288
Epoch: 0011 cost= 32.589753995
Epoch: 0012 cost= 28.527572842
Optimization Finished!
Accuracy: 0.9028
Accuracy after 0 iterations: 0.9028
Final params:
Epoch: 0001 cost= 516.627025299
Epoch: 0002 cost= 140.971016731
Epoch: 0003 cost= 96.274020119
Epoch: 0004 cost= 71.887283592
Epoch: 0005 cost= 63.750078754
Epoch: 0006 cost= 54.013752480
Epoch: 0007 cost= 52.205150316
Epoch: 0008 cost= 48.441035209
Epoch: 0009 cost= 37.454276619
Epoch: 0010 cost= 37.021613417
Epoch: 0011 cost= 32.234993992
Epoch: 0012 cost= 30.552076435
Optimization Finished!
Accuracy: 0.9023
Accuracy after 0 iterations: 0.9023
Final params:

In [46]:
#Connected components search


#Used for softening the training criterion. Some fuzz is required because the
#training error and the test error differ.
thresh_multiplier = 1.1
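#e.g. with thresh = .1, an interpolated path is accepted as long as the worst
#interpolated error stays below 1.1*.1 = .11 (illustrative arithmetic; this is
#the check against thresh_multiplier*training_threshold below)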


results = []

connecteddict = {}
for i1 in xrange(len(models)):
    connecteddict[i1] = 'not connected'


for i1 in xrange(len(models)):
    print i1
    for i2 in xrange(len(models)):
        
        if i2 > i1 and ((connecteddict[i1] != connecteddict[i2]) or (connecteddict[i1] == 'not connected' or connecteddict[i2] == 'not connected')) :
            #print "slow1?"
            #print i1,i2
            #print models[0]
            #print models[1]
            #print models[0].params
            #print models[1].params
            test = WeightString(models[i1].params[0],models[i1].params[1],models[i2].params[0],models[i2].params[1],1,1)

            training_threshold = thresh

            depth = 0
            d_max = 10

            #Check error between beads
            #Alg: for each bead at depth i, SGD until converged.
            #For beads with max error along path too large, add another bead between them, repeat

            
            #Keeps track of which indices to check the interpbeaderror between
            newindices = [0,1]
            
            while (depth < d_max):
                print newindices
                #print "slow2?"
                #X, y = GenTest(X,y)
                counter = 0

                for i,c in enumerate(test.ConvergedList):
                    if c == False:
                        #print "slow3?"
                        error = test.SGDBead(i, .5*training_threshold, 20)
                        #print "slow4?"
                            #if counter%5000==0:
                            #    print counter
                            #    print error
                        test.ConvergedList[i] = True

                print test.ConvergedList

                interperrors = []
                interp_bead_indices = []
                for b in xrange(len(test.AllBeads)-1):
                    if b in newindices:
                        e = InterpBeadError(test.AllBeads[b][0],test.AllBeads[b][1], test.AllBeads[b+1][0], test.AllBeads[b+1][1])

                        interperrors.append(e)
                        interp_bead_indices.append(b)
                print interperrors

                if max([ee[0] for ee in interperrors]) < thresh_multiplier*training_threshold:
                    depth = 2*d_max

                else:
                    del newindices[:]
                    #Interperrors stores the maximum error on the path between beads
                    #shift index to account for added beads
                    shift = 0
                    for i, ie in enumerate(interperrors):
                        if ie[0] > thresh_multiplier*training_threshold:
                            k = interp_bead_indices[i]
                            
                            ws,bs = model_interpolate(test.AllBeads[k+shift][0],test.AllBeads[k+shift][1],\
                                                      test.AllBeads[k+shift+1][0],test.AllBeads[k+shift+1][1],\
                                                      ie[1]/100.)
                            
                            test.AllBeads.insert(k+shift+1,[ws,bs])
                            test.ConvergedList.insert(k+shift+1, False)
                            newindices.append(k+shift+1)
                            newindices.append(k+shift)
                            shift+=1
                    depth += 1
            if depth == 2*d_max:
                results.append([i1,i2,test.SpringNorm(2),"Connected"])
                if connecteddict[i1] == 'not connected' and connecteddict[i2] == 'not connected':
                    connecteddict[i1] = i1
                    connecteddict[i2] = i1
                elif connecteddict[i1] == 'not connected':
                    connecteddict[i1] = connecteddict[i2]
                elif connecteddict[i2] == 'not connected':
                    connecteddict[i2] = connecteddict[i1]
                else:
                    #merge the two components: relabel everything in i2's
                    #component with i1's label
                    hold = connecteddict[i2]
                    connecteddict[i2] = connecteddict[i1]
                    for h in xrange(len(models)):
                        if connecteddict[h] == hold:
                            connecteddict[h] = connecteddict[i1]
                    
            else:
                results.append([i1,i2,test.SpringNorm(2),"Disconnected"])

uniquecomps = []
totalcomps = 0
for i in xrange(len(models)):
    if not (connecteddict[i] in uniquecomps):
        uniquecomps.append(connecteddict[i])
    
    if connecteddict[i] == 'not connected':
        totalcomps += 1
        

notconoffset = 0

if 'not connected' in uniquecomps:
    notconoffset = -1
    
print "Thresh: " + str(thresh)
print "Comps: " + str(len(uniquecomps) + notconoffset + totalcomps)




connsum = []
for r in results:
    if r[3] == "Connected":
        connsum.append(r[2])
        #print r[2]
        
print "***"
print np.average(connsum)
print np.std(connsum)


0
[0, 1]
Final bead error: 0.0486000180244
[True, True, True]
[error-vs-t traces for bead segments 0-1 and 1-2 (100 interpolation steps each) elided]
[(0.18999999761581421, 30), (0.17000001668930054, 44)]
[1, 0, 3, 2]
Final bead error: 0.049399971962
Final bead error: 0.0475000143051
[True, True, True, True, True]
[error-vs-t traces for the four bead segments (100 interpolation steps each) elided]
[(0.079999983310699463, 0), (0.050000011920928955, 42), (0.019999980926513672, 89), (0.10000002384185791, 99)]
[0, 1]
Final bead error: 0.0468000173569
[True, True, True]
[error-vs-t traces for bead segments 0-1 and 1-2 (100 interpolation steps each) elided]
[(0.18000000715255737, 45), (0.12999999523162842, 34)]
[1, 0, 3, 2]
Final bead error: 0.0460000038147
Final bead error: 0.043799996376
[True, True, True, True, True]
[error-vs-t traces for the four bead segments (100 interpolation steps each) elided]
[(0.069999992847442627, 33), (0.019999980926513672, 36), (0.019999980926513672, 49), (0.12000000476837158, 86)]
[4, 3]
Final bead error: 0.0482000112534
[True, True, True, True, True, True]
Accuracy: 0.0199999809265 	0
Accuracy: 0.0199999809265 	1
Accuracy: 0.0199999809265 	2
Accuracy: 0.0199999809265 	3
Accuracy: 0.0199999809265 	4
Accuracy: 0.0199999809265 	5
Accuracy: 0.0199999809265 	6
Accuracy: 0.0199999809265 	7
Accuracy: 0.0199999809265 	8
Accuracy: 0.0199999809265 	9
Accuracy: 0.0199999809265 	10
Accuracy: 0.0299999713898 	11
Accuracy: 0.0299999713898 	12
Accuracy: 0.0299999713898 	13
Accuracy: 0.0299999713898 	14
Accuracy: 0.0299999713898 	15
Accuracy: 0.0299999713898 	16
Accuracy: 0.0299999713898 	17
Accuracy: 0.0299999713898 	18
Accuracy: 0.0299999713898 	19
Accuracy: 0.0299999713898 	20
Accuracy: 0.0400000214577 	21
Accuracy: 0.0400000214577 	22
Accuracy: 0.0400000214577 	23
Accuracy: 0.0400000214577 	24
Accuracy: 0.0299999713898 	25
Accuracy: 0.0299999713898 	26
Accuracy: 0.0299999713898 	27
Accuracy: 0.0299999713898 	28
Accuracy: 0.0299999713898 	29
Accuracy: 0.0299999713898 	30
Accuracy: 0.0299999713898 	31
Accuracy: 0.0299999713898 	32
Accuracy: 0.0500000119209 	33
Accuracy: 0.0500000119209 	34
Accuracy: 0.0400000214577 	35
Accuracy: 0.0400000214577 	36
Accuracy: 0.0400000214577 	37
Accuracy: 0.0400000214577 	38
Accuracy: 0.0400000214577 	39
Accuracy: 0.0400000214577 	40
Accuracy: 0.0400000214577 	41
Accuracy: 0.0400000214577 	42
Accuracy: 0.0400000214577 	43
Accuracy: 0.0400000214577 	44
Accuracy: 0.0400000214577 	45
Accuracy: 0.0400000214577 	46
Accuracy: 0.0400000214577 	47
Accuracy: 0.0400000214577 	48
Accuracy: 0.0400000214577 	49
Accuracy: 0.0400000214577 	50
Accuracy: 0.0400000214577 	51
Accuracy: 0.0400000214577 	52
Accuracy: 0.0400000214577 	53
Accuracy: 0.0299999713898 	54
Accuracy: 0.0299999713898 	55
Accuracy: 0.0299999713898 	56
Accuracy: 0.0299999713898 	57
Accuracy: 0.0299999713898 	58
Accuracy: 0.0299999713898 	59
Accuracy: 0.0299999713898 	60
Accuracy: 0.0299999713898 	61
Accuracy: 0.0299999713898 	62
Accuracy: 0.0299999713898 	63
Accuracy: 0.0299999713898 	64
Accuracy: 0.0299999713898 	65
Accuracy: 0.0299999713898 	66
Accuracy: 0.0299999713898 	67
Accuracy: 0.0299999713898 	68
Accuracy: 0.0299999713898 	69
Accuracy: 0.0299999713898 	70
Accuracy: 0.0299999713898 	71
Accuracy: 0.0299999713898 	72
Accuracy: 0.0199999809265 	73
Accuracy: 0.0199999809265 	74
Accuracy: 0.0199999809265 	75
Accuracy: 0.0199999809265 	76
Accuracy: 0.0199999809265 	77
Accuracy: 0.0199999809265 	78
Accuracy: 0.0199999809265 	79
Accuracy: 0.0199999809265 	80
Accuracy: 0.0199999809265 	81
Accuracy: 0.0199999809265 	82
Accuracy: 0.00999999046326 	83
Accuracy: 0.00999999046326 	84
Accuracy: 0.00999999046326 	85
Accuracy: 0.0 	86
Accuracy: 0.0 	87
Accuracy: 0.0 	88
Accuracy: 0.0 	89
Accuracy: 0.0 	90
Accuracy: 0.0 	91
Accuracy: 0.0 	92
Accuracy: 0.0 	93
Accuracy: 0.0 	94
Accuracy: 0.0 	95
Accuracy: 0.0 	96
Accuracy: 0.0 	97
Accuracy: 0.0 	98
Accuracy: 0.0 	99
Accuracy: 0.0199999809265 	0
Accuracy: 0.0299999713898 	1
Accuracy: 0.0299999713898 	2
Accuracy: 0.0299999713898 	3
Accuracy: 0.0299999713898 	4
Accuracy: 0.0299999713898 	5
Accuracy: 0.0299999713898 	6
Accuracy: 0.0299999713898 	7
Accuracy: 0.0299999713898 	8
Accuracy: 0.0299999713898 	9
Accuracy: 0.0299999713898 	10
Accuracy: 0.0299999713898 	11
Accuracy: 0.0299999713898 	12
Accuracy: 0.0299999713898 	13
Accuracy: 0.0299999713898 	14
Accuracy: 0.0299999713898 	15
Accuracy: 0.0299999713898 	16
Accuracy: 0.0299999713898 	17
Accuracy: 0.0299999713898 	18
Accuracy: 0.0299999713898 	19
Accuracy: 0.0299999713898 	20
Accuracy: 0.0299999713898 	21
Accuracy: 0.0299999713898 	22
Accuracy: 0.0299999713898 	23
Accuracy: 0.0299999713898 	24
Accuracy: 0.0299999713898 	25
Accuracy: 0.0299999713898 	26
Accuracy: 0.0299999713898 	27
Accuracy: 0.0299999713898 	28
Accuracy: 0.0299999713898 	29
Accuracy: 0.0299999713898 	30
Accuracy: 0.0299999713898 	31
Accuracy: 0.0299999713898 	32
Accuracy: 0.0299999713898 	33
Accuracy: 0.0299999713898 	34
Accuracy: 0.0299999713898 	35
Accuracy: 0.0299999713898 	36
Accuracy: 0.0299999713898 	37
Accuracy: 0.0299999713898 	38
Accuracy: 0.0299999713898 	39
Accuracy: 0.0299999713898 	40
Accuracy: 0.0299999713898 	41
Accuracy: 0.0400000214577 	42
Accuracy: 0.0400000214577 	43
Accuracy: 0.0400000214577 	44
Accuracy: 0.0400000214577 	45
Accuracy: 0.0500000119209 	46
Accuracy: 0.0500000119209 	47
Accuracy: 0.0500000119209 	48
Accuracy: 0.0500000119209 	49
Accuracy: 0.0500000119209 	50
Accuracy: 0.0500000119209 	51
Accuracy: 0.0500000119209 	52
Accuracy: 0.0500000119209 	53
Accuracy: 0.0600000023842 	54
Accuracy: 0.0600000023842 	55
Accuracy: 0.0699999928474 	56
Accuracy: 0.0699999928474 	57
Accuracy: 0.0699999928474 	58
Accuracy: 0.0699999928474 	59
Accuracy: 0.0699999928474 	60
Accuracy: 0.0699999928474 	61
Accuracy: 0.0699999928474 	62
Accuracy: 0.0699999928474 	63
Accuracy: 0.0699999928474 	64
Accuracy: 0.0699999928474 	65
Accuracy: 0.0699999928474 	66
Accuracy: 0.0699999928474 	67
Accuracy: 0.0699999928474 	68
Accuracy: 0.0699999928474 	69
Accuracy: 0.0699999928474 	70
Accuracy: 0.0699999928474 	71
Accuracy: 0.0699999928474 	72
Accuracy: 0.0699999928474 	73
Accuracy: 0.0699999928474 	74
Accuracy: 0.0699999928474 	75
Accuracy: 0.0699999928474 	76
Accuracy: 0.0699999928474 	77
Accuracy: 0.0699999928474 	78
Accuracy: 0.0699999928474 	79
Accuracy: 0.0699999928474 	80
Accuracy: 0.0699999928474 	81
Accuracy: 0.0699999928474 	82
Accuracy: 0.0699999928474 	83
Accuracy: 0.0699999928474 	84
Accuracy: 0.0699999928474 	85
Accuracy: 0.0699999928474 	86
Accuracy: 0.0699999928474 	87
Accuracy: 0.0699999928474 	88
Accuracy: 0.0699999928474 	89
Accuracy: 0.0699999928474 	90
Accuracy: 0.0699999928474 	91
Accuracy: 0.0699999928474 	92
Accuracy: 0.0699999928474 	93
Accuracy: 0.0699999928474 	94
Accuracy: 0.0699999928474 	95
Accuracy: 0.0699999928474 	96
Accuracy: 0.0699999928474 	97
Accuracy: 0.0699999928474 	98
Accuracy: 0.0600000023842 	99
[(0.050000011920928955, 33), (0.069999992847442627, 56)]
1
2
Thresh: 0.1
Comps: 1
***
176.260862648
3.33940654993

In [25]:
models


Out[25]:
[<__main__.multilayer_perceptron instance at 0x7fc3f3ce0518>,
 <__main__.multilayer_perceptron instance at 0x7fc3f3ce00e0>,
 <__main__.multilayer_perceptron instance at 0x7fc3f366cb00>]

In [53]:
len(test.AllBeads)


Out[53]:
6
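
Six beads gives five segments to sweep in the next cell. As a quick sanity check, one might first confirm the shape of a single bead; a minimal hypothetical cell (not part of the original run), assuming each AllBeads entry is a (weights, biases) pair of numpy arrays, which is how the loop below unpacks it:

In [ ]:
# Hypothetical sanity check: each bead should unpack into a list of weight
# matrices and a list of bias vectors.
w0, b0 = test.AllBeads[0]
print len(w0), len(b0)
print [wi.shape for wi in w0]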

In [55]:
# Sweep the interpolation error along each of the five segments between
# consecutive beads; e holds the (max error, argmax) pair for each segment.
for b in xrange(len(test.AllBeads)-1):
    e = InterpBeadError(test.AllBeads[b][0], test.AllBeads[b][1],
                        test.AllBeads[b+1][0], test.AllBeads[b+1][1])


Accuracy: 0.100000023842 	0 0.813 -0.818951
Accuracy: 0.100000023842 	1 0.811995 -0.815392
Accuracy: 0.100000023842 	2 0.81099 -0.811833
Accuracy: 0.100000023842 	3 0.809985 -0.808274
Accuracy: 0.100000023842 	4 0.808979 -0.804715
Accuracy: 0.089999973774 	5 0.807974 -0.801156
Accuracy: 0.089999973774 	6 0.806969 -0.797597
Accuracy: 0.089999973774 	7 0.805964 -0.794038
Accuracy: 0.089999973774 	8 0.804959 -0.790479
Accuracy: 0.089999973774 	9 0.803954 -0.78692
Accuracy: 0.089999973774 	10 0.802949 -0.783361
Accuracy: 0.089999973774 	11 0.801944 -0.779802
Accuracy: 0.089999973774 	12 0.800939 -0.776243
Accuracy: 0.089999973774 	13 0.799934 -0.772684
Accuracy: 0.089999973774 	14 0.798929 -0.769125
Accuracy: 0.089999973774 	15 0.797924 -0.765566
Accuracy: 0.089999973774 	16 0.796918 -0.762007
Accuracy: 0.089999973774 	17 0.795913 -0.758448
Accuracy: 0.089999973774 	18 0.794908 -0.754889
Accuracy: 0.089999973774 	19 0.793903 -0.75133
Accuracy: 0.089999973774 	20 0.792898 -0.747771
Accuracy: 0.089999973774 	21 0.791893 -0.744212
Accuracy: 0.089999973774 	22 0.790888 -0.740653
Accuracy: 0.089999973774 	23 0.789883 -0.737094
Accuracy: 0.089999973774 	24 0.788878 -0.733535
Accuracy: 0.089999973774 	25 0.787873 -0.729976
Accuracy: 0.0799999833107 	26 0.786868 -0.726417
Accuracy: 0.0799999833107 	27 0.785862 -0.722858
Accuracy: 0.0699999928474 	28 0.784857 -0.719299
Accuracy: 0.0699999928474 	29 0.783852 -0.71574
Accuracy: 0.0699999928474 	30 0.782847 -0.712181
Accuracy: 0.0699999928474 	31 0.781842 -0.708622
Accuracy: 0.0699999928474 	32 0.780837 -0.705063
Accuracy: 0.0699999928474 	33 0.779832 -0.701504
Accuracy: 0.0699999928474 	34 0.778827 -0.697945
Accuracy: 0.0699999928474 	35 0.777822 -0.694386
Accuracy: 0.0699999928474 	36 0.776817 -0.690827
Accuracy: 0.0699999928474 	37 0.775812 -0.687268
Accuracy: 0.0699999928474 	38 0.774806 -0.683709
Accuracy: 0.0699999928474 	39 0.773801 -0.68015
Accuracy: 0.0699999928474 	40 0.772796 -0.676591
Accuracy: 0.0699999928474 	41 0.771791 -0.673032
Accuracy: 0.0699999928474 	42 0.770786 -0.669473
Accuracy: 0.0600000023842 	43 0.769781 -0.665914
Accuracy: 0.0600000023842 	44 0.768776 -0.662355
Accuracy: 0.0600000023842 	45 0.767771 -0.658796
Accuracy: 0.0600000023842 	46 0.766766 -0.655237
Accuracy: 0.0500000119209 	47 0.765761 -0.651678
Accuracy: 0.0500000119209 	48 0.764756 -0.648119
Accuracy: 0.0500000119209 	49 0.763751 -0.64456
Accuracy: 0.0400000214577 	50 0.762745 -0.641001
Accuracy: 0.0400000214577 	51 0.76174 -0.637442
Accuracy: 0.0400000214577 	52 0.760735 -0.633883
Accuracy: 0.0400000214577 	53 0.75973 -0.630324
Accuracy: 0.0400000214577 	54 0.758725 -0.626765
Accuracy: 0.0400000214577 	55 0.75772 -0.623206
Accuracy: 0.0400000214577 	56 0.756715 -0.619647
Accuracy: 0.0400000214577 	57 0.75571 -0.616088
Accuracy: 0.0299999713898 	58 0.754705 -0.612529
Accuracy: 0.0299999713898 	59 0.7537 -0.60897
Accuracy: 0.0199999809265 	60 0.752695 -0.605411
Accuracy: 0.0199999809265 	61 0.751689 -0.601852
Accuracy: 0.0199999809265 	62 0.750684 -0.598293
Accuracy: 0.0199999809265 	63 0.749679 -0.594734
Accuracy: 0.0199999809265 	64 0.748674 -0.591175
Accuracy: 0.0199999809265 	65 0.747669 -0.587616
Accuracy: 0.0199999809265 	66 0.746664 -0.584057
Accuracy: 0.0199999809265 	67 0.745659 -0.580498
Accuracy: 0.0199999809265 	68 0.744654 -0.576939
Accuracy: 0.0199999809265 	69 0.743649 -0.57338
Accuracy: 0.0199999809265 	70 0.742644 -0.569821
Accuracy: 0.0199999809265 	71 0.741639 -0.566262
Accuracy: 0.0199999809265 	72 0.740634 -0.562703
Accuracy: 0.0199999809265 	73 0.739628 -0.559144
Accuracy: 0.0199999809265 	74 0.738623 -0.555585
Accuracy: 0.0199999809265 	75 0.737618 -0.552026
Accuracy: 0.0199999809265 	76 0.736613 -0.548467
Accuracy: 0.0199999809265 	77 0.735608 -0.544908
Accuracy: 0.0199999809265 	78 0.734603 -0.541349
Accuracy: 0.0199999809265 	79 0.733598 -0.53779
Accuracy: 0.0199999809265 	80 0.732593 -0.534231
Accuracy: 0.0199999809265 	81 0.731588 -0.530672
Accuracy: 0.0199999809265 	82 0.730583 -0.527113
Accuracy: 0.0199999809265 	83 0.729578 -0.523554
Accuracy: 0.0199999809265 	84 0.728573 -0.519995
Accuracy: 0.0199999809265 	85 0.727567 -0.516436
Accuracy: 0.0199999809265 	86 0.726562 -0.512877
Accuracy: 0.00999999046326 	87 0.725557 -0.509318
Accuracy: 0.00999999046326 	88 0.724552 -0.505759
Accuracy: 0.00999999046326 	89 0.723547 -0.5022
Accuracy: 0.0 	90 0.722542 -0.498641
Accuracy: 0.0 	91 0.721537 -0.495082
Accuracy: 0.0 	92 0.720532 -0.491523
Accuracy: 0.0 	93 0.719527 -0.487964
Accuracy: 0.0 	94 0.718522 -0.484405
Accuracy: 0.0 	95 0.717517 -0.480846
Accuracy: 0.0 	96 0.716511 -0.477287
Accuracy: 0.0 	97 0.715506 -0.473728
Accuracy: 0.0 	98 0.714501 -0.470169
Accuracy: 0.0 	99 0.713496 -0.46661
Accuracy: 0.0199999809265 	0 0.712491 -0.463051
Accuracy: 0.0199999809265 	1 0.711263 -0.458701
Accuracy: 0.0199999809265 	2 0.710034 -0.454351
Accuracy: 0.0199999809265 	3 0.708806 -0.450001
Accuracy: 0.0199999809265 	4 0.707577 -0.445651
Accuracy: 0.0199999809265 	5 0.706349 -0.441301
Accuracy: 0.0199999809265 	6 0.705121 -0.436952
Accuracy: 0.0199999809265 	7 0.703892 -0.432602
Accuracy: 0.0199999809265 	8 0.702664 -0.428252
Accuracy: 0.0199999809265 	9 0.701435 -0.423902
Accuracy: 0.0199999809265 	10 0.700207 -0.419552
Accuracy: 0.0199999809265 	11 0.698978 -0.415202
Accuracy: 0.0199999809265 	12 0.69775 -0.410852
Accuracy: 0.00999999046326 	13 0.696521 -0.406502
Accuracy: 0.00999999046326 	14 0.695293 -0.402152
Accuracy: 0.0199999809265 	15 0.694065 -0.397803
Accuracy: 0.0199999809265 	16 0.692836 -0.393453
Accuracy: 0.0199999809265 	17 0.691608 -0.389103
Accuracy: 0.0299999713898 	18 0.690379 -0.384753
Accuracy: 0.0299999713898 	19 0.689151 -0.380403
Accuracy: 0.0299999713898 	20 0.687922 -0.376053
Accuracy: 0.0299999713898 	21 0.686694 -0.371703
Accuracy: 0.0299999713898 	22 0.685466 -0.367353
Accuracy: 0.0299999713898 	23 0.684237 -0.363003
Accuracy: 0.0299999713898 	24 0.683009 -0.358653
Accuracy: 0.0299999713898 	25 0.68178 -0.354304
Accuracy: 0.0299999713898 	26 0.680552 -0.349954
Accuracy: 0.0299999713898 	27 0.679323 -0.345604
Accuracy: 0.0299999713898 	28 0.678095 -0.341254
Accuracy: 0.0299999713898 	29 0.676866 -0.336904
Accuracy: 0.0299999713898 	30 0.675638 -0.332554
Accuracy: 0.0299999713898 	31 0.67441 -0.328204
Accuracy: 0.0299999713898 	32 0.673181 -0.323854
Accuracy: 0.0299999713898 	33 0.671953 -0.319504
Accuracy: 0.0299999713898 	34 0.670724 -0.315155
Accuracy: 0.0299999713898 	35 0.669496 -0.310805
Accuracy: 0.0299999713898 	36 0.668267 -0.306455
Accuracy: 0.0299999713898 	37 0.667039 -0.302105
Accuracy: 0.0299999713898 	38 0.66581 -0.297755
Accuracy: 0.0299999713898 	39 0.664582 -0.293405
Accuracy: 0.0299999713898 	40 0.663354 -0.289055
Accuracy: 0.0299999713898 	41 0.662125 -0.284705
Accuracy: 0.0199999809265 	42 0.660897 -0.280355
Accuracy: 0.0199999809265 	43 0.659668 -0.276006
Accuracy: 0.0199999809265 	44 0.65844 -0.271656
Accuracy: 0.0299999713898 	45 0.657211 -0.267306
Accuracy: 0.0299999713898 	46 0.655983 -0.262956
Accuracy: 0.0299999713898 	47 0.654755 -0.258606
Accuracy: 0.0299999713898 	48 0.653526 -0.254256
Accuracy: 0.0299999713898 	49 0.652298 -0.249906
Accuracy: 0.0299999713898 	50 0.651069 -0.245556
Accuracy: 0.0299999713898 	51 0.649841 -0.241206
Accuracy: 0.0299999713898 	52 0.648612 -0.236856
Accuracy: 0.0299999713898 	53 0.647384 -0.232507
Accuracy: 0.0299999713898 	54 0.646155 -0.228157
Accuracy: 0.0299999713898 	55 0.644927 -0.223807
Accuracy: 0.0299999713898 	56 0.643699 -0.219457
Accuracy: 0.0299999713898 	57 0.64247 -0.215107
Accuracy: 0.0299999713898 	58 0.641242 -0.210757
Accuracy: 0.0299999713898 	59 0.640013 -0.206407
Accuracy: 0.0299999713898 	60 0.638785 -0.202057
Accuracy: 0.0299999713898 	61 0.637556 -0.197707
Accuracy: 0.0299999713898 	62 0.636328 -0.193358
Accuracy: 0.0299999713898 	63 0.6351 -0.189008
Accuracy: 0.0299999713898 	64 0.633871 -0.184658
Accuracy: 0.0299999713898 	65 0.632643 -0.180308
Accuracy: 0.0299999713898 	66 0.631414 -0.175958
Accuracy: 0.0299999713898 	67 0.630186 -0.171608
Accuracy: 0.0299999713898 	68 0.628957 -0.167258
Accuracy: 0.0400000214577 	69 0.627729 -0.162908
Accuracy: 0.0400000214577 	70 0.6265 -0.158558
Accuracy: 0.0400000214577 	71 0.625272 -0.154208
Accuracy: 0.0400000214577 	72 0.624044 -0.149859
Accuracy: 0.0299999713898 	73 0.622815 -0.145509
Accuracy: 0.0299999713898 	74 0.621587 -0.141159
Accuracy: 0.0299999713898 	75 0.620358 -0.136809
Accuracy: 0.0299999713898 	76 0.61913 -0.132459
Accuracy: 0.0299999713898 	77 0.617901 -0.128109
Accuracy: 0.0299999713898 	78 0.616673 -0.123759
Accuracy: 0.0299999713898 	79 0.615444 -0.119409
Accuracy: 0.0299999713898 	80 0.614216 -0.115059
Accuracy: 0.0299999713898 	81 0.612988 -0.11071
Accuracy: 0.0299999713898 	82 0.611759 -0.10636
Accuracy: 0.0299999713898 	83 0.610531 -0.10201
Accuracy: 0.0299999713898 	84 0.609302 -0.0976599
Accuracy: 0.0299999713898 	85 0.608074 -0.09331
Accuracy: 0.0299999713898 	86 0.606845 -0.0889601
Accuracy: 0.0299999713898 	87 0.605617 -0.0846102
Accuracy: 0.0299999713898 	88 0.604389 -0.0802603
Accuracy: 0.0299999713898 	89 0.60316 -0.0759104
Accuracy: 0.0199999809265 	90 0.601932 -0.0715605
Accuracy: 0.0299999713898 	91 0.600703 -0.0672106
Accuracy: 0.0299999713898 	92 0.599475 -0.0628607
Accuracy: 0.0299999713898 	93 0.598246 -0.0585108
Accuracy: 0.0299999713898 	94 0.597018 -0.0541609
Accuracy: 0.0299999713898 	95 0.595789 -0.049811
Accuracy: 0.0299999713898 	96 0.594561 -0.0454611
Accuracy: 0.0299999713898 	97 0.593333 -0.0411112
Accuracy: 0.0299999713898 	98 0.592104 -0.0367613
Accuracy: 0.0299999713898 	99 0.590876 -0.0324115
Accuracy: 0.0199999809265 	0 0.589647 -0.0280616
Accuracy: 0.0199999809265 	1 0.588888 -0.0253725
Accuracy: 0.0199999809265 	2 0.588129 -0.0226835
Accuracy: 0.0199999809265 	3 0.587369 -0.0199945
Accuracy: 0.0199999809265 	4 0.58661 -0.0173055
Accuracy: 0.0199999809265 	5 0.58585 -0.0146164
Accuracy: 0.0199999809265 	6 0.585091 -0.0119274
Accuracy: 0.0199999809265 	7 0.584332 -0.00923839
Accuracy: 0.0199999809265 	8 0.583572 -0.00654937
Accuracy: 0.0199999809265 	9 0.582813 -0.00386034
Accuracy: 0.0199999809265 	10 0.582053 -0.00117132
Accuracy: 0.0199999809265 	11 0.581294 0.00151771
Accuracy: 0.0199999809265 	12 0.580535 0.00420673
Accuracy: 0.0199999809265 	13 0.579775 0.00689575
Accuracy: 0.0199999809265 	14 0.579016 0.00958478
Accuracy: 0.0199999809265 	15 0.578256 0.0122738
Accuracy: 0.0199999809265 	16 0.577497 0.0149628
Accuracy: 0.0199999809265 	17 0.576738 0.0176519
Accuracy: 0.0199999809265 	18 0.575978 0.0203409
Accuracy: 0.0199999809265 	19 0.575219 0.0230299
Accuracy: 0.0199999809265 	20 0.574459 0.0257189
Accuracy: 0.0199999809265 	21 0.5737 0.028408
Accuracy: 0.0199999809265 	22 0.572941 0.031097
Accuracy: 0.0199999809265 	23 0.572181 0.033786
Accuracy: 0.0199999809265 	24 0.571422 0.036475
Accuracy: 0.0199999809265 	25 0.570662 0.0391641
Accuracy: 0.0199999809265 	26 0.569903 0.0418531
Accuracy: 0.0199999809265 	27 0.569144 0.0445421
Accuracy: 0.0199999809265 	28 0.568384 0.0472311
Accuracy: 0.0199999809265 	29 0.567625 0.0499202
Accuracy: 0.0199999809265 	30 0.566865 0.0526092
Accuracy: 0.0199999809265 	31 0.566106 0.0552982
Accuracy: 0.0199999809265 	32 0.565347 0.0579872
Accuracy: 0.0199999809265 	33 0.564587 0.0606763
Accuracy: 0.0199999809265 	34 0.563828 0.0633653
Accuracy: 0.0199999809265 	35 0.563068 0.0660543
Accuracy: 0.00999999046326 	36 0.562309 0.0687433
Accuracy: 0.00999999046326 	37 0.56155 0.0714324
Accuracy: 0.00999999046326 	38 0.56079 0.0741214
Accuracy: 0.00999999046326 	39 0.560031 0.0768104
Accuracy: 0.00999999046326 	40 0.559271 0.0794994
Accuracy: 0.00999999046326 	41 0.558512 0.0821885
Accuracy: 0.00999999046326 	42 0.557753 0.0848775
Accuracy: 0.00999999046326 	43 0.556993 0.0875665
Accuracy: 0.00999999046326 	44 0.556234 0.0902555
Accuracy: 0.00999999046326 	45 0.555474 0.0929446
Accuracy: 0.00999999046326 	46 0.554715 0.0956336
Accuracy: 0.00999999046326 	47 0.553956 0.0983226
Accuracy: 0.00999999046326 	48 0.553196 0.101012
Accuracy: 0.00999999046326 	49 0.552437 0.103701
Accuracy: 0.00999999046326 	50 0.551677 0.10639
Accuracy: 0.00999999046326 	51 0.550918 0.109079
Accuracy: 0.0199999809265 	52 0.550159 0.111768
Accuracy: 0.0199999809265 	53 0.549399 0.114457
Accuracy: 0.0199999809265 	54 0.54864 0.117146
Accuracy: 0.0199999809265 	55 0.54788 0.119835
Accuracy: 0.0199999809265 	56 0.547121 0.122524
Accuracy: 0.0199999809265 	57 0.546362 0.125213
Accuracy: 0.0199999809265 	58 0.545602 0.127902
Accuracy: 0.0199999809265 	59 0.544843 0.130591
Accuracy: 0.0199999809265 	60 0.544083 0.13328
Accuracy: 0.0199999809265 	61 0.543324 0.135969
Accuracy: 0.0199999809265 	62 0.542565 0.138658
Accuracy: 0.0199999809265 	63 0.541805 0.141347
Accuracy: 0.0199999809265 	64 0.541046 0.144036
Accuracy: 0.0199999809265 	65 0.540286 0.146725
Accuracy: 0.0199999809265 	66 0.539527 0.149414
Accuracy: 0.00999999046326 	67 0.538768 0.152103
Accuracy: 0.00999999046326 	68 0.538008 0.154792
Accuracy: 0.00999999046326 	69 0.537249 0.157481
Accuracy: 0.00999999046326 	70 0.536489 0.16017
Accuracy: 0.00999999046326 	71 0.53573 0.162859
Accuracy: 0.0199999809265 	72 0.534971 0.165548
Accuracy: 0.0199999809265 	73 0.534211 0.168237
Accuracy: 0.0199999809265 	74 0.533452 0.170926
Accuracy: 0.0199999809265 	75 0.532692 0.173615
Accuracy: 0.0199999809265 	76 0.531933 0.176304
Accuracy: 0.0199999809265 	77 0.531174 0.178993
Accuracy: 0.0199999809265 	78 0.530414 0.181682
Accuracy: 0.0199999809265 	79 0.529655 0.184371
Accuracy: 0.0199999809265 	80 0.528895 0.18706
Accuracy: 0.0199999809265 	81 0.528136 0.189749
Accuracy: 0.0199999809265 	82 0.527377 0.192438
Accuracy: 0.0199999809265 	83 0.526617 0.195128
Accuracy: 0.0199999809265 	84 0.525858 0.197817
Accuracy: 0.0199999809265 	85 0.525098 0.200506
Accuracy: 0.0199999809265 	86 0.524339 0.203195
Accuracy: 0.0199999809265 	87 0.52358 0.205884
Accuracy: 0.0199999809265 	88 0.52282 0.208573
Accuracy: 0.0199999809265 	89 0.522061 0.211262
Accuracy: 0.0199999809265 	90 0.521301 0.213951
Accuracy: 0.0199999809265 	91 0.520542 0.21664
Accuracy: 0.0199999809265 	92 0.519783 0.219329
Accuracy: 0.0199999809265 	93 0.519023 0.222018
Accuracy: 0.0199999809265 	94 0.518264 0.224707
Accuracy: 0.00999999046326 	95 0.517504 0.227396
Accuracy: 0.0 	96 0.516745 0.230085
Accuracy: 0.0 	97 0.515986 0.232774
Accuracy: 0.0 	98 0.515226 0.235463
Accuracy: 0.0 	99 0.514467 0.238152
Accuracy: 0.0299999713898 	0 0.513707 0.240841
Accuracy: 0.0299999713898 	1 0.51244 0.24533
Accuracy: 0.0299999713898 	2 0.511172 0.249819
Accuracy: 0.0299999713898 	3 0.509904 0.254308
Accuracy: 0.0299999713898 	4 0.508636 0.258797
Accuracy: 0.0299999713898 	5 0.507369 0.263286
Accuracy: 0.0299999713898 	6 0.506101 0.267775
Accuracy: 0.0299999713898 	7 0.504833 0.272265
Accuracy: 0.0299999713898 	8 0.503565 0.276754
Accuracy: 0.0299999713898 	9 0.502298 0.281243
Accuracy: 0.0400000214577 	10 0.50103 0.285732
Accuracy: 0.0400000214577 	11 0.499762 0.290221
Accuracy: 0.0400000214577 	12 0.498494 0.29471
Accuracy: 0.0400000214577 	13 0.497227 0.299199
Accuracy: 0.0400000214577 	14 0.495959 0.303688
Accuracy: 0.0400000214577 	15 0.494691 0.308177
Accuracy: 0.0400000214577 	16 0.493423 0.312666
Accuracy: 0.0400000214577 	17 0.492156 0.317155
Accuracy: 0.0400000214577 	18 0.490888 0.321645
Accuracy: 0.0400000214577 	19 0.48962 0.326134
Accuracy: 0.0400000214577 	20 0.488352 0.330623
Accuracy: 0.0400000214577 	21 0.487085 0.335112
Accuracy: 0.0500000119209 	22 0.485817 0.339601
Accuracy: 0.0500000119209 	23 0.484549 0.34409
Accuracy: 0.0500000119209 	24 0.483281 0.348579
Accuracy: 0.0500000119209 	25 0.482014 0.353068
Accuracy: 0.0600000023842 	26 0.480746 0.357557
Accuracy: 0.0600000023842 	27 0.479478 0.362046
Accuracy: 0.0500000119209 	28 0.47821 0.366535
Accuracy: 0.0500000119209 	29 0.476943 0.371025
Accuracy: 0.0500000119209 	30 0.475675 0.375514
Accuracy: 0.0500000119209 	31 0.474407 0.380003
Accuracy: 0.0500000119209 	32 0.473139 0.384492
Accuracy: 0.0500000119209 	33 0.471872 0.388981
Accuracy: 0.0500000119209 	34 0.470604 0.39347
Accuracy: 0.0500000119209 	35 0.469336 0.397959
Accuracy: 0.0500000119209 	36 0.468068 0.402448
Accuracy: 0.0500000119209 	37 0.466801 0.406937
Accuracy: 0.0500000119209 	38 0.465533 0.411426
Accuracy: 0.0500000119209 	39 0.464265 0.415915
Accuracy: 0.0500000119209 	40 0.462997 0.420405
Accuracy: 0.0500000119209 	41 0.46173 0.424894
Accuracy: 0.0500000119209 	42 0.460462 0.429383
Accuracy: 0.0500000119209 	43 0.459194 0.433872
Accuracy: 0.0500000119209 	44 0.457927 0.438361
Accuracy: 0.0500000119209 	45 0.456659 0.44285
Accuracy: 0.0500000119209 	46 0.455391 0.447339
Accuracy: 0.0500000119209 	47 0.454123 0.451828
Accuracy: 0.0500000119209 	48 0.452855 0.456317
Accuracy: 0.0500000119209 	49 0.451588 0.460806
Accuracy: 0.0500000119209 	50 0.45032 0.465295
Accuracy: 0.0500000119209 	51 0.449052 0.469785
Accuracy: 0.0500000119209 	52 0.447785 0.474274
Accuracy: 0.0500000119209 	53 0.446517 0.478763
Accuracy: 0.0500000119209 	54 0.445249 0.483252
Accuracy: 0.0600000023842 	55 0.443981 0.487741
Accuracy: 0.0600000023842 	56 0.442713 0.49223
Accuracy: 0.0600000023842 	57 0.441446 0.496719
Accuracy: 0.0600000023842 	58 0.440178 0.501208
Accuracy: 0.0600000023842 	59 0.43891 0.505697
Accuracy: 0.0500000119209 	60 0.437643 0.510186
Accuracy: 0.0500000119209 	61 0.436375 0.514675
Accuracy: 0.0500000119209 	62 0.435107 0.519165
Accuracy: 0.0500000119209 	63 0.433839 0.523654
Accuracy: 0.0500000119209 	64 0.432572 0.528143
Accuracy: 0.0500000119209 	65 0.431304 0.532632
Accuracy: 0.0500000119209 	66 0.430036 0.537121
Accuracy: 0.0500000119209 	67 0.428768 0.54161
Accuracy: 0.0500000119209 	68 0.427501 0.546099
Accuracy: 0.0500000119209 	69 0.426233 0.550588
Accuracy: 0.0500000119209 	70 0.424965 0.555077
Accuracy: 0.0500000119209 	71 0.423697 0.559566
Accuracy: 0.0500000119209 	72 0.42243 0.564055
Accuracy: 0.0500000119209 	73 0.421162 0.568545
Accuracy: 0.0400000214577 	74 0.419894 0.573034
Accuracy: 0.0400000214577 	75 0.418626 0.577523
Accuracy: 0.0400000214577 	76 0.417359 0.582012
Accuracy: 0.0400000214577 	77 0.416091 0.586501
Accuracy: 0.0400000214577 	78 0.414823 0.59099
Accuracy: 0.0299999713898 	79 0.413555 0.595479
Accuracy: 0.0299999713898 	80 0.412288 0.599968
Accuracy: 0.0199999809265 	81 0.41102 0.604457
Accuracy: 0.0199999809265 	82 0.409752 0.608946
Accuracy: 0.0199999809265 	83 0.408484 0.613435
Accuracy: 0.0199999809265 	84 0.407217 0.617925
Accuracy: 0.0199999809265 	85 0.405949 0.622414
Accuracy: 0.00999999046326 	86 0.404681 0.626903
Accuracy: 0.00999999046326 	87 0.403413 0.631392
Accuracy: 0.00999999046326 	88 0.402146 0.635881
Accuracy: 0.00999999046326 	89 0.400878 0.64037
Accuracy: 0.00999999046326 	90 0.39961 0.644859
Accuracy: 0.00999999046326 	91 0.398342 0.649348
Accuracy: 0.00999999046326 	92 0.397075 0.653837
Accuracy: 0.00999999046326 	93 0.395807 0.658326
Accuracy: 0.00999999046326 	94 0.394539 0.662815
Accuracy: 0.00999999046326 	95 0.393271 0.667305
Accuracy: 0.00999999046326 	96 0.392004 0.671794
Accuracy: 0.00999999046326 	97 0.390736 0.676283
Accuracy: 0.00999999046326 	98 0.389468 0.680772
Accuracy: 0.0199999809265 	99 0.3882 0.685261
Accuracy: 0.0500000119209 	0 0.386933 0.68975
Accuracy: 0.0500000119209 	1 0.386726 0.690481
Accuracy: 0.0500000119209 	2 0.38652 0.691212
Accuracy: 0.0500000119209 	3 0.386313 0.691942
Accuracy: 0.0500000119209 	4 0.386107 0.692673
Accuracy: 0.0500000119209 	5 0.385901 0.693404
Accuracy: 0.0500000119209 	6 0.385694 0.694135
Accuracy: 0.0500000119209 	7 0.385488 0.694865
Accuracy: 0.0500000119209 	8 0.385282 0.695596
Accuracy: 0.0500000119209 	9 0.385075 0.696327
Accuracy: 0.0400000214577 	10 0.384869 0.697058
Accuracy: 0.0400000214577 	11 0.384662 0.697789
Accuracy: 0.0400000214577 	12 0.384456 0.698519
Accuracy: 0.0400000214577 	13 0.38425 0.69925
Accuracy: 0.0400000214577 	14 0.384043 0.699981
Accuracy: 0.0400000214577 	15 0.383837 0.700712
Accuracy: 0.0400000214577 	16 0.383631 0.701442
Accuracy: 0.0400000214577 	17 0.383424 0.702173
Accuracy: 0.0400000214577 	18 0.383218 0.702904
Accuracy: 0.0400000214577 	19 0.383011 0.703635
Accuracy: 0.0400000214577 	20 0.382805 0.704366
Accuracy: 0.0400000214577 	21 0.382599 0.705096
Accuracy: 0.0400000214577 	22 0.382392 0.705827
Accuracy: 0.0400000214577 	23 0.382186 0.706558
Accuracy: 0.0400000214577 	24 0.381979 0.707289
Accuracy: 0.0500000119209 	25 0.381773 0.708019
Accuracy: 0.0500000119209 	26 0.381567 0.70875
Accuracy: 0.0500000119209 	27 0.38136 0.709481
Accuracy: 0.0500000119209 	28 0.381154 0.710212
Accuracy: 0.0500000119209 	29 0.380948 0.710943
Accuracy: 0.0500000119209 	30 0.380741 0.711673
Accuracy: 0.0500000119209 	31 0.380535 0.712404
Accuracy: 0.0500000119209 	32 0.380328 0.713135
Accuracy: 0.0500000119209 	33 0.380122 0.713866
Accuracy: 0.0500000119209 	34 0.379916 0.714597
Accuracy: 0.0500000119209 	35 0.379709 0.715327
Accuracy: 0.0500000119209 	36 0.379503 0.716058
Accuracy: 0.0500000119209 	37 0.379297 0.716789
Accuracy: 0.0600000023842 	38 0.37909 0.71752
Accuracy: 0.0600000023842 	39 0.378884 0.71825
Accuracy: 0.0600000023842 	40 0.378677 0.718981
Accuracy: 0.0699999928474 	41 0.378471 0.719712
Accuracy: 0.0699999928474 	42 0.378265 0.720443
Accuracy: 0.0699999928474 	43 0.378058 0.721174
Accuracy: 0.0699999928474 	44 0.377852 0.721904
Accuracy: 0.0699999928474 	45 0.377646 0.722635
Accuracy: 0.0699999928474 	46 0.377439 0.723366
Accuracy: 0.0699999928474 	47 0.377233 0.724097
Accuracy: 0.0699999928474 	48 0.377026 0.724828
Accuracy: 0.0699999928474 	49 0.37682 0.725558
Accuracy: 0.0699999928474 	50 0.376614 0.726289
Accuracy: 0.0799999833107 	51 0.376407 0.72702
Accuracy: 0.0799999833107 	52 0.376201 0.727751
Accuracy: 0.0799999833107 	53 0.375995 0.728481
Accuracy: 0.0799999833107 	54 0.375788 0.729212
Accuracy: 0.0799999833107 	55 0.375582 0.729943
Accuracy: 0.0799999833107 	56 0.375375 0.730674
Accuracy: 0.0799999833107 	57 0.375169 0.731405
Accuracy: 0.0699999928474 	58 0.374963 0.732135
Accuracy: 0.0699999928474 	59 0.374756 0.732866
Accuracy: 0.0699999928474 	60 0.37455 0.733597
Accuracy: 0.0699999928474 	61 0.374344 0.734328
Accuracy: 0.0699999928474 	62 0.374137 0.735058
Accuracy: 0.0699999928474 	63 0.373931 0.735789
Accuracy: 0.0799999833107 	64 0.373724 0.73652
Accuracy: 0.0799999833107 	65 0.373518 0.737251
Accuracy: 0.0799999833107 	66 0.373312 0.737982
Accuracy: 0.0799999833107 	67 0.373105 0.738712
Accuracy: 0.0799999833107 	68 0.372899 0.739443
Accuracy: 0.089999973774 	69 0.372692 0.740174
Accuracy: 0.089999973774 	70 0.372486 0.740905
Accuracy: 0.089999973774 	71 0.37228 0.741636
Accuracy: 0.089999973774 	72 0.372073 0.742366
Accuracy: 0.089999973774 	73 0.371867 0.743097
Accuracy: 0.089999973774 	74 0.371661 0.743828
Accuracy: 0.089999973774 	75 0.371454 0.744559
Accuracy: 0.089999973774 	76 0.371248 0.745289
Accuracy: 0.089999973774 	77 0.371041 0.74602
Accuracy: 0.089999973774 	78 0.370835 0.746751
Accuracy: 0.089999973774 	79 0.370629 0.747482
Accuracy: 0.089999973774 	80 0.370422 0.748213
Accuracy: 0.089999973774 	81 0.370216 0.748943
Accuracy: 0.089999973774 	82 0.37001 0.749674
Accuracy: 0.089999973774 	83 0.369803 0.750405
Accuracy: 0.089999973774 	84 0.369597 0.751136
Accuracy: 0.089999973774 	85 0.36939 0.751866
Accuracy: 0.089999973774 	86 0.369184 0.752597
Accuracy: 0.089999973774 	87 0.368978 0.753328
Accuracy: 0.089999973774 	88 0.368771 0.754059
Accuracy: 0.089999973774 	89 0.368565 0.75479
Accuracy: 0.100000023842 	90 0.368359 0.75552
Accuracy: 0.110000014305 	91 0.368152 0.756251
Accuracy: 0.120000004768 	92 0.367946 0.756982
Accuracy: 0.120000004768 	93 0.367739 0.757713
Accuracy: 0.120000004768 	94 0.367533 0.758443
Accuracy: 0.120000004768 	95 0.367327 0.759174
Accuracy: 0.120000004768 	96 0.36712 0.759905
Accuracy: 0.120000004768 	97 0.366914 0.760636
Accuracy: 0.120000004768 	98 0.366708 0.761367
Accuracy: 0.120000004768 	99 0.366501 0.762097

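The five sweeps above trace the interpolation error along each segment between consecutive beads, alongside the step index and two interpolated weight entries. A natural follow-up is to collect the (max error, argmax) pair that InterpBeadError returns per segment and report the hardest one; a minimal sketch (hypothetical cell, reusing only names defined above):

In [ ]:
# Hypothetical summary cell (not part of the original run): record the
# worst-case error and its position for each segment, then report the
# segment whose worst case is largest.
seg_errors = []
for b in xrange(len(test.AllBeads) - 1):
    maxerr, maxpos = InterpBeadError(test.AllBeads[b][0], test.AllBeads[b][1],
                                     test.AllBeads[b + 1][0], test.AllBeads[b + 1][1])
    seg_errors.append((maxerr, maxpos))

worst = int(np.argmax([e[0] for e in seg_errors]))
print "Worst segment:", worst, "max error:", seg_errors[worst][0], \
    "at t =", seg_errors[worst][1] / 100.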