In [1]:
import tensorflow as tf

In [2]:
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)


Extracting MNIST_data/train-images-idx3-ubyte.gz
Extracting MNIST_data/train-labels-idx1-ubyte.gz
Extracting MNIST_data/t10k-images-idx3-ubyte.gz
Extracting MNIST_data/t10k-labels-idx1-ubyte.gz

In [6]:
weights = []

for alpha in [.5]:
    x = tf.placeholder(tf.float32, [None, 784])

    W = tf.Variable(tf.zeros([784, 10]))
    b = tf.Variable(tf.zeros([10]))

    y = tf.nn.softmax(tf.matmul(x, W) + b)

    y_ = tf.placeholder(tf.float32, [None, 10])

    cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))

    train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

    init = tf.initialize_all_variables()

    correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))

    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

    sess = tf.Session()
    sess.run(init)

    for i in range(1000):
        batch_xs, batch_ys = mnist.train.next_batch(100)
        sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})


        if i%100 == 0:
            print(sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
            print W.eval(sess)
    #weights.append(W.eval(sess))


0.4322
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.8962
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9003
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9113
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9111
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9067
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9168
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9135
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9142
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
0.9178
[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 ..., 
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]

In [16]:
print weights[0][14]


[ -1.06755192e-06  -2.92071984e-08   3.39211401e-05  -1.87954896e-07
  -2.40277004e-05  -2.20967440e-08  -5.70709790e-06  -4.84702980e-07
  -1.16136789e-06  -1.23375150e-06]

In [11]:
data_sets = input_data.read_data_sets(FLAGS.train_dir, FLAGS.fake_data)


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-11-de3f0183d09b> in <module>()
----> 1 data_sets = input_data.read_data_sets(FLAGS.train_dir, FLAGS.fake_data)

NameError: name 'FLAGS' is not defined

In [6]:


In [7]:



0.9202

In [2]:
import numpy as np
#Simple network: Given three integers a,b,c, [-100,100] chooses three random x-values, and evaluates
#the quadratic function a*x^2 + b*x + c at those values.
def func(x,a,b,c):
    return x*x*a + x*b + c

def generatecandidate3(a,b,c):


    candidate = [np.random.random() for x in xrange(1)]
    candidatesolutions = [func(x,a,b,c) for x in candidate]
    
    
    return candidate, candidatesolutions

def generatecandidate4(a,b,c,tot):
    
    candidate = [[np.random.random() for x in xrange(1)] for y in xrange(tot)]
    candidatesolutions = [[func(x[0],a,b,c)] for x in candidate]
    
    return (candidate, candidatesolutions)

In [58]:
# Import MINST data
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)

import tensorflow as tf


Extracting /tmp/data/train-images-idx3-ubyte.gz
Extracting /tmp/data/train-labels-idx1-ubyte.gz
Extracting /tmp/data/t10k-images-idx3-ubyte.gz
Extracting /tmp/data/t10k-labels-idx1-ubyte.gz

In [59]:
# Parameters
learning_rate = 0.1
training_epochs = 15
batch_size = 1000
display_step = 1

# Network Parameters
n_hidden_1 = 4 # 1st layer number of features
n_hidden_2 = 4 # 2nd layer number of features
n_input = 1 # Guess quadratic function
n_classes = 1 # 

# tf Graph input
x = tf.placeholder("float", [None, n_input])
y = tf.placeholder("float", [None, n_classes])

In [317]:
# Create model

class multilayer_perceptron():
    
    #weights = {}
    #biases = {}
    
    def __init__(self, w=0, b=0, ind='00'):
        
        self.index = ind #used for reading values from file
        #See the filesystem convention below (is this really necessary?)
        #I'm going to eschew writing to file for now because I'll be generating too many files
        #Currently, the last value of the parameters is stored in self.params to be read
        
        learning_rate = 0.01
        training_epochs = 15
        batch_size = 1000
        display_step = 1

        # Network Parameters
        n_hidden_1 = 4 # 1st layer number of features
        n_hidden_2 = 4 # 2nd layer number of features
        n_input = 1 # Guess quadratic function
        n_classes = 1 # 
        self.g = tf.Graph()
        
        
        self.params = []
        
        with self.g.as_default():
        
            #Note that by default, weights and biases will be initialized to random normal dists
            if w==0:
                
                self.weights = {
                    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
                    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
                    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
                }
                self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
                self.biases = {
                    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
                    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
                    'out': tf.Variable(tf.random_normal([n_classes]))
                }
                self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
                
            else:
                
                self.weights = {
                    'h1': tf.Variable(w[0]),
                    'h2': tf.Variable(w[1]),
                    'out': tf.Variable(w[2])
                }
                self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
                self.biases = {
                    'b1': tf.Variable(b[0]),
                    'b2': tf.Variable(b[1]),
                    'out': tf.Variable(b[2])
                }
                self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
            self.saver = tf.train.Saver()
    
    
    def UpdateWeights(self, w, b):
        with self.g.as_default():
            self.weights = {
                    'h1': tf.Variable(w[0]),
                    'h2': tf.Variable(w[1]),
                    'out': tf.Variable(w[2])
                }
            self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
            self.biases = {
                'b1': tf.Variable(b[0]),
                'b2': tf.Variable(b[1]),
                'out': tf.Variable(b[2])
            }
            self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
            

        
    def predict(self, x):
        
        with self.g.as_default():
            layer_1 = tf.add(tf.matmul(x, self.weights['h1']), self.biases['b1'])
            layer_1 = tf.nn.relu(layer_1)
            # Hidden layer with RELU activation
            layer_2 = tf.add(tf.matmul(layer_1, self.weights['h2']), self.biases['b2'])
            layer_2 = tf.nn.relu(layer_2)
            # Output layer with linear activation
            out_layer = tf.matmul(layer_2, self.weights['out']) + self.biases['out']
            return out_layer
        
    def ReturnParamsAsList(self):
        
        with self.g.as_default():

            with tf.Session() as sess:
                # Restore variables from disk
                self.saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model"+str(self.index)+".ckpt")                
                return sess.run(self.weightslist), sess.run(self.biaseslist)

        
        
        
        
        

'''def multilayer_perceptron(x, weights, biases):
    # Hidden layer with RELU activation
    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
    layer_1 = tf.nn.relu(layer_1)
    # Hidden layer with RELU activation
    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
    layer_2 = tf.nn.relu(layer_2)
    # Output layer with linear activation
    out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
    return out_layer'''


Out[317]:
"def multilayer_perceptron(x, weights, biases):\n    # Hidden layer with RELU activation\n    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])\n    layer_1 = tf.nn.relu(layer_1)\n    # Hidden layer with RELU activation\n    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])\n    layer_2 = tf.nn.relu(layer_2)\n    # Output layer with linear activation\n    out_layer = tf.matmul(layer_2, weights['out']) + biases['out']\n    return out_layer"

In [53]:
# Store layers weight & bias
weights = {
    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_classes]))
}

# Construct model
test_model = multilayer_perceptron(weights, biases)

pred = test_model.predict(x)

# Define loss and optimizer
#cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
cost = tf.reduce_mean(tf.square(pred-y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)

# Initializing the variables
init = tf.initialize_all_variables()

In [108]:
xtest, ytest = generatecandidate4(.5,.25,.1,1000)

# Launch the graph
with tf.Session() as sess:
    sess.run(init)

    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0.
        total_batch = int(10000/batch_size)
        # Loop over all batches
        for i in range(total_batch):
            batch_x, batch_y = generatecandidate4(.5,.25,.1,batch_size)
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                          y: batch_y})
            # Compute average loss
            avg_cost += c / total_batch
        # Display logs per epoch step
        if epoch % display_step == 0:
            print "Epoch:", '%04d' % (epoch+1), "cost=", \
                "{:.9f}".format(avg_cost)
    print "Optimization Finished!"

    # Test model
    correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
    # Calculate accuracy
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    print "Accuracy:", accuracy.eval({x: xtest, y: ytest})


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-108-78ddd71c1329> in <module>()
      3 # Launch the graph
      4 with tf.Session() as sess:
----> 5     sess.run(init)
      6 
      7     # Training cycle

/usr/local/lib/python2.7/dist-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    338     try:
    339       result = self._run(None, fetches, feed_dict, options_ptr,
--> 340                          run_metadata_ptr)
    341       if run_metadata:
    342         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/usr/local/lib/python2.7/dist-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
    521 
    522     # Validate and process fetches.
--> 523     processed_fetches = self._process_fetches(fetches)
    524     unique_fetches = processed_fetches[0]
    525     target_list = processed_fetches[1]

/usr/local/lib/python2.7/dist-packages/tensorflow/python/client/session.pyc in _process_fetches(self, fetches)
    494         except ValueError as e:
    495           raise ValueError('Fetch argument %r of %r cannot be interpreted as a '
--> 496                            'Tensor. (%s)' % (subfetch, fetch, str(e)))
    497         except KeyError as e:
    498           raise ValueError('Fetch argument %r of %r cannot be interpreted as a '

ValueError: Fetch argument <tensorflow.python.framework.ops.Operation object at 0x7fdafd982d10> of <tensorflow.python.framework.ops.Operation object at 0x7fdafd982d10> cannot be interpreted as a Tensor. (Operation name: "init_2"
op: "NoOp"
input: "^Variable/Assign"
input: "^Variable_1/Assign"
input: "^Variable_2/Assign"
input: "^Variable_3/Assign"
input: "^Variable_4/Assign"
input: "^Variable_5/Assign"
 is not an element of this graph.)

In [39]:
x,y = mnist.train.next_batch(2)
print x
print y
x,y = generatecandidate4(.5,.25,.1,2)
print x
print y


[[ 0.  0.  0. ...,  0.  0.  0.]
 [ 0.  0.  0. ...,  0.  0.  0.]]
[[ 0.  0.  0.  0.  0.  1.  0.  0.  0.  0.]
 [ 0.  0.  0.  0.  0.  0.  0.  0.  1.  0.]]
[[0.2913145477141732], [0.7197537451338574]]
[[0.21526071978349995], [0.5389611631005712]]

In [196]:
#xdat,ydat = generatecandidate4(.5, .25, .1, 10)

print xdat, ydat

xdat = np.array(xdat)
ydat = np.array(ydat)

print func(xdat[0][0],.5,.25,.1)


with models[0].g.as_default():

    x = tf.placeholder("float", [None, n_input])
    y = tf.placeholder("float", [None, n_classes])
    pred = models[0].predict(x)

    #cost = tf.reduce_mean(tf.square(pred-y))
    #optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)

    # Initializing the variables
    #init = tf.initialize_all_variables()
    #init = tf.initialize_local_variables()
    #init = tf.initialize_variables([x,y])


    
    print "****"
    
    with tf.Session() as sess:
        
        
        #print sess.run(models[0].weights['h1'])
        #sess.run(init)
        models[0].saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model0.ckpt")
            
        print sess.run(models[0].weights['h1'])
        
        
        print "*************"
        #print x.eval()
        correct_prediction = tf.reduce_mean(tf.square(pred-y))
        #correct_prediction = tf.sub(pred,y)
        #print "Diff prediction:"
        #print correct_prediction.eval({x: xdat, y: ydat})
        print "Pred:"
        print pred.eval({x: xdat})
        #print sess.run(pred)
        print "Real:"
        print ydat
        #print sess.run(pred)
        #print sess.run(y)
        # Calculate accuracy
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
        print "Accuracy:", accuracy.eval({x: xdat, y: ydat})
        
print models[0].params


[[ 0.6449075 ]
 [ 0.45473615]
 [ 0.33488262]
 [ 0.46034365]
 [ 0.83584571]
 [ 0.89909018]
 [ 0.76755036]
 [ 0.49760915]
 [ 0.90528123]
 [ 0.73978044]] [[ 0.46917971]
 [ 0.31707652]
 [ 0.23979384]
 [ 0.32104405]
 [ 0.65828045]
 [ 0.72895413]
 [ 0.58645437]
 [ 0.34820972]
 [ 0.73608736]
 [ 0.55858266]]
0.469179712727
****
[[ 0.11819226  0.38378665  0.27990231 -1.74266922]]
*************
Pred:
[[ 0.55615944]
 [ 0.3261233 ]
 [ 0.18114549]
 [ 0.33290631]
 [ 0.78712326]
 [ 0.86362571]
 [ 0.70451182]
 [ 0.37798351]
 [ 0.87111408]
 [ 0.67092055]]
Real:
[[ 0.46917971]
 [ 0.31707652]
 [ 0.23979384]
 [ 0.32104405]
 [ 0.65828045]
 [ 0.72895413]
 [ 0.58645437]
 [ 0.34820972]
 [ 0.73608736]
 [ 0.55858266]]
Accuracy: 0.00916406
([array([[ 0.11819226,  0.38378665,  0.27990231, -1.74266922]], dtype=float32), array([[ 0.22078152,  1.18347752,  0.91004926,  0.42721173],
       [ 0.10281421,  0.18740587,  1.87701643, -0.83394569],
       [ 0.63315231,  0.03253949, -0.70630091,  1.60297143],
       [-1.35935378, -1.31399155, -0.5892598 , -0.83139694]], dtype=float32), array([[-1.59010446],
       [-0.17921025],
       [ 2.39534712],
       [ 0.48026386]], dtype=float32)], [array([ 0.20191318,  0.39881784,  1.57099736, -1.2472862 ], dtype=float32), array([-0.1087203 , -1.55095255,  0.33243829, -0.15809895], dtype=float32), array([-0.06598968], dtype=float32)])

In [324]:
import copy

alpha,hidden_dim,hidden_dim2 = (.001,4,4)

thresh = .04

# Parameters
learning_rate = 0.003
training_epochs = 15
batch_size = 2000
display_step = 1

# Network Parameters
n_hidden_1 = 4 # 1st layer number of features
n_hidden_2 = 4 # 2nd layer number of features
n_input = 1 # Guess quadratic function
n_classes = 1 # 
#synapses = []
models = []

#Testing starting in the same place
#synapse0 = 2*np.random.random((1,hidden_dim)) - 1
#synapse1 = 2*np.random.random((hidden_dim,hidden_dim2)) - 1
#synapse2 = 2*np.random.random((hidden_dim2,1)) - 1
copy_model = multilayer_perceptron(ind=0)

for ii in xrange(3):

    '''weights = {
        'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
        'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
        'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
    }
    biases = {
        'b1': tf.Variable(tf.random_normal([n_hidden_1])),
        'b2': tf.Variable(tf.random_normal([n_hidden_2])),
        'out': tf.Variable(tf.random_normal([n_classes]))
    }'''

    # Construct model with different initial weights
    test_model = multilayer_perceptron(ind=ii)
    
    #Construct model with same initial weights
    #test_model = copy.copy(copy_model)
    #test_model.index = ii
    
    
    
    
    #print test_model.weights
    

    
    models.append(test_model)
    with test_model.g.as_default():

        x = tf.placeholder("float", [None, n_input])
        y = tf.placeholder("float", [None, n_classes])
        pred = test_model.predict(x)

        # Define loss and optimizer
        #cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
        cost = tf.reduce_mean(tf.square(pred-y))
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)

        # Initializing the variables
        init = tf.initialize_all_variables()


        #remove the comment to get random initialization
        stopcond = True




        with tf.Session() as sess:
            sess.run(init)
            xtest, ytest = generatecandidate4(.5,.25,.1,1000)

            while stopcond:
                #print 'epoch:' + str(e)
                #X = []
                #y = []
                j = 0
                # Training cycle
                for epoch in range(training_epochs):
                    avg_cost = 0.
                    total_batch = int(10000/batch_size)

                    if (avg_cost > thresh or avg_cost == 0.) and stopcond:
                    # Loop over all batches
                        for i in range(total_batch):
                            batch_x, batch_y = generatecandidate4(.5,.25,.1,batch_size)
                            # Run optimization op (backprop) and cost op (to get loss value)
                            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                                          y: batch_y})
                            # Compute average loss
                            avg_cost += c / total_batch
                        # Display logs per epoch step
                        if epoch % display_step == 0:
                            print "Epoch:", '%04d' % (epoch+1), "cost=", \
                                "{:.9f}".format(avg_cost)

                        if avg_cost < thresh:
                            stopcond = False
                            #test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                            #save_path = test_model.saver.save(sess,"/home/dfreeman/PythonFun/tmp/model" + str(ii) + ".ckpt")
                            
                print "Optimization Finished!"

                # Test model
                #correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                correct_prediction = tf.reduce_mean(tf.square(pred-y))
                # Calculate accuracy
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                print "Accuracy:", accuracy.eval({x: xtest, y: ytest})

                if (j%5000) == 0:
                    print "Error after "+str(j)+" iterations:" + str(accuracy.eval({x: xtest, y: ytest}))

                if accuracy.eval({x: xtest, y: ytest}) < thresh or stopcond == False:
                    #print "Changing stopcond!"
                    stopcond = False
                    print "Final params:"
                    test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                    save_path = test_model.saver.save(sess,"/home/dfreeman/PythonFun/tmp/model" + str(ii) + ".ckpt")
                j+=1
    #remove the comment to get random initialization

    
    #synapses.append([synapse_0,synapse_1,synapse_2


Epoch: 0001 cost= 1.546539235
Epoch: 0002 cost= 0.726056075
Epoch: 0003 cost= 0.468982208
Epoch: 0004 cost= 0.371891242
Epoch: 0005 cost= 0.338695151
Epoch: 0006 cost= 0.325073898
Epoch: 0007 cost= 0.306272101
Epoch: 0008 cost= 0.304094410
Epoch: 0009 cost= 0.297462350
Epoch: 0010 cost= 0.289633816
Epoch: 0011 cost= 0.285320055
Epoch: 0012 cost= 0.275727344
Epoch: 0013 cost= 0.271105832
Epoch: 0014 cost= 0.265889364
Epoch: 0015 cost= 0.264696819
Optimization Finished!
Accuracy: 0.253653
Error after 0 iterations:0.253653
Epoch: 0001 cost= 0.256881350
Epoch: 0002 cost= 0.252774486
Epoch: 0003 cost= 0.247116360
Epoch: 0004 cost= 0.241427022
Epoch: 0005 cost= 0.239372128
Epoch: 0006 cost= 0.231538355
Epoch: 0007 cost= 0.229884392
Epoch: 0008 cost= 0.225733417
Epoch: 0009 cost= 0.222091913
Epoch: 0010 cost= 0.216191697
Epoch: 0011 cost= 0.215894768
Epoch: 0012 cost= 0.210283139
Epoch: 0013 cost= 0.211168277
Epoch: 0014 cost= 0.201413536
Epoch: 0015 cost= 0.197245428
Optimization Finished!
Accuracy: 0.195142
Error after 0 iterations:0.195142
Epoch: 0001 cost= 0.198949969
Epoch: 0002 cost= 0.195963615
Epoch: 0003 cost= 0.190704900
Epoch: 0004 cost= 0.187575325
Epoch: 0005 cost= 0.186489898
Epoch: 0006 cost= 0.181730253
Epoch: 0007 cost= 0.178239095
Epoch: 0008 cost= 0.176646730
Epoch: 0009 cost= 0.177408561
Epoch: 0010 cost= 0.172958112
Epoch: 0011 cost= 0.170805529
Epoch: 0012 cost= 0.167440605
Epoch: 0013 cost= 0.163213798
Epoch: 0014 cost= 0.165419233
Epoch: 0015 cost= 0.161706936
Optimization Finished!
Accuracy: 0.157715
Error after 0 iterations:0.157715
Epoch: 0001 cost= 0.163076648
Epoch: 0002 cost= 0.162775302
Epoch: 0003 cost= 0.158340147
Epoch: 0004 cost= 0.152136916
Epoch: 0005 cost= 0.151680198
Epoch: 0006 cost= 0.150020403
Epoch: 0007 cost= 0.150287020
Epoch: 0008 cost= 0.147683969
Epoch: 0009 cost= 0.145647821
Epoch: 0010 cost= 0.143752423
Epoch: 0011 cost= 0.139379817
Epoch: 0012 cost= 0.139022496
Epoch: 0013 cost= 0.138891986
Epoch: 0014 cost= 0.137515041
Epoch: 0015 cost= 0.134515047
Optimization Finished!
Accuracy: 0.131386
Error after 0 iterations:0.131386
Epoch: 0001 cost= 0.133812436
Epoch: 0002 cost= 0.131385994
Epoch: 0003 cost= 0.129083854
Epoch: 0004 cost= 0.127117170
Epoch: 0005 cost= 0.127755603
Epoch: 0006 cost= 0.125004120
Epoch: 0007 cost= 0.124489291
Epoch: 0008 cost= 0.122517926
Epoch: 0009 cost= 0.123761418
Epoch: 0010 cost= 0.121522665
Epoch: 0011 cost= 0.117702048
Epoch: 0012 cost= 0.118131408
Epoch: 0013 cost= 0.117897923
Epoch: 0014 cost= 0.117229010
Epoch: 0015 cost= 0.114930528
Optimization Finished!
Accuracy: 0.112039
Error after 0 iterations:0.112039
Epoch: 0001 cost= 0.113547328
Epoch: 0002 cost= 0.112387024
Epoch: 0003 cost= 0.113393638
Epoch: 0004 cost= 0.110166049
Epoch: 0005 cost= 0.109929189
Epoch: 0006 cost= 0.108112994
Epoch: 0007 cost= 0.106765455
Epoch: 0008 cost= 0.106871258
Epoch: 0009 cost= 0.104436879
Epoch: 0010 cost= 0.103937900
Epoch: 0011 cost= 0.103166109
Epoch: 0012 cost= 0.102844770
Epoch: 0013 cost= 0.101253751
Epoch: 0014 cost= 0.099285546
Epoch: 0015 cost= 0.099474207
Optimization Finished!
Accuracy: 0.0970474
Error after 0 iterations:0.0970474
Epoch: 0001 cost= 0.098463550
Epoch: 0002 cost= 0.097472744
Epoch: 0003 cost= 0.097528788
Epoch: 0004 cost= 0.094436763
Epoch: 0005 cost= 0.094459198
Epoch: 0006 cost= 0.092911321
Epoch: 0007 cost= 0.093669274
Epoch: 0008 cost= 0.091968700
Epoch: 0009 cost= 0.093294002
Epoch: 0010 cost= 0.090461728
Epoch: 0011 cost= 0.088195883
Epoch: 0012 cost= 0.090213989
Epoch: 0013 cost= 0.089147928
Epoch: 0014 cost= 0.087736958
Epoch: 0015 cost= 0.088686068
Optimization Finished!
Accuracy: 0.0850546
Error after 0 iterations:0.0850546
Epoch: 0001 cost= 0.085532498
Epoch: 0002 cost= 0.086135949
Epoch: 0003 cost= 0.086004792
Epoch: 0004 cost= 0.084344020
Epoch: 0005 cost= 0.084772630
Epoch: 0006 cost= 0.082218421
Epoch: 0007 cost= 0.082147887
Epoch: 0008 cost= 0.081462933
Epoch: 0009 cost= 0.081632112
Epoch: 0010 cost= 0.080422960
Epoch: 0011 cost= 0.079048443
Epoch: 0012 cost= 0.078605095
Epoch: 0013 cost= 0.078135487
Epoch: 0014 cost= 0.076852448
Epoch: 0015 cost= 0.076110750
Optimization Finished!
Accuracy: 0.0750833
Error after 0 iterations:0.0750833
Epoch: 0001 cost= 0.077415177
Epoch: 0002 cost= 0.075987865
Epoch: 0003 cost= 0.073883742
Epoch: 0004 cost= 0.074659795
Epoch: 0005 cost= 0.073504931
Epoch: 0006 cost= 0.073560694
Epoch: 0007 cost= 0.071904735
Epoch: 0008 cost= 0.071009888
Epoch: 0009 cost= 0.072846240
Epoch: 0010 cost= 0.071401595
Epoch: 0011 cost= 0.070399921
Epoch: 0012 cost= 0.070502067
Epoch: 0013 cost= 0.068797827
Epoch: 0014 cost= 0.069129442
Epoch: 0015 cost= 0.068166229
Optimization Finished!
Accuracy: 0.0667121
Error after 0 iterations:0.0667121
Epoch: 0001 cost= 0.066168949
Epoch: 0002 cost= 0.067895883
Epoch: 0003 cost= 0.067659993
Epoch: 0004 cost= 0.066189013
Epoch: 0005 cost= 0.064933491
Epoch: 0006 cost= 0.064875525
Epoch: 0007 cost= 0.065296333
Epoch: 0008 cost= 0.063573267
Epoch: 0009 cost= 0.064138910
Epoch: 0010 cost= 0.062352950
Epoch: 0011 cost= 0.061561159
Epoch: 0012 cost= 0.062987527
Epoch: 0013 cost= 0.061581376
Epoch: 0014 cost= 0.061360917
Epoch: 0015 cost= 0.061069011
Optimization Finished!
Accuracy: 0.059466
Error after 0 iterations:0.059466
Epoch: 0001 cost= 0.060584641
Epoch: 0002 cost= 0.060679759
Epoch: 0003 cost= 0.059219609
Epoch: 0004 cost= 0.059246646
Epoch: 0005 cost= 0.058918683
Epoch: 0006 cost= 0.057453769
Epoch: 0007 cost= 0.059142497
Epoch: 0008 cost= 0.057311519
Epoch: 0009 cost= 0.057553303
Epoch: 0010 cost= 0.056878109
Epoch: 0011 cost= 0.055486434
Epoch: 0012 cost= 0.056004417
Epoch: 0013 cost= 0.055038737
Epoch: 0014 cost= 0.054924677
Epoch: 0015 cost= 0.053455942
Optimization Finished!
Accuracy: 0.0532072
Error after 0 iterations:0.0532072
Epoch: 0001 cost= 0.053924987
Epoch: 0002 cost= 0.053547463
Epoch: 0003 cost= 0.052853929
Epoch: 0004 cost= 0.053364626
Epoch: 0005 cost= 0.052634537
Epoch: 0006 cost= 0.051543725
Epoch: 0007 cost= 0.052122825
Epoch: 0008 cost= 0.051007863
Epoch: 0009 cost= 0.050092195
Epoch: 0010 cost= 0.049482488
Epoch: 0011 cost= 0.049911764
Epoch: 0012 cost= 0.049988496
Epoch: 0013 cost= 0.049300329
Epoch: 0014 cost= 0.049083300
Epoch: 0015 cost= 0.049274565
Optimization Finished!
Accuracy: 0.0476675
Error after 0 iterations:0.0476675
Epoch: 0001 cost= 0.047977978
Epoch: 0002 cost= 0.047475428
Epoch: 0003 cost= 0.047139394
Epoch: 0004 cost= 0.047406337
Epoch: 0005 cost= 0.047453449
Epoch: 0006 cost= 0.046087275
Epoch: 0007 cost= 0.045941752
Epoch: 0008 cost= 0.046661605
Epoch: 0009 cost= 0.045381975
Epoch: 0010 cost= 0.044745644
Epoch: 0011 cost= 0.044629844
Epoch: 0012 cost= 0.044803131
Epoch: 0013 cost= 0.043753164
Epoch: 0014 cost= 0.043457160
Epoch: 0015 cost= 0.044460298
Optimization Finished!
Accuracy: 0.0427088
Error after 0 iterations:0.0427088
Epoch: 0001 cost= 0.042903084
Epoch: 0002 cost= 0.043674634
Epoch: 0003 cost= 0.042273197
Epoch: 0004 cost= 0.042141888
Epoch: 0005 cost= 0.042179315
Epoch: 0006 cost= 0.041873763
Epoch: 0007 cost= 0.041812723
Epoch: 0008 cost= 0.041817351
Epoch: 0009 cost= 0.040541782
Epoch: 0010 cost= 0.040469306
Epoch: 0011 cost= 0.040234438
Epoch: 0012 cost= 0.039894967
Optimization Finished!
Accuracy: 0.0390812
Error after 0 iterations:0.0390812
Final params:
Epoch: 0001 cost= 8.566463947
Epoch: 0002 cost= 1.064710212
Epoch: 0003 cost= 0.796430969
Epoch: 0004 cost= 0.690348804
Epoch: 0005 cost= 0.642105854
Epoch: 0006 cost= 0.594614196
Epoch: 0007 cost= 0.559064019
Epoch: 0008 cost= 0.524094748
Epoch: 0009 cost= 0.486766696
Epoch: 0010 cost= 0.468546420
Epoch: 0011 cost= 0.436114985
Epoch: 0012 cost= 0.411756277
Epoch: 0013 cost= 0.389212108
Epoch: 0014 cost= 0.360528725
Epoch: 0015 cost= 0.346240360
Optimization Finished!
Accuracy: 0.332947
Error after 0 iterations:0.332947
Epoch: 0001 cost= 0.333816266
Epoch: 0002 cost= 0.309229755
Epoch: 0003 cost= 0.302655715
Epoch: 0004 cost= 0.282960224
Epoch: 0005 cost= 0.272563648
Epoch: 0006 cost= 0.265296829
Epoch: 0007 cost= 0.248479089
Epoch: 0008 cost= 0.242529991
Epoch: 0009 cost= 0.229666674
Epoch: 0010 cost= 0.227045941
Epoch: 0011 cost= 0.213323674
Epoch: 0012 cost= 0.206528360
Epoch: 0013 cost= 0.202082947
Epoch: 0014 cost= 0.196204180
Epoch: 0015 cost= 0.185154352
Optimization Finished!
Accuracy: 0.183362
Error after 0 iterations:0.183362
Epoch: 0001 cost= 0.185177988
Epoch: 0002 cost= 0.176006979
Epoch: 0003 cost= 0.168627429
Epoch: 0004 cost= 0.168467119
Epoch: 0005 cost= 0.162896842
Epoch: 0006 cost= 0.160756466
Epoch: 0007 cost= 0.154645005
Epoch: 0008 cost= 0.150571913
Epoch: 0009 cost= 0.150225806
Epoch: 0010 cost= 0.143278953
Epoch: 0011 cost= 0.143163109
Epoch: 0012 cost= 0.137917808
Epoch: 0013 cost= 0.133700353
Epoch: 0014 cost= 0.132146746
Epoch: 0015 cost= 0.132464200
Optimization Finished!
Accuracy: 0.12687
Error after 0 iterations:0.12687
Epoch: 0001 cost= 0.127495019
Epoch: 0002 cost= 0.126423810
Epoch: 0003 cost= 0.122304903
Epoch: 0004 cost= 0.122292608
Epoch: 0005 cost= 0.116570395
Epoch: 0006 cost= 0.114487562
Epoch: 0007 cost= 0.113745336
Epoch: 0008 cost= 0.113937132
Epoch: 0009 cost= 0.109732327
Epoch: 0010 cost= 0.110923895
Epoch: 0011 cost= 0.107965569
Epoch: 0012 cost= 0.106585783
Epoch: 0013 cost= 0.104301442
Epoch: 0014 cost= 0.105843543
Epoch: 0015 cost= 0.100753459
Optimization Finished!
Accuracy: 0.0998525
Error after 0 iterations:0.0998525
Epoch: 0001 cost= 0.099734506
Epoch: 0002 cost= 0.099917333
Epoch: 0003 cost= 0.097511281
Epoch: 0004 cost= 0.098219968
Epoch: 0005 cost= 0.096415462
Epoch: 0006 cost= 0.093392847
Epoch: 0007 cost= 0.094391374
Epoch: 0008 cost= 0.092680717
Epoch: 0009 cost= 0.092915891
Epoch: 0010 cost= 0.091703179
Epoch: 0011 cost= 0.088996011
Epoch: 0012 cost= 0.087280218
Epoch: 0013 cost= 0.088482521
Epoch: 0014 cost= 0.086573911
Epoch: 0015 cost= 0.086244839
Optimization Finished!
Accuracy: 0.0848296
Error after 0 iterations:0.0848296
Epoch: 0001 cost= 0.084971145
Epoch: 0002 cost= 0.084309912
Epoch: 0003 cost= 0.083791973
Epoch: 0004 cost= 0.084584427
Epoch: 0005 cost= 0.081617291
Epoch: 0006 cost= 0.082801946
Epoch: 0007 cost= 0.082823662
Epoch: 0008 cost= 0.078996341
Epoch: 0009 cost= 0.080115581
Epoch: 0010 cost= 0.078821410
Epoch: 0011 cost= 0.078726292
Epoch: 0012 cost= 0.077802026
Epoch: 0013 cost= 0.078393909
Epoch: 0014 cost= 0.078035265
Epoch: 0015 cost= 0.077601656
Optimization Finished!
Accuracy: 0.0754932
Error after 0 iterations:0.0754932
Epoch: 0001 cost= 0.075531417
Epoch: 0002 cost= 0.075105910
Epoch: 0003 cost= 0.076327583
Epoch: 0004 cost= 0.074457875
Epoch: 0005 cost= 0.074286415
Epoch: 0006 cost= 0.075074551
Epoch: 0007 cost= 0.073094501
Epoch: 0008 cost= 0.074067698
Epoch: 0009 cost= 0.071829167
Epoch: 0010 cost= 0.072250536
Epoch: 0011 cost= 0.070105064
Epoch: 0012 cost= 0.070901772
Epoch: 0013 cost= 0.071322148
Epoch: 0014 cost= 0.068845813
Epoch: 0015 cost= 0.069080067
Optimization Finished!
Accuracy: 0.0693448
Error after 0 iterations:0.0693448
Epoch: 0001 cost= 0.069503373
Epoch: 0002 cost= 0.069107661
Epoch: 0003 cost= 0.069306919
Epoch: 0004 cost= 0.069003411
Epoch: 0005 cost= 0.068499662
Epoch: 0006 cost= 0.067780909
Epoch: 0007 cost= 0.067542446
Epoch: 0008 cost= 0.067936090
Epoch: 0009 cost= 0.067400116
Epoch: 0010 cost= 0.067066595
Epoch: 0011 cost= 0.066455472
Epoch: 0012 cost= 0.066700408
Epoch: 0013 cost= 0.066432859
Epoch: 0014 cost= 0.065332144
Epoch: 0015 cost= 0.065632342
Optimization Finished!
Accuracy: 0.0647464
Error after 0 iterations:0.0647464
Epoch: 0001 cost= 0.064837322
Epoch: 0002 cost= 0.064592192
Epoch: 0003 cost= 0.064322582
Epoch: 0004 cost= 0.065385102
Epoch: 0005 cost= 0.065262750
Epoch: 0006 cost= 0.064164549
Epoch: 0007 cost= 0.064503368
Epoch: 0008 cost= 0.064301394
Epoch: 0009 cost= 0.063619985
Epoch: 0010 cost= 0.064070615
Epoch: 0011 cost= 0.062586683
Epoch: 0012 cost= 0.062844453
Epoch: 0013 cost= 0.062452724
Epoch: 0014 cost= 0.060446823
Epoch: 0015 cost= 0.062058555
Optimization Finished!
Accuracy: 0.0612448
Error after 0 iterations:0.0612448
Epoch: 0001 cost= 0.060893839
Epoch: 0002 cost= 0.062190268
Epoch: 0003 cost= 0.061269444
Epoch: 0004 cost= 0.061101802
Epoch: 0005 cost= 0.060952849
Epoch: 0006 cost= 0.060279492
Epoch: 0007 cost= 0.059256952
Epoch: 0008 cost= 0.059650630
Epoch: 0009 cost= 0.059484483
Epoch: 0010 cost= 0.059861670
Epoch: 0011 cost= 0.059457712
Epoch: 0012 cost= 0.059182473
Epoch: 0013 cost= 0.060134260
Epoch: 0014 cost= 0.059177559
Epoch: 0015 cost= 0.058948533
Optimization Finished!
Accuracy: 0.0584297
Error after 0 iterations:0.0584297
Epoch: 0001 cost= 0.058869768
Epoch: 0002 cost= 0.058879386
Epoch: 0003 cost= 0.059115867
Epoch: 0004 cost= 0.058690875
Epoch: 0005 cost= 0.058058673
Epoch: 0006 cost= 0.057099386
Epoch: 0007 cost= 0.058092479
Epoch: 0008 cost= 0.058133975
Epoch: 0009 cost= 0.057097281
Epoch: 0010 cost= 0.057827421
Epoch: 0011 cost= 0.056372645
Epoch: 0012 cost= 0.057329427
Epoch: 0013 cost= 0.057429782
Epoch: 0014 cost= 0.056763455
Epoch: 0015 cost= 0.055340821
Optimization Finished!
Accuracy: 0.056038
Error after 0 iterations:0.056038
Epoch: 0001 cost= 0.055752822
Epoch: 0002 cost= 0.055908512
Epoch: 0003 cost= 0.055744616
Epoch: 0004 cost= 0.056788372
Epoch: 0005 cost= 0.055741844
Epoch: 0006 cost= 0.056041112
Epoch: 0007 cost= 0.055551896
Epoch: 0008 cost= 0.055808373
Epoch: 0009 cost= 0.055538557
Epoch: 0010 cost= 0.054873340
Epoch: 0011 cost= 0.055890999
Epoch: 0012 cost= 0.054538029
Epoch: 0013 cost= 0.054010520
Epoch: 0014 cost= 0.055385700
Epoch: 0015 cost= 0.055012139
Optimization Finished!
Accuracy: 0.0539788
Error after 0 iterations:0.0539788
Epoch: 0001 cost= 0.054686516
Epoch: 0002 cost= 0.053705826
Epoch: 0003 cost= 0.053611966
Epoch: 0004 cost= 0.053104047
Epoch: 0005 cost= 0.053974798
Epoch: 0006 cost= 0.053628957
Epoch: 0007 cost= 0.053862391
Epoch: 0008 cost= 0.053038710
Epoch: 0009 cost= 0.053125345
Epoch: 0010 cost= 0.053477424
Epoch: 0011 cost= 0.052307987
Epoch: 0012 cost= 0.052880688
Epoch: 0013 cost= 0.053299719
Epoch: 0014 cost= 0.053047597
Epoch: 0015 cost= 0.053262226
Optimization Finished!
Accuracy: 0.0522032
Error after 0 iterations:0.0522032
Epoch: 0001 cost= 0.053343763
Epoch: 0002 cost= 0.053040379
Epoch: 0003 cost= 0.052391575
Epoch: 0004 cost= 0.052539149
Epoch: 0005 cost= 0.052320442
Epoch: 0006 cost= 0.052189039
Epoch: 0007 cost= 0.051661751
Epoch: 0008 cost= 0.052403764
Epoch: 0009 cost= 0.052584539
Epoch: 0010 cost= 0.051787600
Epoch: 0011 cost= 0.052362988
Epoch: 0012 cost= 0.051107147
Epoch: 0013 cost= 0.052094477
Epoch: 0014 cost= 0.051490303
Epoch: 0015 cost= 0.051472074
Optimization Finished!
Accuracy: 0.0506067
Error after 0 iterations:0.0506067
Epoch: 0001 cost= 0.050801662
Epoch: 0002 cost= 0.050570377
Epoch: 0003 cost= 0.050793492
Epoch: 0004 cost= 0.050811878
Epoch: 0005 cost= 0.050715162
Epoch: 0006 cost= 0.051126106
Epoch: 0007 cost= 0.051154767
Epoch: 0008 cost= 0.049691425
Epoch: 0009 cost= 0.049928429
Epoch: 0010 cost= 0.049692575
Epoch: 0011 cost= 0.050048838
Epoch: 0012 cost= 0.048723960
Epoch: 0013 cost= 0.049974027
Epoch: 0014 cost= 0.050198421
Epoch: 0015 cost= 0.049943878
Optimization Finished!
Accuracy: 0.0491496
Error after 0 iterations:0.0491496
Epoch: 0001 cost= 0.049375637
Epoch: 0002 cost= 0.049322151
Epoch: 0003 cost= 0.049759895
Epoch: 0004 cost= 0.048878849
Epoch: 0005 cost= 0.049339660
Epoch: 0006 cost= 0.048555721
Epoch: 0007 cost= 0.049001835
Epoch: 0008 cost= 0.049365056
Epoch: 0009 cost= 0.049365703
Epoch: 0010 cost= 0.047889847
Epoch: 0011 cost= 0.047902779
Epoch: 0012 cost= 0.047994951
Epoch: 0013 cost= 0.047238435
Epoch: 0014 cost= 0.047494858
Epoch: 0015 cost= 0.048470453
Optimization Finished!
Accuracy: 0.0478334
Error after 0 iterations:0.0478334
Epoch: 0001 cost= 0.048287441
Epoch: 0002 cost= 0.047970657
Epoch: 0003 cost= 0.047853217
Epoch: 0004 cost= 0.048682059
Epoch: 0005 cost= 0.047310498
Epoch: 0006 cost= 0.048118959
Epoch: 0007 cost= 0.047641085
Epoch: 0008 cost= 0.047694085
Epoch: 0009 cost= 0.046854388
Epoch: 0010 cost= 0.047374241
Epoch: 0011 cost= 0.046512441
Epoch: 0012 cost= 0.046902710
Epoch: 0013 cost= 0.047157067
Epoch: 0014 cost= 0.047281326
Epoch: 0015 cost= 0.047530643
Optimization Finished!
Accuracy: 0.0465899
Error after 0 iterations:0.0465899
Epoch: 0001 cost= 0.047589399
Epoch: 0002 cost= 0.046444053
Epoch: 0003 cost= 0.046073205
Epoch: 0004 cost= 0.046695638
Epoch: 0005 cost= 0.046598849
Epoch: 0006 cost= 0.046403646
Epoch: 0007 cost= 0.046923540
Epoch: 0008 cost= 0.046435170
Epoch: 0009 cost= 0.047734407
Epoch: 0010 cost= 0.045901536
Epoch: 0011 cost= 0.046502541
Epoch: 0012 cost= 0.045914182
Epoch: 0013 cost= 0.046231589
Epoch: 0014 cost= 0.045819083
Epoch: 0015 cost= 0.045305481
Optimization Finished!
Accuracy: 0.0454031
Error after 0 iterations:0.0454031
Epoch: 0001 cost= 0.045268896
Epoch: 0002 cost= 0.045656554
Epoch: 0003 cost= 0.045905023
Epoch: 0004 cost= 0.046105563
Epoch: 0005 cost= 0.044778515
Epoch: 0006 cost= 0.045094290
Epoch: 0007 cost= 0.045164354
Epoch: 0008 cost= 0.045567322
Epoch: 0009 cost= 0.044941000
Epoch: 0010 cost= 0.045006262
Epoch: 0011 cost= 0.044399160
Epoch: 0012 cost= 0.045408165
Epoch: 0013 cost= 0.044582972
Epoch: 0014 cost= 0.044945883
Epoch: 0015 cost= 0.044648310
Optimization Finished!
Accuracy: 0.0442766
Error after 0 iterations:0.0442766
Epoch: 0001 cost= 0.045014425
Epoch: 0002 cost= 0.044651471
Epoch: 0003 cost= 0.044218207
Epoch: 0004 cost= 0.043857373
Epoch: 0005 cost= 0.043595613
Epoch: 0006 cost= 0.044836535
Epoch: 0007 cost= 0.043541814
Epoch: 0008 cost= 0.044928414
Epoch: 0009 cost= 0.043808300
Epoch: 0010 cost= 0.043849496
Epoch: 0011 cost= 0.044271658
Epoch: 0012 cost= 0.043546348
Epoch: 0013 cost= 0.043675383
Epoch: 0014 cost= 0.043137664
Epoch: 0015 cost= 0.043838938
Optimization Finished!
Accuracy: 0.0431605
Error after 0 iterations:0.0431605
Epoch: 0001 cost= 0.043409174
Epoch: 0002 cost= 0.042649242
Epoch: 0003 cost= 0.043513295
Epoch: 0004 cost= 0.042931935
Epoch: 0005 cost= 0.043061178
Epoch: 0006 cost= 0.043389377
Epoch: 0007 cost= 0.042110616
Epoch: 0008 cost= 0.043165375
Epoch: 0009 cost= 0.042470063
Epoch: 0010 cost= 0.043074392
Epoch: 0011 cost= 0.043196395
Epoch: 0012 cost= 0.042930654
Epoch: 0013 cost= 0.042853247
Epoch: 0014 cost= 0.043040925
Epoch: 0015 cost= 0.042569283
Optimization Finished!
Accuracy: 0.0420717
Error after 0 iterations:0.0420717
Epoch: 0001 cost= 0.042788143
Epoch: 0002 cost= 0.042538600
Epoch: 0003 cost= 0.042429248
Epoch: 0004 cost= 0.041468786
Epoch: 0005 cost= 0.041658641
Epoch: 0006 cost= 0.041812805
Epoch: 0007 cost= 0.041602422
Epoch: 0008 cost= 0.041700535
Epoch: 0009 cost= 0.042398057
Epoch: 0010 cost= 0.041900747
Epoch: 0011 cost= 0.040977459
Epoch: 0012 cost= 0.041760762
Epoch: 0013 cost= 0.041182321
Epoch: 0014 cost= 0.040770950
Epoch: 0015 cost= 0.041375127
Optimization Finished!
Accuracy: 0.0409894
Error after 0 iterations:0.0409894
Epoch: 0001 cost= 0.040996078
Epoch: 0002 cost= 0.041708016
Epoch: 0003 cost= 0.041588204
Epoch: 0004 cost= 0.041178996
Epoch: 0005 cost= 0.041320446
Epoch: 0006 cost= 0.041261443
Epoch: 0007 cost= 0.040098912
Epoch: 0008 cost= 0.040184060
Epoch: 0009 cost= 0.040978369
Epoch: 0010 cost= 0.040390159
Epoch: 0011 cost= 0.040776813
Epoch: 0012 cost= 0.039766789
Optimization Finished!
Accuracy: 0.0401125
Error after 0 iterations:0.0401125
Final params:
Epoch: 0001 cost= 10.790929222
Epoch: 0002 cost= 6.615515518
Epoch: 0003 cost= 5.041683769
Epoch: 0004 cost= 3.926103354
Epoch: 0005 cost= 3.138649893
Epoch: 0006 cost= 2.549687576
Epoch: 0007 cost= 2.098869371
Epoch: 0008 cost= 1.760066390
Epoch: 0009 cost= 1.477623272
Epoch: 0010 cost= 1.253864884
Epoch: 0011 cost= 1.063316727
Epoch: 0012 cost= 0.912886786
Epoch: 0013 cost= 0.787523949
Epoch: 0014 cost= 0.689002657
Epoch: 0015 cost= 0.594726467
Optimization Finished!
Accuracy: 0.554579
Error after 0 iterations:0.554579
Epoch: 0001 cost= 0.524168193
Epoch: 0002 cost= 0.463574213
Epoch: 0003 cost= 0.403589100
Epoch: 0004 cost= 0.362662965
Epoch: 0005 cost= 0.324361277
Epoch: 0006 cost= 0.290613532
Epoch: 0007 cost= 0.257543418
Epoch: 0008 cost= 0.234293789
Epoch: 0009 cost= 0.210543993
Epoch: 0010 cost= 0.188290930
Epoch: 0011 cost= 0.173252285
Epoch: 0012 cost= 0.157225800
Epoch: 0013 cost= 0.144940710
Epoch: 0014 cost= 0.133287808
Epoch: 0015 cost= 0.122622849
Optimization Finished!
Accuracy: 0.118823
Error after 0 iterations:0.118823
Epoch: 0001 cost= 0.116179819
Epoch: 0002 cost= 0.107084142
Epoch: 0003 cost= 0.099481502
Epoch: 0004 cost= 0.094476135
Epoch: 0005 cost= 0.089731270
Epoch: 0006 cost= 0.083300330
Epoch: 0007 cost= 0.078752890
Epoch: 0008 cost= 0.076548107
Epoch: 0009 cost= 0.073688187
Epoch: 0010 cost= 0.069668484
Epoch: 0011 cost= 0.067454807
Epoch: 0012 cost= 0.066650476
Epoch: 0013 cost= 0.064159001
Epoch: 0014 cost= 0.061742461
Epoch: 0015 cost= 0.060264765
Optimization Finished!
Accuracy: 0.0606294
Error after 0 iterations:0.0606294
Epoch: 0001 cost= 0.059388682
Epoch: 0002 cost= 0.058468252
Epoch: 0003 cost= 0.057498447
Epoch: 0004 cost= 0.056769572
Epoch: 0005 cost= 0.055129112
Epoch: 0006 cost= 0.054352273
Epoch: 0007 cost= 0.054286887
Epoch: 0008 cost= 0.053857258
Epoch: 0009 cost= 0.052729324
Epoch: 0010 cost= 0.052721773
Epoch: 0011 cost= 0.052059088
Epoch: 0012 cost= 0.051396886
Epoch: 0013 cost= 0.051348340
Epoch: 0014 cost= 0.050721341
Epoch: 0015 cost= 0.050438755
Optimization Finished!
Accuracy: 0.0513711
Error after 0 iterations:0.0513711
Epoch: 0001 cost= 0.050481229
Epoch: 0002 cost= 0.051119781
Epoch: 0003 cost= 0.050789712
Epoch: 0004 cost= 0.049708962
Epoch: 0005 cost= 0.049906341
Epoch: 0006 cost= 0.049898318
Epoch: 0007 cost= 0.049463717
Epoch: 0008 cost= 0.049627953
Epoch: 0009 cost= 0.049954203
Epoch: 0010 cost= 0.049580370
Epoch: 0011 cost= 0.049730325
Epoch: 0012 cost= 0.048957632
Epoch: 0013 cost= 0.049479124
Epoch: 0014 cost= 0.048804142
Epoch: 0015 cost= 0.048713103
Optimization Finished!
Accuracy: 0.049785
Error after 0 iterations:0.049785
Epoch: 0001 cost= 0.049452427
Epoch: 0002 cost= 0.049430264
Epoch: 0003 cost= 0.049377968
Epoch: 0004 cost= 0.048862975
Epoch: 0005 cost= 0.049392025
Epoch: 0006 cost= 0.048459014
Epoch: 0007 cost= 0.049795487
Epoch: 0008 cost= 0.049122655
Epoch: 0009 cost= 0.049053328
Epoch: 0010 cost= 0.048358537
Epoch: 0011 cost= 0.048060153
Epoch: 0012 cost= 0.049410796
Epoch: 0013 cost= 0.047786710
Epoch: 0014 cost= 0.049458544
Epoch: 0015 cost= 0.049285045
Optimization Finished!
Accuracy: 0.0494971
Error after 0 iterations:0.0494971
Epoch: 0001 cost= 0.049567846
Epoch: 0002 cost= 0.048771080
Epoch: 0003 cost= 0.049096259
Epoch: 0004 cost= 0.048732157
Epoch: 0005 cost= 0.049036012
Epoch: 0006 cost= 0.048585312
Epoch: 0007 cost= 0.048589584
Epoch: 0008 cost= 0.048803698
Epoch: 0009 cost= 0.048277301
Epoch: 0010 cost= 0.049224573
Epoch: 0011 cost= 0.049047057
Epoch: 0012 cost= 0.048502915
Epoch: 0013 cost= 0.048083623
Epoch: 0014 cost= 0.048910847
Epoch: 0015 cost= 0.049498590
Optimization Finished!
Accuracy: 0.04942
Error after 0 iterations:0.04942
Epoch: 0001 cost= 0.049022395
Epoch: 0002 cost= 0.049247310
Epoch: 0003 cost= 0.048957936
Epoch: 0004 cost= 0.048159167
Epoch: 0005 cost= 0.048846506
Epoch: 0006 cost= 0.049439479
Epoch: 0007 cost= 0.048716715
Epoch: 0008 cost= 0.048837696
Epoch: 0009 cost= 0.048853827
Epoch: 0010 cost= 0.048055349
Epoch: 0011 cost= 0.049590901
Epoch: 0012 cost= 0.047877679
Epoch: 0013 cost= 0.048249652
Epoch: 0014 cost= 0.048840744
Epoch: 0015 cost= 0.048564484
Optimization Finished!
Accuracy: 0.0493821
Error after 0 iterations:0.0493821
Epoch: 0001 cost= 0.048902089
Epoch: 0002 cost= 0.047923821
Epoch: 0003 cost= 0.049143681
Epoch: 0004 cost= 0.048490671
Epoch: 0005 cost= 0.048816292
Epoch: 0006 cost= 0.048719263
Epoch: 0007 cost= 0.049137305
Epoch: 0008 cost= 0.048094285
Epoch: 0009 cost= 0.048589945
Epoch: 0010 cost= 0.048131168
Epoch: 0011 cost= 0.048135771
Epoch: 0012 cost= 0.048365120
Epoch: 0013 cost= 0.048822995
Epoch: 0014 cost= 0.048628632
Epoch: 0015 cost= 0.048749568
Optimization Finished!
Accuracy: 0.0493513
Error after 0 iterations:0.0493513
Epoch: 0001 cost= 0.048630971
Epoch: 0002 cost= 0.049022853
Epoch: 0003 cost= 0.048071335
Epoch: 0004 cost= 0.048904847
Epoch: 0005 cost= 0.049201741
Epoch: 0006 cost= 0.047798178
Epoch: 0007 cost= 0.048456882
Epoch: 0008 cost= 0.049206592
Epoch: 0009 cost= 0.048419531
Epoch: 0010 cost= 0.048841723
Epoch: 0011 cost= 0.048888360
Epoch: 0012 cost= 0.049043179
Epoch: 0013 cost= 0.048113661
Epoch: 0014 cost= 0.048335703
Epoch: 0015 cost= 0.048651598
Optimization Finished!
Accuracy: 0.0493218
Error after 0 iterations:0.0493218
Epoch: 0001 cost= 0.048328601
Epoch: 0002 cost= 0.048600413
Epoch: 0003 cost= 0.048475151
Epoch: 0004 cost= 0.048125194
Epoch: 0005 cost= 0.050073416
Epoch: 0006 cost= 0.048743097
Epoch: 0007 cost= 0.048309336
Epoch: 0008 cost= 0.048089091
Epoch: 0009 cost= 0.048389865
Epoch: 0010 cost= 0.049293631
Epoch: 0011 cost= 0.049674571
Epoch: 0012 cost= 0.049406606
Epoch: 0013 cost= 0.048214621
Epoch: 0014 cost= 0.048335665
Epoch: 0015 cost= 0.047678391
Optimization Finished!
Accuracy: 0.0492943
Error after 0 iterations:0.0492943
Epoch: 0001 cost= 0.048692862
Epoch: 0002 cost= 0.047671479
Epoch: 0003 cost= 0.047695066
Epoch: 0004 cost= 0.048486266
Epoch: 0005 cost= 0.048635809
Epoch: 0006 cost= 0.047914825
Epoch: 0007 cost= 0.048382169
Epoch: 0008 cost= 0.048761358
Epoch: 0009 cost= 0.049037540
Epoch: 0010 cost= 0.049278696
Epoch: 0011 cost= 0.047787777
Epoch: 0012 cost= 0.048869339
Epoch: 0013 cost= 0.048121874
Epoch: 0014 cost= 0.048206332
Epoch: 0015 cost= 0.048607846
Optimization Finished!
Accuracy: 0.0492687
Error after 0 iterations:0.0492687
Epoch: 0001 cost= 0.048949092
Epoch: 0002 cost= 0.048043349
Epoch: 0003 cost= 0.049589956
Epoch: 0004 cost= 0.048563167
Epoch: 0005 cost= 0.048226089
Epoch: 0006 cost= 0.048734631
Epoch: 0007 cost= 0.048500268
Epoch: 0008 cost= 0.048657852
Epoch: 0009 cost= 0.049450391
Epoch: 0010 cost= 0.048246451
Epoch: 0011 cost= 0.048498495
Epoch: 0012 cost= 0.048183686
Epoch: 0013 cost= 0.048995344
Epoch: 0014 cost= 0.049133755
Epoch: 0015 cost= 0.047759859
Optimization Finished!
Accuracy: 0.0492439
Error after 0 iterations:0.0492439
Epoch: 0001 cost= 0.047994556
Epoch: 0002 cost= 0.048187640
Epoch: 0003 cost= 0.048794816
Epoch: 0004 cost= 0.048973627
Epoch: 0005 cost= 0.048865767
Epoch: 0006 cost= 0.049292595
Epoch: 0007 cost= 0.048835436
Epoch: 0008 cost= 0.048340970
Epoch: 0009 cost= 0.048744459
Epoch: 0010 cost= 0.047846773
Epoch: 0011 cost= 0.048581691
Epoch: 0012 cost= 0.048069009
Epoch: 0013 cost= 0.048310385
Epoch: 0014 cost= 0.048900881
Epoch: 0015 cost= 0.048336533
Optimization Finished!
Accuracy: 0.0492199
Error after 0 iterations:0.0492199
Epoch: 0001 cost= 0.048866355
Epoch: 0002 cost= 0.049169998
Epoch: 0003 cost= 0.048837877
Epoch: 0004 cost= 0.048725035
Epoch: 0005 cost= 0.049641018
Epoch: 0006 cost= 0.047804546
Epoch: 0007 cost= 0.048410941
Epoch: 0008 cost= 0.047706359
Epoch: 0009 cost= 0.048740654
Epoch: 0010 cost= 0.048118470
Epoch: 0011 cost= 0.048698383
Epoch: 0012 cost= 0.047513823
Epoch: 0013 cost= 0.047584718
Epoch: 0014 cost= 0.047808725
Epoch: 0015 cost= 0.047476926
Optimization Finished!
Accuracy: 0.0480732
Error after 0 iterations:0.0480732
Epoch: 0001 cost= 0.047608682
Epoch: 0002 cost= 0.047219665
Epoch: 0003 cost= 0.046454381
Epoch: 0004 cost= 0.046215186
Epoch: 0005 cost= 0.046426025
Epoch: 0006 cost= 0.044367995
Epoch: 0007 cost= 0.042867199
Epoch: 0008 cost= 0.042732234
Epoch: 0009 cost= 0.041039610
Epoch: 0010 cost= 0.040383118
Epoch: 0011 cost= 0.039281069
Optimization Finished!
Accuracy: 0.0388307
Error after 0 iterations:0.0388307
Final params:

In [73]:
def synapse_interpolate(synapse1, synapse2, t):
    return (synapse2-synapse1)*t + synapse1



'''ii=0
weights = {
'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
'b1': tf.Variable(tf.random_normal([n_hidden_1])),
'b2': tf.Variable(tf.random_normal([n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_classes]))
}'''

#def model_interpolate(m1, m2, t):
    
    

with models[0].g.as_default():

    with tf.Session() as sess:
        
        # Restore variables from disk.
        
        models[0].saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model0.ckpt")
        modelparams = sess.run(models[0].weightslist[0])
        print sess.run(models[0].weights['h1'])
        print modelparams

        print("Model restored.")
        # Do some work with the model
        
        print models[0].ReturnParamsAsList()[0]
        
        print "**"
        
        print models[1].ReturnParamsAsList()[1]
        
        print "***"
        
        #print models[2].weightslist
        print models[1].params
        print models[1].ReturnParamsAsList()


[[ 0.09790999  0.62101752  1.03343666 -0.38115153]]
[[ 0.09790999  0.62101752  1.03343666 -0.38115153]]
Model restored.
[array([[ 0.09790999,  0.62101752,  1.03343666, -0.38115153]], dtype=float32), array([[ 0.44235808,  0.21506836,  0.761383  ,  0.69092709],
       [-0.03715954,  1.03665876, -0.65453172,  0.45300439],
       [-1.8361553 ,  0.02977604, -0.00432135,  0.1767289 ],
       [-0.53395027,  1.61765206, -0.49521273, -1.70978189]], dtype=float32), array([[ 1.2507863 ],
       [-0.10358144],
       [ 0.16815047],
       [-0.0012698 ]], dtype=float32)]
**
[array([-0.60335797,  0.39746606, -0.38903069,  0.09408516], dtype=float32), array([-0.19858831, -1.76788437,  1.60696042, -2.91697049], dtype=float32), array([-1.86957383], dtype=float32)]
***
([array([[-0.25917089, -2.03158522, -0.12641285, -0.82842195]], dtype=float32), array([[ 0.32461718, -1.12694073,  0.07095902, -0.09691573],
       [-0.1926053 ,  0.11757999, -1.41996026,  0.05515131],
       [-0.11004622,  0.41341448, -0.53388637, -1.1399796 ],
       [ 0.58652341, -1.66347241, -1.35447073,  2.79148102]], dtype=float32), array([[-1.09897745],
       [ 0.26642719],
       [ 1.46171522],
       [-0.94601887]], dtype=float32)], [array([-0.60335797,  0.39746606, -0.38903069,  0.09408516], dtype=float32), array([-0.19858831, -1.76788437,  1.60696042, -2.91697049], dtype=float32), array([-1.86957383], dtype=float32)])
([array([[-0.25917089, -2.03158522, -0.12641285, -0.82842195]], dtype=float32), array([[ 0.32461718, -1.12694073,  0.07095902, -0.09691573],
       [-0.1926053 ,  0.11757999, -1.41996026,  0.05515131],
       [-0.11004622,  0.41341448, -0.53388637, -1.1399796 ],
       [ 0.58652341, -1.66347241, -1.35447073,  2.79148102]], dtype=float32), array([[-1.09897745],
       [ 0.26642719],
       [ 1.46171522],
       [-0.94601887]], dtype=float32)], [array([-0.60335797,  0.39746606, -0.38903069,  0.09408516], dtype=float32), array([-0.19858831, -1.76788437,  1.60696042, -2.91697049], dtype=float32), array([-1.86957383], dtype=float32)])

In [211]:
def synapse_interpolate(synapse1, synapse2, t):
    return (synapse2-synapse1)*t + synapse1

def model_interpolate(w1,b1,w2,b2,t):
    
    m1w = w1
    m1b = b1
    m2w = w2 
    m2b = b2
    
    mwi = [synapse_interpolate(m1we,m2we,t) for m1we, m2we in zip(m1w,m2w)]
    mbi = [synapse_interpolate(m1be,m2be,t) for m1be, m2be in zip(m1b,m2b)]
    
    return mwi, mbi

In [313]:
def synapse_interpolate(synapse1, synapse2, t):
    return (synapse2-synapse1)*t + synapse1




class WeightString:
    
    def __init__(self, w1, b1, w2, b2, numbeads, threshold):
        self.w1 = w1
        self.w2 = w2
        self.b1 = b1
        self.b2 = b2
        #self.w2, self.b2 = m2.params
        self.AllBeads = []

        self.threshold = threshold
        
        self.AllBeads.append([w1,b1])
        
        
        for n in xrange(numbeads):
            ws,bs = model_interpolate(w1,b1,w2,b2, (n + 1.)/(numbeads+1.))
            self.AllBeads.append([ws,bs])
            
        self.AllBeads.append([w2,b2])
        
        
        self.ConvergedList = [False for f in xrange(len(self.AllBeads))]
        self.ConvergedList[0] = True
        self.ConvergedList[-1] = True
    
    
    def SpringNorm(self, order):
        
        total = 0.
        
        #Energy between mobile beads
        for i,b in enumerate(self.AllBeads):
            if i < len(self.AllBeads)-1:
                #print "Tallying energy between bead " + str(i) + " and bead " + str(i+1)
                subtotal = 0.
                for j in xrange(len(b)):
                    subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][0][j],self.AllBeads[i+1][0][j]),ord=order)#/len(self.beads[0][j])
                for j in xrange(len(b)):
                    subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][1][j],self.AllBeads[i+1][1][j]),ord=order)#/len(self.beads[0][j])
                total+=subtotal
        
        return total#/len(self.beads)
        
    
    
    def SGDBead(self, bead, thresh, maxindex):
        
        finalerror = 0.
        
        #thresh = .05

        # Parameters
        learning_rate = 0.01
        training_epochs = 15
        batch_size = 1000
        display_step = 1
        
        curWeights, curBiases = self.AllBeads[bead]
        test_model = multilayer_perceptron(w=curWeights, b=curBiases)

        with test_model.g.as_default():

            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = test_model.predict(x)
            cost = tf.reduce_mean(tf.square(pred-y))
            optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
            init = tf.initialize_all_variables()
            stopcond = True

            with tf.Session() as sess:
                sess.run(init)
                xtest, ytest = generatecandidate4(.5,.25,.1,1000)
                j = 0
                while stopcond:
                    for epoch in range(training_epochs):
                        avg_cost = 0.
                        total_batch = int(10000/batch_size)
                        if (avg_cost > thresh or avg_cost == 0.) and stopcond:
                        # Loop over all batches
                            for i in range(total_batch):
                                batch_x, batch_y = generatecandidate4(.5,.25,.1,batch_size)
                                # Run optimization op (backprop) and cost op (to get loss value)
                                _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                                              y: batch_y})
                                # Compute average loss
                                avg_cost += c / total_batch
                            # Display logs per epoch step
                            #if epoch % display_step == 0:
                            #    print "Epoch:", '%04d' % (epoch+1), "cost=", \
                            #        "{:.9f}".format(avg_cost)

                            if avg_cost < thresh:
                                stopcond = False
                    #print "Optimization Finished!"

                    # Test model
                    #correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                    correct_prediction = tf.reduce_mean(tf.square(pred-y))
                    # Calculate accuracy
                    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                    print "Accuracy:", accuracy.eval({x: xtest, y: ytest})

                    #if (j%5000) == 0:
                    #    print "Error after "+str(j)+" iterations:" + str(accuracy.eval({x: xtest, y: ytest}))

                    finalerror = accuracy.eval({x: xtest, y: ytest})
                    
                    if finalerror < thresh or stopcond==False:# or j > maxindex:
                        #print "Changing stopcond!"
                        stopcond = False
                        #print "Final params:"
                        test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                        self.AllBeads[bead]=test_model.params
                        print "Final bead error: " + str(finalerror)
                        
                    j+=1

            return finalerror

In [240]:
i1=0
i2=1
test = WeightString(models[i1].params[0],models[i1].params[1],models[i2].params[0],models[i2].params[1],1,1)
print len(test.AllBeads)
print test.SGDBead(1,.01,10)


3
0.00979065

In [314]:
def InterpBeadError(w1,b1, w2,b2, write = False, name = "00"):
    errors = []
    
    xdat,ydat = generatecandidate4(.5, .25, .1, 1000)
    xdat = np.array(xdat)
    ydat = np.array(ydat)
    
    
    
    
    for tt in xrange(100):
        #print tt
        #accuracy = 0.
        t = tt/100.
        thiserror = 0

        #x0 = tf.placeholder("float", [None, n_input])
        #y0 = tf.placeholder("float", [None, n_classes])
        weights, biases = model_interpolate(w1,b1,w2,b2, t)
        interp_model = multilayer_perceptron(w=weights, b=biases)
        
        with interp_model.g.as_default():
            
            #interp_model.UpdateWeights(weights, biases)


            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = interp_model.predict(x)
            init = tf.initialize_all_variables()


            with tf.Session() as sess:
                sess.run(init)
                correct_prediction = tf.reduce_mean(tf.square(pred-y))
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                print "Accuracy:", accuracy.eval({x: xdat, y: ydat}),"\t",tt
                thiserror = accuracy.eval({x: xdat, y: ydat})


        errors.append(thiserror)

    if write == True:
        with open("f" + str(name) + ".out",'w+') as f:
            for e in errors:
                f.write(str(e) + "\n")
    
    return max(errors), np.argmax(errors)

In [227]:
InterpBeadError(models[0].params[0],models[0].params[1],models[1].params[0],models[1].params[1])


Out[227]:
0.25772259

In [330]:
#Used for softening the training criteria.  There's some fuzz required due to the difference in 
#training error between test and training
thresh_multiplier = 1.1

    
results = []

connecteddict = {}
for i1 in xrange(len(models)):
    connecteddict[i1] = 'not connected'


for i1 in xrange(len(models)):
    print i1
    for i2 in xrange(len(models)):
        
        if i2 > i1 and ((connecteddict[i1] != connecteddict[i2]) or (connecteddict[i1] == 'not connected' or connecteddict[i2] == 'not connected')) :
            #print "slow1?"
            #print i1,i2
            #print models[0]
            #print models[1]
            #print models[0].params
            #print models[1].params
            test = WeightString(models[i1].params[0],models[i1].params[1],models[i2].params[0],models[i2].params[1],1,1)

            training_threshold = thresh

            depth = 0
            d_max = 10

            #Check error between beads
            #Alg: for each bead at depth i, SGD until converged.
            #For beads with max error along path too large, add another bead between them, repeat

            
            #Keeps track of which indices to check the interpbeaderror between
            newindices = [0,1]
            
            while (depth < d_max):
                print newindices
                #print "slow2?"
                #X, y = GenTest(X,y)
                counter = 0

                for i,c in enumerate(test.ConvergedList):
                    if c == False:
                        #print "slow3?"
                        error = test.SGDBead(i, .5*training_threshold, 20)
                        #print "slow4?"
                            #if counter%5000==0:
                            #    print counter
                            #    print error
                        test.ConvergedList[i] = True

                print test.ConvergedList

                interperrors = []
                interp_bead_indices = []
                for b in xrange(len(test.AllBeads)-1):
                    if b in newindices:
                        e = InterpBeadError(test.AllBeads[b][0],test.AllBeads[b][1], test.AllBeads[b+1][0], test.AllBeads[b+1][1])

                        interperrors.append(e)
                        interp_bead_indices.append(b)
                print interperrors

                if max([ee[0] for ee in interperrors]) < thresh_multiplier*training_threshold:
                    depth = 2*d_max
                    #print test.ConvergedList
                    #print test.SpringNorm(2)
                    #print "Done!"

                else:
                    del newindices[:]
                    #Interperrors stores the maximum error on the path between beads
                    #shift index to account for added beads
                    shift = 0
                    for i, ie in enumerate(interperrors):
                        if ie[0] > thresh_multiplier*training_threshold:
                            k = interp_bead_indices[i]
                            
                            ws,bs = model_interpolate(test.AllBeads[k+shift][0],test.AllBeads[k+shift][1],\
                                                      test.AllBeads[k+shift+1][0],test.AllBeads[k+shift+1][1],\
                                                      ie[1]/100.)
                            
                            test.AllBeads.insert(k+shift+1,[ws,bs])
                            test.ConvergedList.insert(k+shift+1, False)
                            newindices.append(k+shift+1)
                            newindices.append(k+shift)
                            shift+=1
                            #print test.ConvergedList
                            #print test.SpringNorm(2)


                    #print d_max
                    depth += 1
            if depth == 2*d_max:
                results.append([i1,i2,test.SpringNorm(2),"Connected"])
                if connecteddict[i1] == 'not connected' and connecteddict[i2] == 'not connected':
                    connecteddict[i1] = i1
                    connecteddict[i2] = i1

                if connecteddict[i1] == 'not connected':
                    connecteddict[i1] = connecteddict[i2]
                else:
                    if connecteddict[i2] == 'not connected':
                        connecteddict[i2] = connecteddict[i1]
                    else:
                        if connecteddict[i1] != 'not connected' and connecteddict[i2] != 'not connected':
                            hold = connecteddict[i2]
                            connecteddict[i2] = connecteddict[i1]
                            for h in xrange(len(models)):
                                if connecteddict[h] == hold:
                                    connecteddict[h] = connecteddict[i1]
                    
            else:
                results.append([i1,i2,test.SpringNorm(2),"Disconnected"])
            #print results[-1]
	
	
	

uniquecomps = []
totalcomps = 0
for i in xrange(len(models)):
    if not (connecteddict[i] in uniquecomps):
        uniquecomps.append(connecteddict[i])
    
    if connecteddict[i] == 'not connected':
        totalcomps += 1
        
    #print i,connecteddict[i]

notconoffset = 0

if 'not connected' in uniquecomps:
    notconoffset = -1
    
print "Thresh: " + str(thresh)
print "Comps: " + str(len(uniquecomps) + notconoffset + totalcomps)



#for i in xrange(len(synapses)):
#    print connecteddict[i]

connsum = []
for r in results:
    if r[3] == "Connected":
        connsum.append(r[2])
        #print r[2]
        
print "***"
print np.average(connsum)
print np.std(connsum)


0
[0, 1]
Accuracy: 0.0277865
Accuracy: 0.0182306
Final bead error: 0.0182306
[True, True, True]
Accuracy: 0.0423283 	0
Accuracy: 0.0424689 	1
Accuracy: 0.0426616 	2
Accuracy: 0.0429018 	3
Accuracy: 0.0431851 	4
Accuracy: 0.043507 	5
Accuracy: 0.0438634 	6
Accuracy: 0.0442503 	7
Accuracy: 0.0446639 	8
Accuracy: 0.0451004 	9
Accuracy: 0.0455565 	10
Accuracy: 0.0460287 	11
Accuracy: 0.0465137 	12
Accuracy: 0.0470087 	13
Accuracy: 0.0475105 	14
Accuracy: 0.0480166 	15
Accuracy: 0.0485243 	16
Accuracy: 0.049031 	17
Accuracy: 0.0495345 	18
Accuracy: 0.0500325 	19
Accuracy: 0.050523 	20
Accuracy: 0.051004 	21
Accuracy: 0.0514738 	22
Accuracy: 0.0519305 	23
Accuracy: 0.0523728 	24
Accuracy: 0.052799 	25
Accuracy: 0.053208 	26
Accuracy: 0.0535984 	27
Accuracy: 0.0539519 	28
Accuracy: 0.05426 	29
Accuracy: 0.0545226 	30
Accuracy: 0.0547568 	31
Accuracy: 0.0549814 	32
Accuracy: 0.0551839 	33
Accuracy: 0.0553951 	34
Accuracy: 0.0555975 	35
Accuracy: 0.0557932 	36
Accuracy: 0.0559965 	37
Accuracy: 0.0562203 	38
Accuracy: 0.0564561 	39
Accuracy: 0.0567029 	40
Accuracy: 0.0569559 	41
Accuracy: 0.0572096 	42
Accuracy: 0.057468 	43
Accuracy: 0.0577236 	44
Accuracy: 0.0579749 	45
Accuracy: 0.0582176 	46
Accuracy: 0.0584549 	47
Accuracy: 0.0586852 	48
Accuracy: 0.058904 	49
Accuracy: 0.0591084 	50
Accuracy: 0.0592881 	51
Accuracy: 0.0594458 	52
Accuracy: 0.0595859 	53
Accuracy: 0.0597003 	54
Accuracy: 0.0597868 	55
Accuracy: 0.0598598 	56
Accuracy: 0.0598995 	57
Accuracy: 0.0598932 	58
Accuracy: 0.0598415 	59
Accuracy: 0.0597469 	60
Accuracy: 0.0596093 	61
Accuracy: 0.0594301 	62
Accuracy: 0.0591959 	63
Accuracy: 0.05891 	64
Accuracy: 0.0585715 	65
Accuracy: 0.0581958 	66
Accuracy: 0.0577541 	67
Accuracy: 0.0572719 	68
Accuracy: 0.0567203 	69
Accuracy: 0.0560836 	70
Accuracy: 0.0553676 	71
Accuracy: 0.0545665 	72
Accuracy: 0.0536779 	73
Accuracy: 0.0527312 	74
Accuracy: 0.0517025 	75
Accuracy: 0.0505939 	76
Accuracy: 0.0494054 	77
Accuracy: 0.0481438 	78
Accuracy: 0.0468075 	79
Accuracy: 0.045399 	80
Accuracy: 0.0439451 	81
Accuracy: 0.0424385 	82
Accuracy: 0.0408455 	83
Accuracy: 0.0391784 	84
Accuracy: 0.0374533 	85
Accuracy: 0.0356874 	86
Accuracy: 0.0338992 	87
Accuracy: 0.0321087 	88
Accuracy: 0.030337 	89
Accuracy: 0.0286071 	90
Accuracy: 0.0269429 	91
Accuracy: 0.0253701 	92
Accuracy: 0.0239155 	93
Accuracy: 0.0226076 	94
Accuracy: 0.0214764 	95
Accuracy: 0.0205532 	96
Accuracy: 0.0198709 	97
Accuracy: 0.0194639 	98
Accuracy: 0.019368 	99
Accuracy: 0.0175801 	0
Accuracy: 0.0190741 	1
Accuracy: 0.0206992 	2
Accuracy: 0.0224984 	3
Accuracy: 0.024563 	4
Accuracy: 0.0269281 	5
Accuracy: 0.0296059 	6
Accuracy: 0.0326133 	7
Accuracy: 0.0360061 	8
Accuracy: 0.0398361 	9
Accuracy: 0.0441531 	10
Accuracy: 0.0489919 	11
Accuracy: 0.0543967 	12
Accuracy: 0.059927 	13
Accuracy: 0.0649335 	14
Accuracy: 0.0696305 	15
Accuracy: 0.0739014 	16
Accuracy: 0.0777752 	17
Accuracy: 0.0809601 	18
Accuracy: 0.0837327 	19
Accuracy: 0.0862666 	20
Accuracy: 0.0885 	21
Accuracy: 0.0905953 	22
Accuracy: 0.0925334 	23
Accuracy: 0.0943451 	24
Accuracy: 0.0959154 	25
Accuracy: 0.0973864 	26
Accuracy: 0.0987426 	27
Accuracy: 0.0999937 	28
Accuracy: 0.101191 	29
Accuracy: 0.102312 	30
Accuracy: 0.103364 	31
Accuracy: 0.104325 	32
Accuracy: 0.105161 	33
Accuracy: 0.105837 	34
Accuracy: 0.106422 	35
Accuracy: 0.107025 	36
Accuracy: 0.10763 	37
Accuracy: 0.108168 	38
Accuracy: 0.108445 	39
Accuracy: 0.108487 	40
Accuracy: 0.108283 	41
Accuracy: 0.107803 	42
Accuracy: 0.107095 	43
Accuracy: 0.106194 	44
Accuracy: 0.105103 	45
Accuracy: 0.103833 	46
Accuracy: 0.102401 	47
Accuracy: 0.100846 	48
Accuracy: 0.099199 	49
Accuracy: 0.0974771 	50
Accuracy: 0.0957158 	51
Accuracy: 0.0939228 	52
Accuracy: 0.0921184 	53
Accuracy: 0.0903216 	54
Accuracy: 0.0885605 	55
Accuracy: 0.0868737 	56
Accuracy: 0.0852606 	57
Accuracy: 0.0836829 	58
Accuracy: 0.0821337 	59
Accuracy: 0.0806186 	60
Accuracy: 0.0791399 	61
Accuracy: 0.0776888 	62
Accuracy: 0.0762658 	63
Accuracy: 0.0748489 	64
Accuracy: 0.0734288 	65
Accuracy: 0.0720062 	66
Accuracy: 0.070582 	67
Accuracy: 0.0691575 	68
Accuracy: 0.0677342 	69
Accuracy: 0.0663135 	70
Accuracy: 0.0648971 	71
Accuracy: 0.0634868 	72
Accuracy: 0.0620847 	73
Accuracy: 0.0606929 	74
Accuracy: 0.0593138 	75
Accuracy: 0.0579498 	76
Accuracy: 0.0566032 	77
Accuracy: 0.0552766 	78
Accuracy: 0.0539727 	79
Accuracy: 0.0526947 	80
Accuracy: 0.0514462 	81
Accuracy: 0.0502294 	82
Accuracy: 0.0490478 	83
Accuracy: 0.047905 	84
Accuracy: 0.0468038 	85
Accuracy: 0.0457466 	86
Accuracy: 0.0447372 	87
Accuracy: 0.0437814 	88
Accuracy: 0.0428833 	89
Accuracy: 0.0420452 	90
Accuracy: 0.0412721 	91
Accuracy: 0.0405686 	92
Accuracy: 0.0399417 	93
Accuracy: 0.0393934 	94
Accuracy: 0.0389253 	95
Accuracy: 0.0385487 	96
Accuracy: 0.0382636 	97
Accuracy: 0.0380781 	98
Accuracy: 0.0379937 	99
[(0.05989955, 57), (0.10848656, 40)]
[1, 0, 3, 2]
Accuracy: 0.0193971
Final bead error: 0.0193971
Accuracy: 0.0181225
Final bead error: 0.0181225
[True, True, True, True, True]
Accuracy: 0.0397869 	0
Accuracy: 0.0398243 	1
Accuracy: 0.0398637 	2
Accuracy: 0.0399048 	3
Accuracy: 0.0399472 	4
Accuracy: 0.0399907 	5
Accuracy: 0.040035 	6
Accuracy: 0.0400799 	7
Accuracy: 0.040125 	8
Accuracy: 0.0401703 	9
Accuracy: 0.0402154 	10
Accuracy: 0.0402602 	11
Accuracy: 0.0403045 	12
Accuracy: 0.0403481 	13
Accuracy: 0.0403909 	14
Accuracy: 0.0404328 	15
Accuracy: 0.0404735 	16
Accuracy: 0.0405131 	17
Accuracy: 0.0405513 	18
Accuracy: 0.0405881 	19
Accuracy: 0.0406235 	20
Accuracy: 0.0406574 	21
Accuracy: 0.0406897 	22
Accuracy: 0.0407204 	23
Accuracy: 0.0407495 	24
Accuracy: 0.040777 	25
Accuracy: 0.0408029 	26
Accuracy: 0.0408272 	27
Accuracy: 0.0408499 	28
Accuracy: 0.0408711 	29
Accuracy: 0.0408908 	30
Accuracy: 0.0409091 	31
Accuracy: 0.0409261 	32
Accuracy: 0.0409419 	33
Accuracy: 0.0409565 	34
Accuracy: 0.0409701 	35
Accuracy: 0.0409828 	36
Accuracy: 0.0409947 	37
Accuracy: 0.041006 	38
Accuracy: 0.0410168 	39
Accuracy: 0.0410273 	40
Accuracy: 0.0410376 	41
Accuracy: 0.0410481 	42
Accuracy: 0.0410587 	43
Accuracy: 0.0410699 	44
Accuracy: 0.0410817 	45
Accuracy: 0.0410944 	46
Accuracy: 0.0411082 	47
Accuracy: 0.041121 	48
Accuracy: 0.0410911 	49
Accuracy: 0.041018 	50
Accuracy: 0.0409059 	51
Accuracy: 0.0407762 	52
Accuracy: 0.0406346 	53
Accuracy: 0.040463 	54
Accuracy: 0.0402605 	55
Accuracy: 0.0400226 	56
Accuracy: 0.0397648 	57
Accuracy: 0.0394857 	58
Accuracy: 0.0391688 	59
Accuracy: 0.0388319 	60
Accuracy: 0.0384752 	61
Accuracy: 0.0381017 	62
Accuracy: 0.0377188 	63
Accuracy: 0.0373372 	64
Accuracy: 0.0369403 	65
Accuracy: 0.036526 	66
Accuracy: 0.0360907 	67
Accuracy: 0.0356558 	68
Accuracy: 0.035212 	69
Accuracy: 0.0347545 	70
Accuracy: 0.0342838 	71
Accuracy: 0.0338051 	72
Accuracy: 0.0333076 	73
Accuracy: 0.0328043 	74
Accuracy: 0.0322879 	75
Accuracy: 0.0317654 	76
Accuracy: 0.0312408 	77
Accuracy: 0.0307132 	78
Accuracy: 0.0301839 	79
Accuracy: 0.0296547 	80
Accuracy: 0.0291226 	81
Accuracy: 0.0285876 	82
Accuracy: 0.0280525 	83
Accuracy: 0.0275125 	84
Accuracy: 0.0269653 	85
Accuracy: 0.0264154 	86
Accuracy: 0.0258672 	87
Accuracy: 0.0253203 	88
Accuracy: 0.0247722 	89
Accuracy: 0.0242246 	90
Accuracy: 0.0236798 	91
Accuracy: 0.023138 	92
Accuracy: 0.0225996 	93
Accuracy: 0.0220642 	94
Accuracy: 0.0215343 	95
Accuracy: 0.0210088 	96
Accuracy: 0.0204905 	97
Accuracy: 0.0199798 	98
Accuracy: 0.0194773 	99
Accuracy: 0.0204237 	0
Accuracy: 0.0201368 	1
Accuracy: 0.0198718 	2
Accuracy: 0.0196285 	3
Accuracy: 0.0194064 	4
Accuracy: 0.0192047 	5
Accuracy: 0.0190226 	6
Accuracy: 0.0188593 	7
Accuracy: 0.0187143 	8
Accuracy: 0.0185873 	9
Accuracy: 0.0184792 	10
Accuracy: 0.0183885 	11
Accuracy: 0.0183149 	12
Accuracy: 0.0182578 	13
Accuracy: 0.0182168 	14
Accuracy: 0.0181892 	15
Accuracy: 0.0181752 	16
Accuracy: 0.0181731 	17
Accuracy: 0.0181843 	18
Accuracy: 0.0182094 	19
Accuracy: 0.0182474 	20
Accuracy: 0.0182976 	21
Accuracy: 0.0183583 	22
Accuracy: 0.0184256 	23
Accuracy: 0.0185008 	24
Accuracy: 0.0185861 	25
Accuracy: 0.0186773 	26
Accuracy: 0.0187787 	27
Accuracy: 0.0188908 	28
Accuracy: 0.0190102 	29
Accuracy: 0.0191346 	30
Accuracy: 0.0192627 	31
Accuracy: 0.0193957 	32
Accuracy: 0.0195314 	33
Accuracy: 0.0196705 	34
Accuracy: 0.0198105 	35
Accuracy: 0.0199488 	36
Accuracy: 0.0200835 	37
Accuracy: 0.020213 	38
Accuracy: 0.0203417 	39
Accuracy: 0.0204692 	40
Accuracy: 0.0205927 	41
Accuracy: 0.0207192 	42
Accuracy: 0.0208414 	43
Accuracy: 0.0209615 	44
Accuracy: 0.021075 	45
Accuracy: 0.0211866 	46
Accuracy: 0.0212942 	47
Accuracy: 0.0213964 	48
Accuracy: 0.0214942 	49
Accuracy: 0.0215867 	50
Accuracy: 0.0216772 	51
Accuracy: 0.0217622 	52
Accuracy: 0.0218392 	53
Accuracy: 0.0219116 	54
Accuracy: 0.0219773 	55
Accuracy: 0.0220317 	56
Accuracy: 0.0220718 	57
Accuracy: 0.0220976 	58
Accuracy: 0.0221094 	59
Accuracy: 0.0221075 	60
Accuracy: 0.0220921 	61
Accuracy: 0.0220637 	62
Accuracy: 0.0220224 	63
Accuracy: 0.0219689 	64
Accuracy: 0.0219036 	65
Accuracy: 0.021827 	66
Accuracy: 0.0217396 	67
Accuracy: 0.0216423 	68
Accuracy: 0.0215355 	69
Accuracy: 0.02142 	70
Accuracy: 0.0212967 	71
Accuracy: 0.0211663 	72
Accuracy: 0.0210298 	73
Accuracy: 0.0208879 	74
Accuracy: 0.0207417 	75
Accuracy: 0.0205923 	76
Accuracy: 0.0204407 	77
Accuracy: 0.020288 	78
Accuracy: 0.0201354 	79
Accuracy: 0.0199841 	80
Accuracy: 0.0198354 	81
Accuracy: 0.0196906 	82
Accuracy: 0.0195512 	83
Accuracy: 0.0194185 	84
Accuracy: 0.019294 	85
Accuracy: 0.0191792 	86
Accuracy: 0.0190758 	87
Accuracy: 0.0189854 	88
Accuracy: 0.0189097 	89
Accuracy: 0.0188504 	90
Accuracy: 0.0188093 	91
Accuracy: 0.0187883 	92
Accuracy: 0.0187893 	93
Accuracy: 0.0188142 	94
Accuracy: 0.0188651 	95
Accuracy: 0.0189441 	96
Accuracy: 0.0190532 	97
Accuracy: 0.0191946 	98
Accuracy: 0.0193706 	99
Accuracy: 0.0191336 	0
Accuracy: 0.0197237 	1
Accuracy: 0.0203322 	2
Accuracy: 0.0209601 	3
Accuracy: 0.0216082 	4
Accuracy: 0.0222776 	5
Accuracy: 0.0229698 	6
Accuracy: 0.0237003 	7
Accuracy: 0.0244704 	8
Accuracy: 0.025282 	9
Accuracy: 0.026139 	10
Accuracy: 0.0270452 	11
Accuracy: 0.0279981 	12
Accuracy: 0.0289987 	13
Accuracy: 0.0300578 	14
Accuracy: 0.0311802 	15
Accuracy: 0.0323679 	16
Accuracy: 0.0336229 	17
Accuracy: 0.0349506 	18
Accuracy: 0.0363546 	19
Accuracy: 0.0378337 	20
Accuracy: 0.0394031 	21
Accuracy: 0.0410656 	22
Accuracy: 0.0428112 	23
Accuracy: 0.0446427 	24
Accuracy: 0.0465651 	25
Accuracy: 0.0485839 	26
Accuracy: 0.0505033 	27
Accuracy: 0.052279 	28
Accuracy: 0.0538935 	29
Accuracy: 0.0553287 	30
Accuracy: 0.056567 	31
Accuracy: 0.0576997 	32
Accuracy: 0.058657 	33
Accuracy: 0.0594237 	34
Accuracy: 0.0601156 	35
Accuracy: 0.0606991 	36
Accuracy: 0.061197 	37
Accuracy: 0.0616715 	38
Accuracy: 0.0620502 	39
Accuracy: 0.0623444 	40
Accuracy: 0.0625551 	41
Accuracy: 0.0626738 	42
Accuracy: 0.0626925 	43
Accuracy: 0.0626534 	44
Accuracy: 0.0625356 	45
Accuracy: 0.0623945 	46
Accuracy: 0.0621789 	47
Accuracy: 0.061894 	48
Accuracy: 0.0615376 	49
Accuracy: 0.0611082 	50
Accuracy: 0.0606272 	51
Accuracy: 0.0601003 	52
Accuracy: 0.059521 	53
Accuracy: 0.0588988 	54
Accuracy: 0.0582157 	55
Accuracy: 0.0574859 	56
Accuracy: 0.0567328 	57
Accuracy: 0.055954 	58
Accuracy: 0.0551162 	59
Accuracy: 0.0542273 	60
Accuracy: 0.0533105 	61
Accuracy: 0.0523648 	62
Accuracy: 0.051393 	63
Accuracy: 0.050394 	64
Accuracy: 0.0493669 	65
Accuracy: 0.0483131 	66
Accuracy: 0.0472408 	67
Accuracy: 0.0461497 	68
Accuracy: 0.0450462 	69
Accuracy: 0.043932 	70
Accuracy: 0.0428147 	71
Accuracy: 0.0416895 	72
Accuracy: 0.0405536 	73
Accuracy: 0.0394148 	74
Accuracy: 0.0382739 	75
Accuracy: 0.0371371 	76
Accuracy: 0.0360079 	77
Accuracy: 0.0348908 	78
Accuracy: 0.0337897 	79
Accuracy: 0.0327088 	80
Accuracy: 0.0316525 	81
Accuracy: 0.0306262 	82
Accuracy: 0.0296382 	83
Accuracy: 0.0286926 	84
Accuracy: 0.0278 	85
Accuracy: 0.0269455 	86
Accuracy: 0.0261301 	87
Accuracy: 0.0253513 	88
Accuracy: 0.0246105 	89
Accuracy: 0.0238914 	90
Accuracy: 0.023201 	91
Accuracy: 0.0225356 	92
Accuracy: 0.0218989 	93
Accuracy: 0.0212982 	94
Accuracy: 0.0207367 	95
Accuracy: 0.0202154 	96
Accuracy: 0.0197463 	97
Accuracy: 0.0193406 	98
Accuracy: 0.0190084 	99
Accuracy: 0.0182896 	0
Accuracy: 0.0185782 	1
Accuracy: 0.0188669 	2
Accuracy: 0.0191473 	3
Accuracy: 0.0194229 	4
Accuracy: 0.0196926 	5
Accuracy: 0.0199578 	6
Accuracy: 0.0202216 	7
Accuracy: 0.0204855 	8
Accuracy: 0.0207515 	9
Accuracy: 0.0210225 	10
Accuracy: 0.0212992 	11
Accuracy: 0.0215838 	12
Accuracy: 0.0218782 	13
Accuracy: 0.0221895 	14
Accuracy: 0.02252 	15
Accuracy: 0.0228727 	16
Accuracy: 0.0232444 	17
Accuracy: 0.0236363 	18
Accuracy: 0.0240525 	19
Accuracy: 0.0245004 	20
Accuracy: 0.0249817 	21
Accuracy: 0.0255036 	22
Accuracy: 0.0260723 	23
Accuracy: 0.0266911 	24
Accuracy: 0.0273166 	25
Accuracy: 0.0279379 	26
Accuracy: 0.0285541 	27
Accuracy: 0.0291639 	28
Accuracy: 0.029763 	29
Accuracy: 0.030352 	30
Accuracy: 0.0309306 	31
Accuracy: 0.031498 	32
Accuracy: 0.0320543 	33
Accuracy: 0.0325972 	34
Accuracy: 0.0331275 	35
Accuracy: 0.033645 	36
Accuracy: 0.0341491 	37
Accuracy: 0.0346387 	38
Accuracy: 0.0351131 	39
Accuracy: 0.0355721 	40
Accuracy: 0.0360152 	41
Accuracy: 0.0364426 	42
Accuracy: 0.0368547 	43
Accuracy: 0.0372513 	44
Accuracy: 0.0376324 	45
Accuracy: 0.0379973 	46
Accuracy: 0.0383451 	47
Accuracy: 0.0386765 	48
Accuracy: 0.0389913 	49
Accuracy: 0.0392897 	50
Accuracy: 0.0395716 	51
Accuracy: 0.0398371 	52
Accuracy: 0.0400862 	53
Accuracy: 0.0403185 	54
Accuracy: 0.0405345 	55
Accuracy: 0.0407343 	56
Accuracy: 0.0409182 	57
Accuracy: 0.0410863 	58
Accuracy: 0.0412389 	59
Accuracy: 0.0413762 	60
Accuracy: 0.0414985 	61
Accuracy: 0.0416064 	62
Accuracy: 0.0417003 	63
Accuracy: 0.0417805 	64
Accuracy: 0.0418476 	65
Accuracy: 0.0419021 	66
Accuracy: 0.0419443 	67
Accuracy: 0.0419744 	68
Accuracy: 0.0419944 	69
Accuracy: 0.0420036 	70
Accuracy: 0.0420018 	71
Accuracy: 0.0419901 	72
Accuracy: 0.0419688 	73
Accuracy: 0.0419388 	74
Accuracy: 0.0419024 	75
Accuracy: 0.0418599 	76
Accuracy: 0.0418111 	77
Accuracy: 0.0417561 	78
Accuracy: 0.0416954 	79
Accuracy: 0.0416296 	80
Accuracy: 0.0415609 	81
Accuracy: 0.04149 	82
Accuracy: 0.0414187 	83
Accuracy: 0.0413475 	84
Accuracy: 0.0412764 	85
Accuracy: 0.0412069 	86
Accuracy: 0.0411376 	87
Accuracy: 0.04107 	88
Accuracy: 0.0410065 	89
Accuracy: 0.0409461 	90
Accuracy: 0.040891 	91
Accuracy: 0.0408452 	92
Accuracy: 0.0408047 	93
Accuracy: 0.0407726 	94
Accuracy: 0.0407512 	95
Accuracy: 0.040739 	96
Accuracy: 0.0407397 	97
Accuracy: 0.0407537 	98
Accuracy: 0.0407812 	99
[(0.041120995, 48), (0.022109443, 59), (0.062692501, 43), (0.042003606, 70)]
[3, 2]
Accuracy: 0.0150774
Final bead error: 0.0150774
[True, True, True, True, True, True]
Accuracy: 0.019174 	0
Accuracy: 0.0193772 	1
Accuracy: 0.0195845 	2
Accuracy: 0.019796 	3
Accuracy: 0.0200121 	4
Accuracy: 0.0202328 	5
Accuracy: 0.0204584 	6
Accuracy: 0.0206891 	7
Accuracy: 0.0209252 	8
Accuracy: 0.0211669 	9
Accuracy: 0.0214145 	10
Accuracy: 0.0216725 	11
Accuracy: 0.0219435 	12
Accuracy: 0.0222239 	13
Accuracy: 0.0225159 	14
Accuracy: 0.0228176 	15
Accuracy: 0.0231322 	16
Accuracy: 0.0234591 	17
Accuracy: 0.0238031 	18
Accuracy: 0.0241648 	19
Accuracy: 0.0245428 	20
Accuracy: 0.0249354 	21
Accuracy: 0.0253438 	22
Accuracy: 0.0257686 	23
Accuracy: 0.0262092 	24
Accuracy: 0.0266687 	25
Accuracy: 0.0271451 	26
Accuracy: 0.0276383 	27
Accuracy: 0.0281507 	28
Accuracy: 0.0286838 	29
Accuracy: 0.0292393 	30
Accuracy: 0.0298169 	31
Accuracy: 0.0304153 	32
Accuracy: 0.0310392 	33
Accuracy: 0.0316888 	34
Accuracy: 0.0323674 	35
Accuracy: 0.033073 	36
Accuracy: 0.033801 	37
Accuracy: 0.0345553 	38
Accuracy: 0.0353363 	39
Accuracy: 0.0361437 	40
Accuracy: 0.036979 	41
Accuracy: 0.0377939 	42
Accuracy: 0.0385542 	43
Accuracy: 0.0392164 	44
Accuracy: 0.0398191 	45
Accuracy: 0.0403648 	46
Accuracy: 0.0408249 	47
Accuracy: 0.0411928 	48
Accuracy: 0.0415104 	49
Accuracy: 0.0417602 	50
Accuracy: 0.0419605 	51
Accuracy: 0.0421197 	52
Accuracy: 0.0422058 	53
Accuracy: 0.0422291 	54
Accuracy: 0.0421901 	55
Accuracy: 0.0421128 	56
Accuracy: 0.0419794 	57
Accuracy: 0.0418121 	58
Accuracy: 0.0416069 	59
Accuracy: 0.0413415 	60
Accuracy: 0.0410459 	61
Accuracy: 0.0407004 	62
Accuracy: 0.0403087 	63
Accuracy: 0.0398955 	64
Accuracy: 0.0394494 	65
Accuracy: 0.038977 	66
Accuracy: 0.0384757 	67
Accuracy: 0.0379395 	68
Accuracy: 0.0373707 	69
Accuracy: 0.0367743 	70
Accuracy: 0.0361564 	71
Accuracy: 0.0355125 	72
Accuracy: 0.034842 	73
Accuracy: 0.0341524 	74
Accuracy: 0.033434 	75
Accuracy: 0.032707 	76
Accuracy: 0.0319718 	77
Accuracy: 0.0312221 	78
Accuracy: 0.0304626 	79
Accuracy: 0.0296976 	80
Accuracy: 0.0289331 	81
Accuracy: 0.0281721 	82
Accuracy: 0.0274107 	83
Accuracy: 0.0266408 	84
Accuracy: 0.0258685 	85
Accuracy: 0.0251017 	86
Accuracy: 0.024344 	87
Accuracy: 0.0235973 	88
Accuracy: 0.0228655 	89
Accuracy: 0.0221445 	90
Accuracy: 0.021433 	91
Accuracy: 0.020735 	92
Accuracy: 0.0200514 	93
Accuracy: 0.0193847 	94
Accuracy: 0.0187369 	95
Accuracy: 0.0181109 	96
Accuracy: 0.0175093 	97
Accuracy: 0.016932 	98
Accuracy: 0.016385 	99
Accuracy: 0.0147135 	0
Accuracy: 0.0147757 	1
Accuracy: 0.0148405 	2
Accuracy: 0.014907 	3
Accuracy: 0.0149758 	4
Accuracy: 0.0150467 	5
Accuracy: 0.0151195 	6
Accuracy: 0.0151944 	7
Accuracy: 0.0152711 	8
Accuracy: 0.0153495 	9
Accuracy: 0.0154303 	10
Accuracy: 0.0155124 	11
Accuracy: 0.0155963 	12
Accuracy: 0.0156818 	13
Accuracy: 0.0157685 	14
Accuracy: 0.0158559 	15
Accuracy: 0.0159434 	16
Accuracy: 0.0160308 	17
Accuracy: 0.0161184 	18
Accuracy: 0.0162057 	19
Accuracy: 0.0162925 	20
Accuracy: 0.0163782 	21
Accuracy: 0.0164631 	22
Accuracy: 0.0165468 	23
Accuracy: 0.0166294 	24
Accuracy: 0.0167112 	25
Accuracy: 0.0167916 	26
Accuracy: 0.0168714 	27
Accuracy: 0.0169497 	28
Accuracy: 0.0170265 	29
Accuracy: 0.0171015 	30
Accuracy: 0.0171749 	31
Accuracy: 0.0172465 	32
Accuracy: 0.0173161 	33
Accuracy: 0.0173837 	34
Accuracy: 0.0174488 	35
Accuracy: 0.0175115 	36
Accuracy: 0.0175719 	37
Accuracy: 0.0176301 	38
Accuracy: 0.0176863 	39
Accuracy: 0.0177395 	40
Accuracy: 0.0177905 	41
Accuracy: 0.0178385 	42
Accuracy: 0.0178838 	43
Accuracy: 0.0179262 	44
Accuracy: 0.0179657 	45
Accuracy: 0.0180022 	46
Accuracy: 0.0180355 	47
Accuracy: 0.0180659 	48
Accuracy: 0.0180935 	49
Accuracy: 0.0181184 	50
Accuracy: 0.0181411 	51
Accuracy: 0.0181611 	52
Accuracy: 0.0181784 	53
Accuracy: 0.0181928 	54
Accuracy: 0.0182047 	55
Accuracy: 0.0182143 	56
Accuracy: 0.0182219 	57
Accuracy: 0.018228 	58
Accuracy: 0.0182323 	59
Accuracy: 0.0182349 	60
Accuracy: 0.0182363 	61
Accuracy: 0.0182363 	62
Accuracy: 0.0182355 	63
Accuracy: 0.0182343 	64
Accuracy: 0.0182322 	65
Accuracy: 0.0182296 	66
Accuracy: 0.0182276 	67
Accuracy: 0.0182259 	68
Accuracy: 0.0182254 	69
Accuracy: 0.0182267 	70
Accuracy: 0.0182297 	71
Accuracy: 0.0182358 	72
Accuracy: 0.0182443 	73
Accuracy: 0.0182527 	74
Accuracy: 0.018261 	75
Accuracy: 0.0182693 	76
Accuracy: 0.0182771 	77
Accuracy: 0.0182846 	78
Accuracy: 0.0182918 	79
Accuracy: 0.0182979 	80
Accuracy: 0.0182996 	81
Accuracy: 0.0182941 	82
Accuracy: 0.0182805 	83
Accuracy: 0.0182615 	84
Accuracy: 0.0182378 	85
Accuracy: 0.0182099 	86
Accuracy: 0.0181762 	87
Accuracy: 0.0181331 	88
Accuracy: 0.0180843 	89
Accuracy: 0.0180319 	90
Accuracy: 0.0179758 	91
Accuracy: 0.0179147 	92
Accuracy: 0.0178471 	93
Accuracy: 0.0177733 	94
Accuracy: 0.0176968 	95
Accuracy: 0.0176178 	96
Accuracy: 0.0175387 	97
Accuracy: 0.01746 	98
Accuracy: 0.0173811 	99
[(0.042229086, 54), (0.018299641, 81)]
[0, 1]
Accuracy: 0.029521
Accuracy: 0.0219839
Accuracy: 0.0189029
Final bead error: 0.0189029
[True, True, True]
Accuracy: 0.0390626 	0
Accuracy: 0.0397728 	1
Accuracy: 0.0405221 	2
Accuracy: 0.0413076 	3
Accuracy: 0.0421267 	4
Accuracy: 0.0429766 	5
Accuracy: 0.0438546 	6
Accuracy: 0.0447579 	7
Accuracy: 0.0456838 	8
Accuracy: 0.0466296 	9
Accuracy: 0.0475925 	10
Accuracy: 0.0485699 	11
Accuracy: 0.0495589 	12
Accuracy: 0.0505571 	13
Accuracy: 0.0515617 	14
Accuracy: 0.0525701 	15
Accuracy: 0.0535797 	16
Accuracy: 0.0545879 	17
Accuracy: 0.0555922 	18
Accuracy: 0.0565901 	19
Accuracy: 0.0575791 	20
Accuracy: 0.0585569 	21
Accuracy: 0.059521 	22
Accuracy: 0.060469 	23
Accuracy: 0.0613988 	24
Accuracy: 0.0623081 	25
Accuracy: 0.0631947 	26
Accuracy: 0.0640566 	27
Accuracy: 0.0648917 	28
Accuracy: 0.065698 	29
Accuracy: 0.0664737 	30
Accuracy: 0.067217 	31
Accuracy: 0.0679259 	32
Accuracy: 0.068599 	33
Accuracy: 0.0692346 	34
Accuracy: 0.0698314 	35
Accuracy: 0.0702587 	36
Accuracy: 0.0704463 	37
Accuracy: 0.0704765 	38
Accuracy: 0.0704051 	39
Accuracy: 0.0702869 	40
Accuracy: 0.0701438 	41
Accuracy: 0.0699899 	42
Accuracy: 0.0698345 	43
Accuracy: 0.0696818 	44
Accuracy: 0.0695314 	45
Accuracy: 0.069376 	46
Accuracy: 0.069223 	47
Accuracy: 0.0690696 	48
Accuracy: 0.0689111 	49
Accuracy: 0.0687386 	50
Accuracy: 0.0685463 	51
Accuracy: 0.0683392 	52
Accuracy: 0.0681061 	53
Accuracy: 0.067839 	54
Accuracy: 0.0675386 	55
Accuracy: 0.0672051 	56
Accuracy: 0.0668325 	57
Accuracy: 0.0664169 	58
Accuracy: 0.0659511 	59
Accuracy: 0.0654359 	60
Accuracy: 0.0648744 	61
Accuracy: 0.0642597 	62
Accuracy: 0.0635909 	63
Accuracy: 0.0628669 	64
Accuracy: 0.0620831 	65
Accuracy: 0.0612451 	66
Accuracy: 0.0603516 	67
Accuracy: 0.0594014 	68
Accuracy: 0.0583971 	69
Accuracy: 0.0573421 	70
Accuracy: 0.0562336 	71
Accuracy: 0.0550764 	72
Accuracy: 0.0538712 	73
Accuracy: 0.0526147 	74
Accuracy: 0.0513092 	75
Accuracy: 0.0499598 	76
Accuracy: 0.0485705 	77
Accuracy: 0.0471429 	78
Accuracy: 0.0456824 	79
Accuracy: 0.044195 	80
Accuracy: 0.0426825 	81
Accuracy: 0.0411521 	82
Accuracy: 0.03961 	83
Accuracy: 0.0380601 	84
Accuracy: 0.0365089 	85
Accuracy: 0.0349614 	86
Accuracy: 0.0334258 	87
Accuracy: 0.0319096 	88
Accuracy: 0.0304239 	89
Accuracy: 0.0289759 	90
Accuracy: 0.0275707 	91
Accuracy: 0.0262179 	92
Accuracy: 0.0249269 	93
Accuracy: 0.0237068 	94
Accuracy: 0.0225727 	95
Accuracy: 0.0215702 	96
Accuracy: 0.0207013 	97
Accuracy: 0.01999 	98
Accuracy: 0.0194321 	99
Accuracy: 0.0197967 	0
Accuracy: 0.0200087 	1
Accuracy: 0.020623 	2
Accuracy: 0.0216373 	3
Accuracy: 0.0230579 	4
Accuracy: 0.0248714 	5
Accuracy: 0.0270752 	6
Accuracy: 0.0296616 	7
Accuracy: 0.0326298 	8
Accuracy: 0.0359752 	9
Accuracy: 0.0396949 	10
Accuracy: 0.0437855 	11
Accuracy: 0.0482437 	12
Accuracy: 0.0530655 	13
Accuracy: 0.0582481 	14
Accuracy: 0.0637906 	15
Accuracy: 0.0696913 	16
Accuracy: 0.0758731 	17
Accuracy: 0.082126 	18
Accuracy: 0.0882204 	19
Accuracy: 0.0939695 	20
Accuracy: 0.0994068 	21
Accuracy: 0.104545 	22
Accuracy: 0.109098 	23
Accuracy: 0.113044 	24
Accuracy: 0.116363 	25
Accuracy: 0.118857 	26
Accuracy: 0.120427 	27
Accuracy: 0.121052 	28
Accuracy: 0.120692 	29
Accuracy: 0.119305 	30
Accuracy: 0.116845 	31
Accuracy: 0.114268 	32
Accuracy: 0.111751 	33
Accuracy: 0.109294 	34
Accuracy: 0.106899 	35
Accuracy: 0.10457 	36
Accuracy: 0.102303 	37
Accuracy: 0.100097 	38
Accuracy: 0.0979507 	39
Accuracy: 0.0958652 	40
Accuracy: 0.0938383 	41
Accuracy: 0.0918726 	42
Accuracy: 0.0899592 	43
Accuracy: 0.0881019 	44
Accuracy: 0.0863019 	45
Accuracy: 0.0845596 	46
Accuracy: 0.0828715 	47
Accuracy: 0.0812404 	48
Accuracy: 0.0796652 	49
Accuracy: 0.078145 	50
Accuracy: 0.0766797 	51
Accuracy: 0.0752707 	52
Accuracy: 0.0739106 	53
Accuracy: 0.0725968 	54
Accuracy: 0.0713339 	55
Accuracy: 0.0701248 	56
Accuracy: 0.0689675 	57
Accuracy: 0.0678574 	58
Accuracy: 0.0667919 	59
Accuracy: 0.0657761 	60
Accuracy: 0.0648012 	61
Accuracy: 0.063872 	62
Accuracy: 0.0629861 	63
Accuracy: 0.0621447 	64
Accuracy: 0.0613466 	65
Accuracy: 0.0605929 	66
Accuracy: 0.0598817 	67
Accuracy: 0.0591986 	68
Accuracy: 0.058541 	69
Accuracy: 0.0579086 	70
Accuracy: 0.0573011 	71
Accuracy: 0.0567181 	72
Accuracy: 0.0561595 	73
Accuracy: 0.0556248 	74
Accuracy: 0.0551139 	75
Accuracy: 0.0546264 	76
Accuracy: 0.054162 	77
Accuracy: 0.0537204 	78
Accuracy: 0.0533014 	79
Accuracy: 0.0529046 	80
Accuracy: 0.0525299 	81
Accuracy: 0.0521692 	82
Accuracy: 0.0516918 	83
Accuracy: 0.0511635 	84
Accuracy: 0.0506192 	85
Accuracy: 0.0500452 	86
Accuracy: 0.0494164 	87
Accuracy: 0.0487848 	88
Accuracy: 0.0481636 	89
Accuracy: 0.047524 	90
Accuracy: 0.0468534 	91
Accuracy: 0.0461369 	92
Accuracy: 0.0453858 	93
Accuracy: 0.0446259 	94
Accuracy: 0.0438645 	95
Accuracy: 0.0430671 	96
Accuracy: 0.042321 	97
Accuracy: 0.0415712 	98
Accuracy: 0.0408037 	99
[(0.070476457, 38), (0.12105218, 28)]
[1, 0, 3, 2]
Accuracy: 0.0407804
Accuracy: 0.0334014
Accuracy: 0.026179
Accuracy: 0.019185
Final bead error: 0.019185
Accuracy: 0.0249412
Accuracy: 0.0200601
Final bead error: 0.0200601
[True, True, True, True, True]
Accuracy: 0.0411401 	0
Accuracy: 0.0411488 	1
Accuracy: 0.0411612 	2
Accuracy: 0.0411768 	3
Accuracy: 0.0411952 	4
Accuracy: 0.0412158 	5
Accuracy: 0.0412382 	6
Accuracy: 0.041262 	7
Accuracy: 0.0412866 	8
Accuracy: 0.0413118 	9
Accuracy: 0.041337 	10
Accuracy: 0.0413618 	11
Accuracy: 0.041386 	12
Accuracy: 0.0414089 	13
Accuracy: 0.0414303 	14
Accuracy: 0.0414499 	15
Accuracy: 0.0414672 	16
Accuracy: 0.0414819 	17
Accuracy: 0.0414937 	18
Accuracy: 0.0415023 	19
Accuracy: 0.0415072 	20
Accuracy: 0.0415084 	21
Accuracy: 0.0415053 	22
Accuracy: 0.0414978 	23
Accuracy: 0.0414857 	24
Accuracy: 0.0414686 	25
Accuracy: 0.0414462 	26
Accuracy: 0.0414185 	27
Accuracy: 0.041385 	28
Accuracy: 0.0413457 	29
Accuracy: 0.0413004 	30
Accuracy: 0.0412488 	31
Accuracy: 0.0411907 	32
Accuracy: 0.0411261 	33
Accuracy: 0.0410547 	34
Accuracy: 0.0409765 	35
Accuracy: 0.0408912 	36
Accuracy: 0.0407989 	37
Accuracy: 0.0406993 	38
Accuracy: 0.0405924 	39
Accuracy: 0.0404782 	40
Accuracy: 0.0403564 	41
Accuracy: 0.0402273 	42
Accuracy: 0.0400906 	43
Accuracy: 0.0399463 	44
Accuracy: 0.0397944 	45
Accuracy: 0.039635 	46
Accuracy: 0.0394681 	47
Accuracy: 0.0392936 	48
Accuracy: 0.0391117 	49
Accuracy: 0.0389223 	50
Accuracy: 0.0387256 	51
Accuracy: 0.0385216 	52
Accuracy: 0.0383104 	53
Accuracy: 0.0380922 	54
Accuracy: 0.0378671 	55
Accuracy: 0.0376351 	56
Accuracy: 0.0373965 	57
Accuracy: 0.0371513 	58
Accuracy: 0.0368999 	59
Accuracy: 0.0366424 	60
Accuracy: 0.036379 	61
Accuracy: 0.0361099 	62
Accuracy: 0.0358353 	63
Accuracy: 0.0355556 	64
Accuracy: 0.0352709 	65
Accuracy: 0.0349816 	66
Accuracy: 0.0346879 	67
Accuracy: 0.0343902 	68
Accuracy: 0.0340887 	69
Accuracy: 0.033784 	70
Accuracy: 0.0334762 	71
Accuracy: 0.0331657 	72
Accuracy: 0.032853 	73
Accuracy: 0.0325384 	74
Accuracy: 0.0322224 	75
Accuracy: 0.0319054 	76
Accuracy: 0.0315879 	77
Accuracy: 0.0312702 	78
Accuracy: 0.030953 	79
Accuracy: 0.0306366 	80
Accuracy: 0.0303216 	81
Accuracy: 0.0300086 	82
Accuracy: 0.0296433 	83
Accuracy: 0.0291349 	84
Accuracy: 0.0285484 	85
Accuracy: 0.0279196 	86
Accuracy: 0.0272737 	87
Accuracy: 0.0266292 	88
Accuracy: 0.0259948 	89
Accuracy: 0.0253775 	90
Accuracy: 0.0247797 	91
Accuracy: 0.0242044 	92
Accuracy: 0.0236508 	93
Accuracy: 0.0231217 	94
Accuracy: 0.0226194 	95
Accuracy: 0.0221354 	96
Accuracy: 0.0216682 	97
Accuracy: 0.0212177 	98
Accuracy: 0.0207859 	99
Accuracy: 0.0192428 	0
Accuracy: 0.0193128 	1
Accuracy: 0.0193863 	2
Accuracy: 0.0194632 	3
Accuracy: 0.0195433 	4
Accuracy: 0.0196264 	5
Accuracy: 0.0197123 	6
Accuracy: 0.0198008 	7
Accuracy: 0.0198918 	8
Accuracy: 0.019985 	9
Accuracy: 0.0200802 	10
Accuracy: 0.0201773 	11
Accuracy: 0.020276 	12
Accuracy: 0.0203761 	13
Accuracy: 0.0204775 	14
Accuracy: 0.0205798 	15
Accuracy: 0.020683 	16
Accuracy: 0.0207868 	17
Accuracy: 0.0208909 	18
Accuracy: 0.0209953 	19
Accuracy: 0.0210995 	20
Accuracy: 0.0212036 	21
Accuracy: 0.0213071 	22
Accuracy: 0.02141 	23
Accuracy: 0.021512 	24
Accuracy: 0.021613 	25
Accuracy: 0.0217126 	26
Accuracy: 0.0218107 	27
Accuracy: 0.0219071 	28
Accuracy: 0.0220015 	29
Accuracy: 0.0220938 	30
Accuracy: 0.0221838 	31
Accuracy: 0.0222713 	32
Accuracy: 0.0223561 	33
Accuracy: 0.0224379 	34
Accuracy: 0.0225166 	35
Accuracy: 0.0225921 	36
Accuracy: 0.0226641 	37
Accuracy: 0.0227325 	38
Accuracy: 0.022797 	39
Accuracy: 0.0228576 	40
Accuracy: 0.022914 	41
Accuracy: 0.0229661 	42
Accuracy: 0.0230138 	43
Accuracy: 0.0230569 	44
Accuracy: 0.0230953 	45
Accuracy: 0.0231288 	46
Accuracy: 0.0231573 	47
Accuracy: 0.0231807 	48
Accuracy: 0.0231989 	49
Accuracy: 0.0232118 	50
Accuracy: 0.0232193 	51
Accuracy: 0.0232214 	52
Accuracy: 0.0232178 	53
Accuracy: 0.0232087 	54
Accuracy: 0.0231939 	55
Accuracy: 0.0231734 	56
Accuracy: 0.0231471 	57
Accuracy: 0.0231151 	58
Accuracy: 0.0230773 	59
Accuracy: 0.0230337 	60
Accuracy: 0.0229843 	61
Accuracy: 0.0229292 	62
Accuracy: 0.0228684 	63
Accuracy: 0.022802 	64
Accuracy: 0.02273 	65
Accuracy: 0.0226524 	66
Accuracy: 0.0225695 	67
Accuracy: 0.0224813 	68
Accuracy: 0.022388 	69
Accuracy: 0.0222896 	70
Accuracy: 0.0221863 	71
Accuracy: 0.0220784 	72
Accuracy: 0.0219659 	73
Accuracy: 0.0218492 	74
Accuracy: 0.0217285 	75
Accuracy: 0.0216039 	76
Accuracy: 0.0214757 	77
Accuracy: 0.0213443 	78
Accuracy: 0.02121 	79
Accuracy: 0.021073 	80
Accuracy: 0.0209338 	81
Accuracy: 0.0207926 	82
Accuracy: 0.0206499 	83
Accuracy: 0.0205061 	84
Accuracy: 0.0203616 	85
Accuracy: 0.0202169 	86
Accuracy: 0.0200724 	87
Accuracy: 0.0199288 	88
Accuracy: 0.0197864 	89
Accuracy: 0.0196459 	90
Accuracy: 0.0195135 	91
Accuracy: 0.0193943 	92
Accuracy: 0.0192877 	93
Accuracy: 0.0191969 	94
Accuracy: 0.0191196 	95
Accuracy: 0.0190581 	96
Accuracy: 0.0190106 	97
Accuracy: 0.0189779 	98
Accuracy: 0.0189572 	99
Accuracy: 0.020243 	0
Accuracy: 0.0202141 	1
Accuracy: 0.020186 	2
Accuracy: 0.0201585 	3
Accuracy: 0.0201317 	4
Accuracy: 0.0201062 	5
Accuracy: 0.0200812 	6
Accuracy: 0.0200578 	7
Accuracy: 0.0200358 	8
Accuracy: 0.0200146 	9
Accuracy: 0.0199938 	10
Accuracy: 0.0199739 	11
Accuracy: 0.0199544 	12
Accuracy: 0.0199357 	13
Accuracy: 0.0199173 	14
Accuracy: 0.0198992 	15
Accuracy: 0.0198819 	16
Accuracy: 0.0198652 	17
Accuracy: 0.0198489 	18
Accuracy: 0.0198335 	19
Accuracy: 0.0198186 	20
Accuracy: 0.0198045 	21
Accuracy: 0.0197918 	22
Accuracy: 0.0197801 	23
Accuracy: 0.0197691 	24
Accuracy: 0.0197585 	25
Accuracy: 0.0197483 	26
Accuracy: 0.0197383 	27
Accuracy: 0.0197289 	28
Accuracy: 0.01972 	29
Accuracy: 0.019712 	30
Accuracy: 0.0197044 	31
Accuracy: 0.0196974 	32
Accuracy: 0.0196912 	33
Accuracy: 0.0196855 	34
Accuracy: 0.0196801 	35
Accuracy: 0.0196758 	36
Accuracy: 0.0196719 	37
Accuracy: 0.019669 	38
Accuracy: 0.0196668 	39
Accuracy: 0.019665 	40
Accuracy: 0.0196641 	41
Accuracy: 0.0196645 	42
Accuracy: 0.0196659 	43
Accuracy: 0.0196682 	44
Accuracy: 0.0196706 	45
Accuracy: 0.019673 	46
Accuracy: 0.0196758 	47
Accuracy: 0.0196794 	48
Accuracy: 0.0196836 	49
Accuracy: 0.0196884 	50
Accuracy: 0.019694 	51
Accuracy: 0.0197002 	52
Accuracy: 0.0197065 	53
Accuracy: 0.0197135 	54
Accuracy: 0.0197212 	55
Accuracy: 0.0197296 	56
Accuracy: 0.0197385 	57
Accuracy: 0.0197477 	58
Accuracy: 0.0197575 	59
Accuracy: 0.0197677 	60
Accuracy: 0.0197782 	61
Accuracy: 0.0197896 	62
Accuracy: 0.0198019 	63
Accuracy: 0.0198146 	64
Accuracy: 0.0198282 	65
Accuracy: 0.0198426 	66
Accuracy: 0.0198575 	67
Accuracy: 0.0198728 	68
Accuracy: 0.0198893 	69
Accuracy: 0.0199069 	70
Accuracy: 0.0199253 	71
Accuracy: 0.0199445 	72
Accuracy: 0.0199643 	73
Accuracy: 0.0199849 	74
Accuracy: 0.0200064 	75
Accuracy: 0.0200287 	76
Accuracy: 0.0200517 	77
Accuracy: 0.0200754 	78
Accuracy: 0.0201004 	79
Accuracy: 0.0201258 	80
Accuracy: 0.0201523 	81
Accuracy: 0.02018 	82
Accuracy: 0.0202086 	83
Accuracy: 0.0202378 	84
Accuracy: 0.0202677 	85
Accuracy: 0.0202984 	86
Accuracy: 0.0203299 	87
Accuracy: 0.0203623 	88
Accuracy: 0.0203956 	89
Accuracy: 0.0204296 	90
Accuracy: 0.0204648 	91
Accuracy: 0.0205005 	92
Accuracy: 0.020537 	93
Accuracy: 0.0205742 	94
Accuracy: 0.0206123 	95
Accuracy: 0.0206512 	96
Accuracy: 0.0206912 	97
Accuracy: 0.0207322 	98
Accuracy: 0.0207743 	99
Accuracy: 0.0216279 	0
Accuracy: 0.023445 	1
Accuracy: 0.0257038 	2
Accuracy: 0.0282211 	3
Accuracy: 0.0309256 	4
Accuracy: 0.033755 	5
Accuracy: 0.0366221 	6
Accuracy: 0.0395311 	7
Accuracy: 0.0424623 	8
Accuracy: 0.0453361 	9
Accuracy: 0.0480925 	10
Accuracy: 0.0506435 	11
Accuracy: 0.0529263 	12
Accuracy: 0.0549498 	13
Accuracy: 0.0566666 	14
Accuracy: 0.0579023 	15
Accuracy: 0.0586454 	16
Accuracy: 0.0588588 	17
Accuracy: 0.0586227 	18
Accuracy: 0.0583065 	19
Accuracy: 0.0579959 	20
Accuracy: 0.0576909 	21
Accuracy: 0.0573913 	22
Accuracy: 0.0570968 	23
Accuracy: 0.0568078 	24
Accuracy: 0.0565245 	25
Accuracy: 0.0562474 	26
Accuracy: 0.0559767 	27
Accuracy: 0.0557123 	28
Accuracy: 0.0554538 	29
Accuracy: 0.0552014 	30
Accuracy: 0.054956 	31
Accuracy: 0.0547173 	32
Accuracy: 0.0544854 	33
Accuracy: 0.0542602 	34
Accuracy: 0.054043 	35
Accuracy: 0.0538327 	36
Accuracy: 0.0536307 	37
Accuracy: 0.0534363 	38
Accuracy: 0.0532488 	39
Accuracy: 0.053068 	40
Accuracy: 0.0528937 	41
Accuracy: 0.052727 	42
Accuracy: 0.0525675 	43
Accuracy: 0.0524138 	44
Accuracy: 0.0522677 	45
Accuracy: 0.0521299 	46
Accuracy: 0.0519991 	47
Accuracy: 0.0518783 	48
Accuracy: 0.0517653 	49
Accuracy: 0.0516621 	50
Accuracy: 0.0515677 	51
Accuracy: 0.0514805 	52
Accuracy: 0.0513996 	53
Accuracy: 0.0513273 	54
Accuracy: 0.0512636 	55
Accuracy: 0.0512084 	56
Accuracy: 0.0511608 	57
Accuracy: 0.0511205 	58
Accuracy: 0.0510858 	59
Accuracy: 0.0510566 	60
Accuracy: 0.0510293 	61
Accuracy: 0.0510036 	62
Accuracy: 0.0509793 	63
Accuracy: 0.0509566 	64
Accuracy: 0.0509354 	65
Accuracy: 0.0509156 	66
Accuracy: 0.0508973 	67
Accuracy: 0.0508805 	68
Accuracy: 0.050865 	69
Accuracy: 0.050851 	70
Accuracy: 0.0508384 	71
Accuracy: 0.0508272 	72
Accuracy: 0.0508173 	73
Accuracy: 0.0508089 	74
Accuracy: 0.050802 	75
Accuracy: 0.0507906 	76
Accuracy: 0.0507231 	77
Accuracy: 0.0505589 	78
Accuracy: 0.0503091 	79
Accuracy: 0.0499933 	80
Accuracy: 0.0496452 	81
Accuracy: 0.0492533 	82
Accuracy: 0.0488411 	83
Accuracy: 0.0484305 	84
Accuracy: 0.0479977 	85
Accuracy: 0.0475396 	86
Accuracy: 0.0470824 	87
Accuracy: 0.0466181 	88
Accuracy: 0.0461474 	89
Accuracy: 0.0456183 	90
Accuracy: 0.0450643 	91
Accuracy: 0.0445212 	92
Accuracy: 0.0439625 	93
Accuracy: 0.0434002 	94
Accuracy: 0.0428441 	95
Accuracy: 0.0422689 	96
Accuracy: 0.0416949 	97
Accuracy: 0.0411297 	98
Accuracy: 0.0405514 	99
[(0.041508354, 21), (0.023221364, 52), (0.020774256, 99), (0.05885876, 17)]
[4, 3]
Accuracy: 0.0486189
Accuracy: 0.0220093
Accuracy: 0.0197364
Final bead error: 0.0197364
[True, True, True, True, True, True]
Accuracy: 0.0204262 	0
Accuracy: 0.0204177 	1
Accuracy: 0.0204092 	2
Accuracy: 0.0204009 	3
Accuracy: 0.0203926 	4
Accuracy: 0.0203844 	5
Accuracy: 0.0203763 	6
Accuracy: 0.0203682 	7
Accuracy: 0.0203603 	8
Accuracy: 0.0203524 	9
Accuracy: 0.0203445 	10
Accuracy: 0.0203368 	11
Accuracy: 0.0203291 	12
Accuracy: 0.0203214 	13
Accuracy: 0.0203139 	14
Accuracy: 0.0203064 	15
Accuracy: 0.0202991 	16
Accuracy: 0.0202918 	17
Accuracy: 0.0202846 	18
Accuracy: 0.0202775 	19
Accuracy: 0.0202704 	20
Accuracy: 0.0202634 	21
Accuracy: 0.0202564 	22
Accuracy: 0.0202496 	23
Accuracy: 0.0202428 	24
Accuracy: 0.0202361 	25
Accuracy: 0.0202294 	26
Accuracy: 0.0202228 	27
Accuracy: 0.0202163 	28
Accuracy: 0.0202099 	29
Accuracy: 0.0202035 	30
Accuracy: 0.0201971 	31
Accuracy: 0.0201908 	32
Accuracy: 0.0201846 	33
Accuracy: 0.0201785 	34
Accuracy: 0.0201724 	35
Accuracy: 0.0201663 	36
Accuracy: 0.0201604 	37
Accuracy: 0.0201545 	38
Accuracy: 0.0201486 	39
Accuracy: 0.020143 	40
Accuracy: 0.0201378 	41
Accuracy: 0.0201326 	42
Accuracy: 0.0201275 	43
Accuracy: 0.0201226 	44
Accuracy: 0.0201177 	45
Accuracy: 0.0201129 	46
Accuracy: 0.0201082 	47
Accuracy: 0.0201035 	48
Accuracy: 0.0200989 	49
Accuracy: 0.0200944 	50
Accuracy: 0.0200899 	51
Accuracy: 0.0200855 	52
Accuracy: 0.0200812 	53
Accuracy: 0.020077 	54
Accuracy: 0.0200728 	55
Accuracy: 0.0200687 	56
Accuracy: 0.0200647 	57
Accuracy: 0.0200607 	58
Accuracy: 0.0200567 	59
Accuracy: 0.0200529 	60
Accuracy: 0.0200491 	61
Accuracy: 0.0200454 	62
Accuracy: 0.0200417 	63
Accuracy: 0.0200381 	64
Accuracy: 0.0200346 	65
Accuracy: 0.0200311 	66
Accuracy: 0.0200277 	67
Accuracy: 0.0200243 	68
Accuracy: 0.020021 	69
Accuracy: 0.0200177 	70
Accuracy: 0.0200145 	71
Accuracy: 0.0200113 	72
Accuracy: 0.0200082 	73
Accuracy: 0.0200051 	74
Accuracy: 0.0200021 	75
Accuracy: 0.0199992 	76
Accuracy: 0.0199963 	77
Accuracy: 0.0199935 	78
Accuracy: 0.0199908 	79
Accuracy: 0.0199881 	80
Accuracy: 0.0199855 	81
Accuracy: 0.0199829 	82
Accuracy: 0.0199804 	83
Accuracy: 0.0199779 	84
Accuracy: 0.0199755 	85
Accuracy: 0.0199732 	86
Accuracy: 0.0199709 	87
Accuracy: 0.0199686 	88
Accuracy: 0.0199665 	89
Accuracy: 0.0199643 	90
Accuracy: 0.0199623 	91
Accuracy: 0.0199602 	92
Accuracy: 0.0199583 	93
Accuracy: 0.0199563 	94
Accuracy: 0.0199545 	95
Accuracy: 0.0199526 	96
Accuracy: 0.0199509 	97
Accuracy: 0.0199491 	98
Accuracy: 0.0199474 	99
Accuracy: 0.0180867 	0
Accuracy: 0.0198475 	1
Accuracy: 0.022153 	2
Accuracy: 0.0247582 	3
Accuracy: 0.027622 	4
Accuracy: 0.0306812 	5
Accuracy: 0.0338234 	6
Accuracy: 0.0369779 	7
Accuracy: 0.0400631 	8
Accuracy: 0.0430925 	9
Accuracy: 0.0460233 	10
Accuracy: 0.0487659 	11
Accuracy: 0.0512479 	12
Accuracy: 0.0533827 	13
Accuracy: 0.0551471 	14
Accuracy: 0.0564141 	15
Accuracy: 0.0571515 	16
Accuracy: 0.057423 	17
Accuracy: 0.0572233 	18
Accuracy: 0.0569041 	19
Accuracy: 0.0565896 	20
Accuracy: 0.0562812 	21
Accuracy: 0.0559787 	22
Accuracy: 0.0556821 	23
Accuracy: 0.0553909 	24
Accuracy: 0.0551051 	25
Accuracy: 0.0548243 	26
Accuracy: 0.0545484 	27
Accuracy: 0.0542785 	28
Accuracy: 0.054014 	29
Accuracy: 0.0537547 	30
Accuracy: 0.0535004 	31
Accuracy: 0.053252 	32
Accuracy: 0.0530098 	33
Accuracy: 0.0527738 	34
Accuracy: 0.0525445 	35
Accuracy: 0.0523207 	36
Accuracy: 0.0521037 	37
Accuracy: 0.0518933 	38
Accuracy: 0.0516908 	39
Accuracy: 0.0514947 	40
Accuracy: 0.0513056 	41
Accuracy: 0.0511224 	42
Accuracy: 0.0509466 	43
Accuracy: 0.0507769 	44
Accuracy: 0.0506153 	45
Accuracy: 0.0504611 	46
Accuracy: 0.0503143 	47
Accuracy: 0.0501757 	48
Accuracy: 0.0500456 	49
Accuracy: 0.0499232 	50
Accuracy: 0.0498069 	51
Accuracy: 0.0496977 	52
Accuracy: 0.0495958 	53
Accuracy: 0.0494996 	54
Accuracy: 0.0494098 	55
Accuracy: 0.0493243 	56
Accuracy: 0.0492477 	57
Accuracy: 0.0491784 	58
Accuracy: 0.0491116 	59
Accuracy: 0.0490463 	60
Accuracy: 0.0489824 	61
Accuracy: 0.0489201 	62
Accuracy: 0.0488592 	63
Accuracy: 0.0487998 	64
Accuracy: 0.0487419 	65
Accuracy: 0.0486854 	66
Accuracy: 0.0486303 	67
Accuracy: 0.0485767 	68
Accuracy: 0.0485249 	69
Accuracy: 0.0484751 	70
Accuracy: 0.0484124 	71
Accuracy: 0.0483259 	72
Accuracy: 0.0482018 	73
Accuracy: 0.0480627 	74
Accuracy: 0.0479204 	75
Accuracy: 0.0477589 	76
Accuracy: 0.0475844 	77
Accuracy: 0.0473904 	78
Accuracy: 0.0471761 	79
Accuracy: 0.0469382 	80
Accuracy: 0.0466631 	81
Accuracy: 0.0463692 	82
Accuracy: 0.0460678 	83
Accuracy: 0.0457606 	84
Accuracy: 0.0454464 	85
Accuracy: 0.0451309 	86
Accuracy: 0.0448308 	87
Accuracy: 0.044518 	88
Accuracy: 0.0441982 	89
Accuracy: 0.0438879 	90
Accuracy: 0.0435804 	91
Accuracy: 0.0432674 	92
Accuracy: 0.0429265 	93
Accuracy: 0.0425814 	94
Accuracy: 0.042239 	95
Accuracy: 0.0418846 	96
Accuracy: 0.0415159 	97
Accuracy: 0.0411496 	98
Accuracy: 0.0407783 	99
[(0.020426163, 0), (0.057423033, 17)]
[5, 4]
Accuracy: 0.0479585
Accuracy: 0.0230529
Accuracy: 0.0192294
Final bead error: 0.0192294
[True, True, True, True, True, True, True]
Accuracy: 0.0189601 	0
Accuracy: 0.0189481 	1
Accuracy: 0.0189362 	2
Accuracy: 0.0189243 	3
Accuracy: 0.0189123 	4
Accuracy: 0.0189004 	5
Accuracy: 0.0188885 	6
Accuracy: 0.0188766 	7
Accuracy: 0.0188648 	8
Accuracy: 0.0188529 	9
Accuracy: 0.0188411 	10
Accuracy: 0.0188293 	11
Accuracy: 0.0188175 	12
Accuracy: 0.0188057 	13
Accuracy: 0.0187939 	14
Accuracy: 0.0187822 	15
Accuracy: 0.0187719 	16
Accuracy: 0.0187618 	17
Accuracy: 0.0187518 	18
Accuracy: 0.0187418 	19
Accuracy: 0.0187318 	20
Accuracy: 0.0187218 	21
Accuracy: 0.0187118 	22
Accuracy: 0.0187018 	23
Accuracy: 0.0186918 	24
Accuracy: 0.0186819 	25
Accuracy: 0.018672 	26
Accuracy: 0.018662 	27
Accuracy: 0.0186521 	28
Accuracy: 0.0186422 	29
Accuracy: 0.0186323 	30
Accuracy: 0.0186224 	31
Accuracy: 0.0186125 	32
Accuracy: 0.0186027 	33
Accuracy: 0.0185928 	34
Accuracy: 0.018583 	35
Accuracy: 0.0185732 	36
Accuracy: 0.0185634 	37
Accuracy: 0.0185535 	38
Accuracy: 0.0185437 	39
Accuracy: 0.018534 	40
Accuracy: 0.0185242 	41
Accuracy: 0.0185144 	42
Accuracy: 0.0185047 	43
Accuracy: 0.0184949 	44
Accuracy: 0.0184852 	45
Accuracy: 0.0184755 	46
Accuracy: 0.0184658 	47
Accuracy: 0.0184561 	48
Accuracy: 0.0184464 	49
Accuracy: 0.0184368 	50
Accuracy: 0.0184271 	51
Accuracy: 0.0184174 	52
Accuracy: 0.0184078 	53
Accuracy: 0.0183982 	54
Accuracy: 0.0183886 	55
Accuracy: 0.018379 	56
Accuracy: 0.0183694 	57
Accuracy: 0.0183598 	58
Accuracy: 0.0183502 	59
Accuracy: 0.0183406 	60
Accuracy: 0.0183311 	61
Accuracy: 0.0183216 	62
Accuracy: 0.018312 	63
Accuracy: 0.0183025 	64
Accuracy: 0.018293 	65
Accuracy: 0.0182835 	66
Accuracy: 0.018274 	67
Accuracy: 0.0182646 	68
Accuracy: 0.0182551 	69
Accuracy: 0.0182456 	70
Accuracy: 0.0182362 	71
Accuracy: 0.0182268 	72
Accuracy: 0.0182174 	73
Accuracy: 0.018208 	74
Accuracy: 0.0181986 	75
Accuracy: 0.0181892 	76
Accuracy: 0.0181799 	77
Accuracy: 0.0181705 	78
Accuracy: 0.0181612 	79
Accuracy: 0.0181519 	80
Accuracy: 0.0181425 	81
Accuracy: 0.0181332 	82
Accuracy: 0.018124 	83
Accuracy: 0.0181147 	84
Accuracy: 0.0181054 	85
Accuracy: 0.0180961 	86
Accuracy: 0.0180869 	87
Accuracy: 0.0180777 	88
Accuracy: 0.0180684 	89
Accuracy: 0.0180592 	90
Accuracy: 0.01805 	91
Accuracy: 0.0180408 	92
Accuracy: 0.0180316 	93
Accuracy: 0.0180224 	94
Accuracy: 0.0180133 	95
Accuracy: 0.0180041 	96
Accuracy: 0.017995 	97
Accuracy: 0.0179858 	98
Accuracy: 0.0179767 	99
Accuracy: 0.01615 	0
Accuracy: 0.0178266 	1
Accuracy: 0.0200974 	2
Accuracy: 0.0226827 	3
Accuracy: 0.0254772 	4
Accuracy: 0.0284133 	5
Accuracy: 0.0314561 	6
Accuracy: 0.0345471 	7
Accuracy: 0.0375681 	8
Accuracy: 0.0404295 	9
Accuracy: 0.0430665 	10
Accuracy: 0.0454956 	11
Accuracy: 0.0477461 	12
Accuracy: 0.0497094 	13
Accuracy: 0.0512788 	14
Accuracy: 0.0524421 	15
Accuracy: 0.053263 	16
Accuracy: 0.0536616 	17
Accuracy: 0.0535358 	18
Accuracy: 0.0532368 	19
Accuracy: 0.0529422 	20
Accuracy: 0.0526524 	21
Accuracy: 0.0523681 	22
Accuracy: 0.0520891 	23
Accuracy: 0.0518156 	24
Accuracy: 0.0515469 	25
Accuracy: 0.0512834 	26
Accuracy: 0.0510269 	27
Accuracy: 0.0507774 	28
Accuracy: 0.0505338 	29
Accuracy: 0.050296 	30
Accuracy: 0.0500629 	31
Accuracy: 0.0498346 	32
Accuracy: 0.0496135 	33
Accuracy: 0.0493981 	34
Accuracy: 0.0491867 	35
Accuracy: 0.0489802 	36
Accuracy: 0.048779 	37
Accuracy: 0.0485832 	38
Accuracy: 0.0483933 	39
Accuracy: 0.0482098 	40
Accuracy: 0.0480323 	41
Accuracy: 0.0478619 	42
Accuracy: 0.0476979 	43
Accuracy: 0.0475399 	44
Accuracy: 0.0473891 	45
Accuracy: 0.0472437 	46
Accuracy: 0.0471043 	47
Accuracy: 0.0469714 	48
Accuracy: 0.0468434 	49
Accuracy: 0.0467204 	50
Accuracy: 0.0466019 	51
Accuracy: 0.0464895 	52
Accuracy: 0.0463852 	53
Accuracy: 0.0462862 	54
Accuracy: 0.0461939 	55
Accuracy: 0.0461092 	56
Accuracy: 0.0460303 	57
Accuracy: 0.0459558 	58
Accuracy: 0.0458829 	59
Accuracy: 0.0458117 	60
Accuracy: 0.0457421 	61
Accuracy: 0.0456741 	62
Accuracy: 0.0456079 	63
Accuracy: 0.0455439 	64
Accuracy: 0.0454775 	65
Accuracy: 0.0453762 	66
Accuracy: 0.0452523 	67
Accuracy: 0.0451279 	68
Accuracy: 0.0449811 	69
Accuracy: 0.0448031 	70
Accuracy: 0.0446064 	71
Accuracy: 0.0444051 	72
Accuracy: 0.0442105 	73
Accuracy: 0.0440066 	74
Accuracy: 0.043788 	75
Accuracy: 0.0435575 	76
Accuracy: 0.0433105 	77
Accuracy: 0.0430393 	78
Accuracy: 0.0427574 	79
Accuracy: 0.0424637 	80
Accuracy: 0.0421665 	81
Accuracy: 0.0418635 	82
Accuracy: 0.0415567 	83
Accuracy: 0.0412515 	84
Accuracy: 0.0409553 	85
Accuracy: 0.0406627 	86
Accuracy: 0.0403794 	87
Accuracy: 0.0401019 	88
Accuracy: 0.0398233 	89
Accuracy: 0.0395279 	90
Accuracy: 0.039236 	91
Accuracy: 0.0389528 	92
Accuracy: 0.0386699 	93
Accuracy: 0.0383831 	94
Accuracy: 0.0380893 	95
Accuracy: 0.0377936 	96
Accuracy: 0.0374934 	97
Accuracy: 0.037199 	98
Accuracy: 0.0369015 	99
[(0.018960103, 0), (0.053661644, 17)]
[6, 5]
Accuracy: 0.0442602
Accuracy: 0.0178528
Final bead error: 0.0178528
[True, True, True, True, True, True, True, True]
Accuracy: 0.0189386 	0
Accuracy: 0.0189393 	1
Accuracy: 0.01894 	2
Accuracy: 0.0189407 	3
Accuracy: 0.0189414 	4
Accuracy: 0.0189421 	5
Accuracy: 0.0189428 	6
Accuracy: 0.0189435 	7
Accuracy: 0.0189442 	8
Accuracy: 0.0189449 	9
Accuracy: 0.0189456 	10
Accuracy: 0.0189463 	11
Accuracy: 0.018947 	12
Accuracy: 0.0189477 	13
Accuracy: 0.0189484 	14
Accuracy: 0.0189492 	15
Accuracy: 0.0189499 	16
Accuracy: 0.0189508 	17
Accuracy: 0.0189516 	18
Accuracy: 0.0189524 	19
Accuracy: 0.0189532 	20
Accuracy: 0.0189541 	21
Accuracy: 0.0189549 	22
Accuracy: 0.0189557 	23
Accuracy: 0.0189565 	24
Accuracy: 0.0189573 	25
Accuracy: 0.0189582 	26
Accuracy: 0.0189591 	27
Accuracy: 0.01896 	28
Accuracy: 0.018961 	29
Accuracy: 0.0189619 	30
Accuracy: 0.0189628 	31
Accuracy: 0.0189638 	32
Accuracy: 0.0189647 	33
Accuracy: 0.0189657 	34
Accuracy: 0.0189666 	35
Accuracy: 0.0189676 	36
Accuracy: 0.0189685 	37
Accuracy: 0.0189695 	38
Accuracy: 0.0189704 	39
Accuracy: 0.0189713 	40
Accuracy: 0.0189723 	41
Accuracy: 0.0189732 	42
Accuracy: 0.0189742 	43
Accuracy: 0.0189751 	44
Accuracy: 0.0189761 	45
Accuracy: 0.018977 	46
Accuracy: 0.018978 	47
Accuracy: 0.0189789 	48
Accuracy: 0.0189799 	49
Accuracy: 0.0189809 	50
Accuracy: 0.0189819 	51
Accuracy: 0.0189829 	52
Accuracy: 0.0189839 	53
Accuracy: 0.0189848 	54
Accuracy: 0.0189858 	55
Accuracy: 0.0189869 	56
Accuracy: 0.0189879 	57
Accuracy: 0.0189889 	58
Accuracy: 0.0189899 	59
Accuracy: 0.018991 	60
Accuracy: 0.018992 	61
Accuracy: 0.0189931 	62
Accuracy: 0.0189942 	63
Accuracy: 0.0189952 	64
Accuracy: 0.0189963 	65
Accuracy: 0.0189973 	66
Accuracy: 0.0189985 	67
Accuracy: 0.0189996 	68
Accuracy: 0.0190007 	69
Accuracy: 0.0190019 	70
Accuracy: 0.019003 	71
Accuracy: 0.0190041 	72
Accuracy: 0.0190053 	73
Accuracy: 0.0190064 	74
Accuracy: 0.0190076 	75
Accuracy: 0.0190088 	76
Accuracy: 0.0190099 	77
Accuracy: 0.0190111 	78
Accuracy: 0.0190124 	79
Accuracy: 0.0190136 	80
Accuracy: 0.0190148 	81
Accuracy: 0.019016 	82
Accuracy: 0.0190172 	83
Accuracy: 0.0190185 	84
Accuracy: 0.0190197 	85
Accuracy: 0.0190209 	86
Accuracy: 0.0190221 	87
Accuracy: 0.0190234 	88
Accuracy: 0.0190246 	89
Accuracy: 0.0190259 	90
Accuracy: 0.0190272 	91
Accuracy: 0.0190285 	92
Accuracy: 0.0190298 	93
Accuracy: 0.0190311 	94
Accuracy: 0.0190325 	95
Accuracy: 0.0190338 	96
Accuracy: 0.0190352 	97
Accuracy: 0.0190365 	98
Accuracy: 0.0190378 	99
Accuracy: 0.018337 	0
Accuracy: 0.0201817 	1
Accuracy: 0.0225423 	2
Accuracy: 0.025204 	3
Accuracy: 0.0280912 	4
Accuracy: 0.031182 	5
Accuracy: 0.0343541 	6
Accuracy: 0.0375217 	7
Accuracy: 0.0406048 	8
Accuracy: 0.0435379 	9
Accuracy: 0.0462567 	10
Accuracy: 0.0487346 	11
Accuracy: 0.0510204 	12
Accuracy: 0.0529908 	13
Accuracy: 0.0546474 	14
Accuracy: 0.0559264 	15
Accuracy: 0.0566869 	16
Accuracy: 0.0568809 	17
Accuracy: 0.0566665 	18
Accuracy: 0.0563964 	19
Accuracy: 0.0561299 	20
Accuracy: 0.0558684 	21
Accuracy: 0.0556109 	22
Accuracy: 0.0553573 	23
Accuracy: 0.0551089 	24
Accuracy: 0.0548657 	25
Accuracy: 0.0546267 	26
Accuracy: 0.0543919 	27
Accuracy: 0.0541632 	28
Accuracy: 0.0539405 	29
Accuracy: 0.053723 	30
Accuracy: 0.0535098 	31
Accuracy: 0.0533013 	32
Accuracy: 0.0530992 	33
Accuracy: 0.0529021 	34
Accuracy: 0.0527091 	35
Accuracy: 0.0525215 	36
Accuracy: 0.0523376 	37
Accuracy: 0.0521577 	38
Accuracy: 0.0519846 	39
Accuracy: 0.0518175 	40
Accuracy: 0.0516546 	41
Accuracy: 0.0514968 	42
Accuracy: 0.0513446 	43
Accuracy: 0.0511973 	44
Accuracy: 0.0510556 	45
Accuracy: 0.0509189 	46
Accuracy: 0.0507884 	47
Accuracy: 0.0506639 	48
Accuracy: 0.0505451 	49
Accuracy: 0.0504344 	50
Accuracy: 0.0503303 	51
Accuracy: 0.0502318 	52
Accuracy: 0.0501406 	53
Accuracy: 0.0500559 	54
Accuracy: 0.0499775 	55
Accuracy: 0.0499039 	56
Accuracy: 0.0498295 	57
Accuracy: 0.0497378 	58
Accuracy: 0.0496343 	59
Accuracy: 0.0495157 	60
Accuracy: 0.049377 	61
Accuracy: 0.049216 	62
Accuracy: 0.0490435 	63
Accuracy: 0.0488743 	64
Accuracy: 0.0487045 	65
Accuracy: 0.0485366 	66
Accuracy: 0.0483662 	67
Accuracy: 0.0481912 	68
Accuracy: 0.0480109 	69
Accuracy: 0.0478178 	70
Accuracy: 0.0476075 	71
Accuracy: 0.0473925 	72
Accuracy: 0.047169 	73
Accuracy: 0.0469362 	74
Accuracy: 0.0466929 	75
Accuracy: 0.0464417 	76
Accuracy: 0.0461888 	77
Accuracy: 0.0459278 	78
Accuracy: 0.0456597 	79
Accuracy: 0.0453717 	80
Accuracy: 0.0450803 	81
Accuracy: 0.0447891 	82
Accuracy: 0.0444986 	83
Accuracy: 0.0442121 	84
Accuracy: 0.0439309 	85
Accuracy: 0.0436537 	86
Accuracy: 0.0433825 	87
Accuracy: 0.0431158 	88
Accuracy: 0.0428401 	89
Accuracy: 0.042573 	90
Accuracy: 0.0423012 	91
Accuracy: 0.042028 	92
Accuracy: 0.0417601 	93
Accuracy: 0.0414831 	94
Accuracy: 0.0412057 	95
Accuracy: 0.0409206 	96
Accuracy: 0.0406301 	97
Accuracy: 0.040345 	98
Accuracy: 0.0400644 	99
[(0.01903783, 99), (0.05688085, 17)]
[7, 6]
Accuracy: 0.0510139
Accuracy: 0.0274898
Accuracy: 0.0205312
Final bead error: 0.0205312
[True, True, True, True, True, True, True, True, True]
Accuracy: 0.017036 	0
Accuracy: 0.0170355 	1
Accuracy: 0.017035 	2
Accuracy: 0.0170345 	3
Accuracy: 0.0170339 	4
Accuracy: 0.0170334 	5
Accuracy: 0.0170328 	6
Accuracy: 0.0170323 	7
Accuracy: 0.0170318 	8
Accuracy: 0.0170312 	9
Accuracy: 0.0170307 	10
Accuracy: 0.0170301 	11
Accuracy: 0.0170296 	12
Accuracy: 0.017029 	13
Accuracy: 0.0170285 	14
Accuracy: 0.0170279 	15
Accuracy: 0.0170273 	16
Accuracy: 0.0170268 	17
Accuracy: 0.0170263 	18
Accuracy: 0.0170258 	19
Accuracy: 0.0170252 	20
Accuracy: 0.0170248 	21
Accuracy: 0.0170243 	22
Accuracy: 0.0170238 	23
Accuracy: 0.0170234 	24
Accuracy: 0.0170229 	25
Accuracy: 0.0170224 	26
Accuracy: 0.017022 	27
Accuracy: 0.0170215 	28
Accuracy: 0.0170211 	29
Accuracy: 0.0170207 	30
Accuracy: 0.0170202 	31
Accuracy: 0.0170198 	32
Accuracy: 0.0170194 	33
Accuracy: 0.017019 	34
Accuracy: 0.0170186 	35
Accuracy: 0.0170181 	36
Accuracy: 0.0170177 	37
Accuracy: 0.0170173 	38
Accuracy: 0.0170168 	39
Accuracy: 0.0170164 	40
Accuracy: 0.0170159 	41
Accuracy: 0.0170155 	42
Accuracy: 0.017015 	43
Accuracy: 0.0170146 	44
Accuracy: 0.0170141 	45
Accuracy: 0.0170137 	46
Accuracy: 0.0170133 	47
Accuracy: 0.0170129 	48
Accuracy: 0.0170124 	49
Accuracy: 0.017012 	50
Accuracy: 0.0170116 	51
Accuracy: 0.0170111 	52
Accuracy: 0.0170107 	53
Accuracy: 0.0170103 	54
Accuracy: 0.0170098 	55
Accuracy: 0.0170094 	56
Accuracy: 0.0170089 	57
Accuracy: 0.0170085 	58
Accuracy: 0.017008 	59
Accuracy: 0.0170076 	60
Accuracy: 0.0170072 	61
Accuracy: 0.0170067 	62
Accuracy: 0.0170063 	63
Accuracy: 0.0170059 	64
Accuracy: 0.0170054 	65
Accuracy: 0.017005 	66
Accuracy: 0.0170045 	67
Accuracy: 0.0170041 	68
Accuracy: 0.0170036 	69
Accuracy: 0.0170032 	70
Accuracy: 0.0170027 	71
Accuracy: 0.0170023 	72
Accuracy: 0.0170018 	73
Accuracy: 0.0170014 	74
Accuracy: 0.0170009 	75
Accuracy: 0.0170004 	76
Accuracy: 0.017 	77
Accuracy: 0.0169995 	78
Accuracy: 0.016999 	79
Accuracy: 0.0169985 	80
Accuracy: 0.016998 	81
Accuracy: 0.0169975 	82
Accuracy: 0.0169971 	83
Accuracy: 0.0169966 	84
Accuracy: 0.0169961 	85
Accuracy: 0.0169956 	86
Accuracy: 0.016995 	87
Accuracy: 0.0169945 	88
Accuracy: 0.016994 	89
Accuracy: 0.0169935 	90
Accuracy: 0.016993 	91
Accuracy: 0.0169925 	92
Accuracy: 0.016992 	93
Accuracy: 0.0169914 	94
Accuracy: 0.0169909 	95
Accuracy: 0.0169904 	96
Accuracy: 0.0169899 	97
Accuracy: 0.0169894 	98
Accuracy: 0.0169889 	99
Accuracy: 0.0181507 	0
Accuracy: 0.0200463 	1
Accuracy: 0.0223601 	2
Accuracy: 0.024976 	3
Accuracy: 0.0278396 	4
Accuracy: 0.0308597 	5
Accuracy: 0.0339306 	6
Accuracy: 0.0369773 	7
Accuracy: 0.0399707 	8
Accuracy: 0.0429011 	9
Accuracy: 0.0457309 	10
Accuracy: 0.0484259 	11
Accuracy: 0.0508219 	12
Accuracy: 0.0528088 	13
Accuracy: 0.0543954 	14
Accuracy: 0.0555964 	15
Accuracy: 0.0563307 	16
Accuracy: 0.0565186 	17
Accuracy: 0.0562902 	18
Accuracy: 0.0560475 	19
Accuracy: 0.0558078 	20
Accuracy: 0.0555719 	21
Accuracy: 0.0553398 	22
Accuracy: 0.0551109 	23
Accuracy: 0.0548858 	24
Accuracy: 0.054664 	25
Accuracy: 0.0544459 	26
Accuracy: 0.0542317 	27
Accuracy: 0.0540206 	28
Accuracy: 0.0538126 	29
Accuracy: 0.0536088 	30
Accuracy: 0.0534085 	31
Accuracy: 0.0532118 	32
Accuracy: 0.0530183 	33
Accuracy: 0.052828 	34
Accuracy: 0.0526424 	35
Accuracy: 0.0524617 	36
Accuracy: 0.0522862 	37
Accuracy: 0.052115 	38
Accuracy: 0.0519491 	39
Accuracy: 0.0517886 	40
Accuracy: 0.051634 	41
Accuracy: 0.0514864 	42
Accuracy: 0.0513431 	43
Accuracy: 0.0512047 	44
Accuracy: 0.0510719 	45
Accuracy: 0.0509439 	46
Accuracy: 0.0508152 	47
Accuracy: 0.0506838 	48
Accuracy: 0.0505365 	49
Accuracy: 0.0503844 	50
Accuracy: 0.0502398 	51
Accuracy: 0.0500835 	52
Accuracy: 0.0499113 	53
Accuracy: 0.0497385 	54
Accuracy: 0.0495642 	55
Accuracy: 0.0493997 	56
Accuracy: 0.0492308 	57
Accuracy: 0.0490593 	58
Accuracy: 0.048881 	59
Accuracy: 0.0486841 	60
Accuracy: 0.0484749 	61
Accuracy: 0.0482626 	62
Accuracy: 0.04805 	63
Accuracy: 0.0478416 	64
Accuracy: 0.047631 	65
Accuracy: 0.0474125 	66
Accuracy: 0.0471871 	67
Accuracy: 0.0469599 	68
Accuracy: 0.0467368 	69
Accuracy: 0.0465145 	70
Accuracy: 0.0462771 	71
Accuracy: 0.0460396 	72
Accuracy: 0.0458039 	73
Accuracy: 0.0455677 	74
Accuracy: 0.0453256 	75
Accuracy: 0.0450873 	76
Accuracy: 0.0448477 	77
Accuracy: 0.0446057 	78
Accuracy: 0.0443649 	79
Accuracy: 0.0441266 	80
Accuracy: 0.0438921 	81
Accuracy: 0.0436552 	82
Accuracy: 0.0434172 	83
Accuracy: 0.0431749 	84
Accuracy: 0.0429262 	85
Accuracy: 0.0426798 	86
Accuracy: 0.0424364 	87
Accuracy: 0.0421897 	88
Accuracy: 0.0419356 	89
Accuracy: 0.0416823 	90
Accuracy: 0.0414265 	91
Accuracy: 0.0411678 	92
Accuracy: 0.0409098 	93
Accuracy: 0.0406549 	94
Accuracy: 0.0404005 	95
Accuracy: 0.0401488 	96
Accuracy: 0.0398975 	97
Accuracy: 0.0396468 	98
Accuracy: 0.0393979 	99
[(0.017036043, 0), (0.056518648, 17)]
[8, 7]
Accuracy: 0.0466223
Accuracy: 0.0267693
Accuracy: 0.018095
Final bead error: 0.018095
[True, True, True, True, True, True, True, True, True, True]
Accuracy: 0.0192793 	0
Accuracy: 0.0192828 	1
Accuracy: 0.0192864 	2
Accuracy: 0.0192901 	3
Accuracy: 0.0192937 	4
Accuracy: 0.0192973 	5
Accuracy: 0.019301 	6
Accuracy: 0.0193047 	7
Accuracy: 0.0193084 	8
Accuracy: 0.0193121 	9
Accuracy: 0.0193159 	10
Accuracy: 0.0193197 	11
Accuracy: 0.0193234 	12
Accuracy: 0.0193272 	13
Accuracy: 0.0193309 	14
Accuracy: 0.0193349 	15
Accuracy: 0.0193389 	16
Accuracy: 0.0193431 	17
Accuracy: 0.0193473 	18
Accuracy: 0.0193516 	19
Accuracy: 0.0193559 	20
Accuracy: 0.0193602 	21
Accuracy: 0.0193645 	22
Accuracy: 0.0193687 	23
Accuracy: 0.019373 	24
Accuracy: 0.0193772 	25
Accuracy: 0.0193815 	26
Accuracy: 0.0193857 	27
Accuracy: 0.0193899 	28
Accuracy: 0.0193942 	29
Accuracy: 0.0193984 	30
Accuracy: 0.0194027 	31
Accuracy: 0.019407 	32
Accuracy: 0.0194113 	33
Accuracy: 0.0194157 	34
Accuracy: 0.0194201 	35
Accuracy: 0.0194244 	36
Accuracy: 0.0194288 	37
Accuracy: 0.0194331 	38
Accuracy: 0.0194376 	39
Accuracy: 0.0194421 	40
Accuracy: 0.0194466 	41
Accuracy: 0.0194511 	42
Accuracy: 0.0194557 	43
Accuracy: 0.0194602 	44
Accuracy: 0.0194648 	45
Accuracy: 0.0194694 	46
Accuracy: 0.0194739 	47
Accuracy: 0.0194785 	48
Accuracy: 0.0194831 	49
Accuracy: 0.0194879 	50
Accuracy: 0.0194926 	51
Accuracy: 0.0194973 	52
Accuracy: 0.019502 	53
Accuracy: 0.0195068 	54
Accuracy: 0.0195116 	55
Accuracy: 0.0195165 	56
Accuracy: 0.0195214 	57
Accuracy: 0.0195263 	58
Accuracy: 0.0195312 	59
Accuracy: 0.0195362 	60
Accuracy: 0.0195411 	61
Accuracy: 0.019546 	62
Accuracy: 0.0195509 	63
Accuracy: 0.0195558 	64
Accuracy: 0.0195607 	65
Accuracy: 0.0195656 	66
Accuracy: 0.0195705 	67
Accuracy: 0.0195754 	68
Accuracy: 0.0195802 	69
Accuracy: 0.0195851 	70
Accuracy: 0.0195901 	71
Accuracy: 0.019595 	72
Accuracy: 0.0196 	73
Accuracy: 0.0196051 	74
Accuracy: 0.0196101 	75
Accuracy: 0.0196151 	76
Accuracy: 0.0196201 	77
Accuracy: 0.0196251 	78
Accuracy: 0.0196301 	79
Accuracy: 0.0196351 	80
Accuracy: 0.0196401 	81
Accuracy: 0.0196451 	82
Accuracy: 0.0196501 	83
Accuracy: 0.019655 	84
Accuracy: 0.01966 	85
Accuracy: 0.019665 	86
Accuracy: 0.0196699 	87
Accuracy: 0.0196749 	88
Accuracy: 0.0196799 	89
Accuracy: 0.0196848 	90
Accuracy: 0.0196898 	91
Accuracy: 0.0196947 	92
Accuracy: 0.0196997 	93
Accuracy: 0.0197046 	94
Accuracy: 0.0197096 	95
Accuracy: 0.0197145 	96
Accuracy: 0.0197194 	97
Accuracy: 0.0197244 	98
Accuracy: 0.0197293 	99
Accuracy: 0.0185368 	0
Accuracy: 0.0205025 	1
Accuracy: 0.0228227 	2
Accuracy: 0.0254294 	3
Accuracy: 0.0282424 	4
Accuracy: 0.0311954 	5
Accuracy: 0.0342424 	6
Accuracy: 0.0372955 	7
Accuracy: 0.0402457 	8
Accuracy: 0.0430966 	9
Accuracy: 0.0457634 	10
Accuracy: 0.048194 	11
Accuracy: 0.0502281 	12
Accuracy: 0.0519726 	13
Accuracy: 0.0533482 	14
Accuracy: 0.0543254 	15
Accuracy: 0.054895 	16
Accuracy: 0.0549538 	17
Accuracy: 0.0547424 	18
Accuracy: 0.0545324 	19
Accuracy: 0.054324 	20
Accuracy: 0.0541182 	21
Accuracy: 0.0539147 	22
Accuracy: 0.0537131 	23
Accuracy: 0.0535144 	24
Accuracy: 0.053319 	25
Accuracy: 0.0531267 	26
Accuracy: 0.0529378 	27
Accuracy: 0.052752 	28
Accuracy: 0.0525689 	29
Accuracy: 0.052389 	30
Accuracy: 0.0522119 	31
Accuracy: 0.0520385 	32
Accuracy: 0.0518648 	33
Accuracy: 0.05168 	34
Accuracy: 0.0514956 	35
Accuracy: 0.0513104 	36
Accuracy: 0.0511314 	37
Accuracy: 0.0509535 	38
Accuracy: 0.0507791 	39
Accuracy: 0.0506047 	40
Accuracy: 0.050425 	41
Accuracy: 0.050243 	42
Accuracy: 0.0500581 	43
Accuracy: 0.049866 	44
Accuracy: 0.0496785 	45
Accuracy: 0.049494 	46
Accuracy: 0.0493079 	47
Accuracy: 0.0491216 	48
Accuracy: 0.0489355 	49
Accuracy: 0.0487529 	50
Accuracy: 0.048576 	51
Accuracy: 0.0484006 	52
Accuracy: 0.0482275 	53
Accuracy: 0.048053 	54
Accuracy: 0.0478754 	55
Accuracy: 0.0476985 	56
Accuracy: 0.0475176 	57
Accuracy: 0.0473315 	58
Accuracy: 0.0471472 	59
Accuracy: 0.046967 	60
Accuracy: 0.0467906 	61
Accuracy: 0.0466061 	62
Accuracy: 0.0464205 	63
Accuracy: 0.0462325 	64
Accuracy: 0.0460455 	65
Accuracy: 0.0458613 	66
Accuracy: 0.0456754 	67
Accuracy: 0.0454872 	68
Accuracy: 0.0452986 	69
Accuracy: 0.0451107 	70
Accuracy: 0.0449157 	71
Accuracy: 0.0447153 	72
Accuracy: 0.0445129 	73
Accuracy: 0.0443094 	74
Accuracy: 0.0441035 	75
Accuracy: 0.0438942 	76
Accuracy: 0.0436859 	77
Accuracy: 0.0434666 	78
Accuracy: 0.0432398 	79
Accuracy: 0.0430129 	80
Accuracy: 0.0427854 	81
Accuracy: 0.0425572 	82
Accuracy: 0.0423295 	83
Accuracy: 0.0421039 	84
Accuracy: 0.0418792 	85
Accuracy: 0.0416564 	86
Accuracy: 0.0414365 	87
Accuracy: 0.0412161 	88
Accuracy: 0.040989 	89
Accuracy: 0.0407626 	90
Accuracy: 0.0405405 	91
Accuracy: 0.0403201 	92
Accuracy: 0.0400972 	93
Accuracy: 0.0398765 	94
Accuracy: 0.0396598 	95
Accuracy: 0.0394462 	96
Accuracy: 0.0392359 	97
Accuracy: 0.0390272 	98
Accuracy: 0.0388211 	99
[(0.019729288, 99), (0.054953828, 17)]
[9, 8]
Accuracy: 0.0497233
Accuracy: 0.0495117
Accuracy: 0.0492999
Accuracy: 0.0491201
Accuracy: 0.0489578
Accuracy: 0.0487608
Accuracy: 0.0485895
Accuracy: 0.0484178
Accuracy: 0.048205
Accuracy: 0.0479871
Accuracy: 0.0476775
Accuracy: 0.0443743
Accuracy: 0.0208035
Accuracy: 0.0192714
Final bead error: 0.0192714
[True, True, True, True, True, True, True, True, True, True, True]
Accuracy: 0.018964 	0
Accuracy: 0.0189672 	1
Accuracy: 0.0189702 	2
Accuracy: 0.0189732 	3
Accuracy: 0.0189761 	4
Accuracy: 0.0189789 	5
Accuracy: 0.0189816 	6
Accuracy: 0.0189842 	7
Accuracy: 0.0189868 	8
Accuracy: 0.0189893 	9
Accuracy: 0.0189917 	10
Accuracy: 0.018994 	11
Accuracy: 0.0189962 	12
Accuracy: 0.0189984 	13
Accuracy: 0.0190004 	14
Accuracy: 0.0190024 	15
Accuracy: 0.0190043 	16
Accuracy: 0.0190062 	17
Accuracy: 0.0190079 	18
Accuracy: 0.0190096 	19
Accuracy: 0.0190111 	20
Accuracy: 0.0190126 	21
Accuracy: 0.019014 	22
Accuracy: 0.0190153 	23
Accuracy: 0.0190166 	24
Accuracy: 0.0190177 	25
Accuracy: 0.0190188 	26
Accuracy: 0.0190197 	27
Accuracy: 0.0190206 	28
Accuracy: 0.0190214 	29
Accuracy: 0.0190222 	30
Accuracy: 0.0190228 	31
Accuracy: 0.0190234 	32
Accuracy: 0.0190238 	33
Accuracy: 0.0190242 	34
Accuracy: 0.0190246 	35
Accuracy: 0.0190248 	36
Accuracy: 0.019025 	37
Accuracy: 0.019025 	38
Accuracy: 0.0190251 	39
Accuracy: 0.019025 	40
Accuracy: 0.0190248 	41
Accuracy: 0.0190246 	42
Accuracy: 0.0190242 	43
Accuracy: 0.0190238 	44
Accuracy: 0.0190232 	45
Accuracy: 0.0190226 	46
Accuracy: 0.0190219 	47
Accuracy: 0.0190211 	48
Accuracy: 0.0190202 	49
Accuracy: 0.0190193 	50
Accuracy: 0.0190182 	51
Accuracy: 0.0190171 	52
Accuracy: 0.0190159 	53
Accuracy: 0.0190146 	54
Accuracy: 0.0190132 	55
Accuracy: 0.0190117 	56
Accuracy: 0.0190101 	57
Accuracy: 0.0190085 	58
Accuracy: 0.0190067 	59
Accuracy: 0.0190049 	60
Accuracy: 0.019003 	61
Accuracy: 0.019001 	62
Accuracy: 0.0189989 	63
Accuracy: 0.0189968 	64
Accuracy: 0.0189945 	65
Accuracy: 0.0189922 	66
Accuracy: 0.0189898 	67
Accuracy: 0.0189873 	68
Accuracy: 0.0189847 	69
Accuracy: 0.018982 	70
Accuracy: 0.0189792 	71
Accuracy: 0.0189764 	72
Accuracy: 0.0189735 	73
Accuracy: 0.0189704 	74
Accuracy: 0.0189673 	75
Accuracy: 0.0189642 	76
Accuracy: 0.0189609 	77
Accuracy: 0.0189576 	78
Accuracy: 0.0189542 	79
Accuracy: 0.0189506 	80
Accuracy: 0.0189471 	81
Accuracy: 0.0189434 	82
Accuracy: 0.0189397 	83
Accuracy: 0.0189358 	84
Accuracy: 0.0189319 	85
Accuracy: 0.0189279 	86
Accuracy: 0.0189238 	87
Accuracy: 0.0189197 	88
Accuracy: 0.0189154 	89
Accuracy: 0.0189111 	90
Accuracy: 0.0189067 	91
Accuracy: 0.0189023 	92
Accuracy: 0.0188977 	93
Accuracy: 0.0188931 	94
Accuracy: 0.0188884 	95
Accuracy: 0.0188836 	96
Accuracy: 0.0188787 	97
Accuracy: 0.0188737 	98
Accuracy: 0.0188686 	99
Accuracy: 0.0177282 	0
Accuracy: 0.0196204 	1
Accuracy: 0.0218728 	2
Accuracy: 0.0244151 	3
Accuracy: 0.0271951 	4
Accuracy: 0.0301369 	5
Accuracy: 0.0331351 	6
Accuracy: 0.036182 	7
Accuracy: 0.0391195 	8
Accuracy: 0.0418626 	9
Accuracy: 0.0444025 	10
Accuracy: 0.0467952 	11
Accuracy: 0.0489007 	12
Accuracy: 0.0505907 	13
Accuracy: 0.0519032 	14
Accuracy: 0.0527099 	15
Accuracy: 0.0530213 	16
Accuracy: 0.0530032 	17
Accuracy: 0.0529768 	18
Accuracy: 0.0529459 	19
Accuracy: 0.0529106 	20
Accuracy: 0.0528709 	21
Accuracy: 0.0528269 	22
Accuracy: 0.052778 	23
Accuracy: 0.0527228 	24
Accuracy: 0.0526549 	25
Accuracy: 0.0525788 	26
Accuracy: 0.0524977 	27
Accuracy: 0.0524073 	28
Accuracy: 0.0523007 	29
Accuracy: 0.0521829 	30
Accuracy: 0.0520536 	31
Accuracy: 0.0519228 	32
Accuracy: 0.0517887 	33
Accuracy: 0.0516488 	34
Accuracy: 0.0515056 	35
Accuracy: 0.0513568 	36
Accuracy: 0.0512015 	37
Accuracy: 0.0510401 	38
Accuracy: 0.0508712 	39
Accuracy: 0.0506997 	40
Accuracy: 0.0505272 	41
Accuracy: 0.0503548 	42
Accuracy: 0.0501818 	43
Accuracy: 0.0500082 	44
Accuracy: 0.0498276 	45
Accuracy: 0.0496382 	46
Accuracy: 0.0494449 	47
Accuracy: 0.0492471 	48
Accuracy: 0.0490489 	49
Accuracy: 0.0488512 	50
Accuracy: 0.048657 	51
Accuracy: 0.048465 	52
Accuracy: 0.048268 	53
Accuracy: 0.0480703 	54
Accuracy: 0.0478701 	55
Accuracy: 0.0476661 	56
Accuracy: 0.0474633 	57
Accuracy: 0.047256 	58
Accuracy: 0.0470528 	59
Accuracy: 0.0468541 	60
Accuracy: 0.0466551 	61
Accuracy: 0.0464589 	62
Accuracy: 0.0462658 	63
Accuracy: 0.0460762 	64
Accuracy: 0.0458885 	65
Accuracy: 0.0457038 	66
Accuracy: 0.0455155 	67
Accuracy: 0.0453235 	68
Accuracy: 0.0451309 	69
Accuracy: 0.0449396 	70
Accuracy: 0.0447482 	71
Accuracy: 0.0445603 	72
Accuracy: 0.0443745 	73
Accuracy: 0.0441884 	74
Accuracy: 0.0440034 	75
Accuracy: 0.0438185 	76
Accuracy: 0.0436359 	77
Accuracy: 0.0434505 	78
Accuracy: 0.0432684 	79
Accuracy: 0.0430858 	80
Accuracy: 0.0429035 	81
Accuracy: 0.0427189 	82
Accuracy: 0.042534 	83
Accuracy: 0.0423471 	84
Accuracy: 0.0421603 	85
Accuracy: 0.0419759 	86
Accuracy: 0.0417903 	87
Accuracy: 0.041603 	88
Accuracy: 0.0414161 	89
Accuracy: 0.0412315 	90
Accuracy: 0.0410448 	91
Accuracy: 0.0408574 	92
Accuracy: 0.0406688 	93
Accuracy: 0.0404842 	94
Accuracy: 0.040303 	95
Accuracy: 0.0401224 	96
Accuracy: 0.0399412 	97
Accuracy: 0.0397581 	98
Accuracy: 0.0395763 	99
[(0.019025052, 39), (0.053021256, 16)]
[10, 9]
Accuracy: 0.0464183
Accuracy: 0.0237074
Accuracy: 0.0187393
Final bead error: 0.0187393
[True, True, True, True, True, True, True, True, True, True, True, True]
Accuracy: 0.0181569 	0
Accuracy: 0.0181573 	1
Accuracy: 0.0181577 	2
Accuracy: 0.0181581 	3
Accuracy: 0.0181585 	4
Accuracy: 0.0181589 	5
Accuracy: 0.0181593 	6
Accuracy: 0.0181597 	7
Accuracy: 0.0181601 	8
Accuracy: 0.0181604 	9
Accuracy: 0.0181608 	10
Accuracy: 0.0181612 	11
Accuracy: 0.0181615 	12
Accuracy: 0.0181619 	13
Accuracy: 0.0181622 	14
Accuracy: 0.0181625 	15
Accuracy: 0.0181628 	16
Accuracy: 0.0181632 	17
Accuracy: 0.0181635 	18
Accuracy: 0.0181638 	19
Accuracy: 0.0181641 	20
Accuracy: 0.0181644 	21
Accuracy: 0.0181646 	22
Accuracy: 0.0181649 	23
Accuracy: 0.0181652 	24
Accuracy: 0.0181654 	25
Accuracy: 0.0181657 	26
Accuracy: 0.018166 	27
Accuracy: 0.0181662 	28
Accuracy: 0.0181664 	29
Accuracy: 0.0181667 	30
Accuracy: 0.0181669 	31
Accuracy: 0.0181671 	32
Accuracy: 0.0181673 	33
Accuracy: 0.0181675 	34
Accuracy: 0.0181677 	35
Accuracy: 0.0181679 	36
Accuracy: 0.0181681 	37
Accuracy: 0.0181683 	38
Accuracy: 0.0181684 	39
Accuracy: 0.0181686 	40
Accuracy: 0.0181688 	41
Accuracy: 0.0181689 	42
Accuracy: 0.0181691 	43
Accuracy: 0.0181692 	44
Accuracy: 0.0181693 	45
Accuracy: 0.0181695 	46
Accuracy: 0.0181696 	47
Accuracy: 0.0181697 	48
Accuracy: 0.0181698 	49
Accuracy: 0.0181699 	50
Accuracy: 0.01817 	51
Accuracy: 0.0181701 	52
Accuracy: 0.0181702 	53
Accuracy: 0.0181702 	54
Accuracy: 0.0181703 	55
Accuracy: 0.0181703 	56
Accuracy: 0.0181704 	57
Accuracy: 0.0181704 	58
Accuracy: 0.0181705 	59
Accuracy: 0.0181705 	60
Accuracy: 0.0181706 	61
Accuracy: 0.0181706 	62
Accuracy: 0.0181706 	63
Accuracy: 0.0181706 	64
Accuracy: 0.0181706 	65
Accuracy: 0.0181706 	66
Accuracy: 0.0181706 	67
Accuracy: 0.0181706 	68
Accuracy: 0.0181706 	69
Accuracy: 0.0181705 	70
Accuracy: 0.0181705 	71
Accuracy: 0.0181705 	72
Accuracy: 0.0181704 	73
Accuracy: 0.0181704 	74
Accuracy: 0.0181703 	75
Accuracy: 0.0181702 	76
Accuracy: 0.0181702 	77
Accuracy: 0.0181701 	78
Accuracy: 0.01817 	79
Accuracy: 0.0181699 	80
Accuracy: 0.0181698 	81
Accuracy: 0.0181697 	82
Accuracy: 0.0181696 	83
Accuracy: 0.0181695 	84
Accuracy: 0.0181693 	85
Accuracy: 0.0181692 	86
Accuracy: 0.0181691 	87
Accuracy: 0.0181689 	88
Accuracy: 0.0181688 	89
Accuracy: 0.0181686 	90
Accuracy: 0.0181685 	91
Accuracy: 0.0181683 	92
Accuracy: 0.0181681 	93
Accuracy: 0.018168 	94
Accuracy: 0.0181678 	95
Accuracy: 0.0181676 	96
Accuracy: 0.0181674 	97
Accuracy: 0.0181672 	98
Accuracy: 0.018167 	99
Accuracy: 0.0194753 	0
Accuracy: 0.0214455 	1
Accuracy: 0.0237353 	2
Accuracy: 0.0263074 	3
Accuracy: 0.0291118 	4
Accuracy: 0.0320002 	5
Accuracy: 0.0349129 	6
Accuracy: 0.0378181 	7
Accuracy: 0.040653 	8
Accuracy: 0.0432108 	9
Accuracy: 0.0455065 	10
Accuracy: 0.0475571 	11
Accuracy: 0.04921 	12
Accuracy: 0.0505691 	13
Accuracy: 0.0515938 	14
Accuracy: 0.0522672 	15
Accuracy: 0.0524708 	16
Accuracy: 0.0524532 	17
Accuracy: 0.0524253 	18
Accuracy: 0.0523873 	19
Accuracy: 0.0523381 	20
Accuracy: 0.0522814 	21
Accuracy: 0.0522118 	22
Accuracy: 0.0521305 	23
Accuracy: 0.0520389 	24
Accuracy: 0.0519387 	25
Accuracy: 0.0518323 	26
Accuracy: 0.0517147 	27
Accuracy: 0.051589 	28
Accuracy: 0.0514512 	29
Accuracy: 0.051307 	30
Accuracy: 0.0511584 	31
Accuracy: 0.0510074 	32
Accuracy: 0.0508516 	33
Accuracy: 0.0506914 	34
Accuracy: 0.050526 	35
Accuracy: 0.0503575 	36
Accuracy: 0.0501894 	37
Accuracy: 0.0500151 	38
Accuracy: 0.0498358 	39
Accuracy: 0.0496546 	40
Accuracy: 0.0494678 	41
Accuracy: 0.0492787 	42
Accuracy: 0.0490825 	43
Accuracy: 0.0488863 	44
Accuracy: 0.0486916 	45
Accuracy: 0.0484986 	46
Accuracy: 0.0483016 	47
Accuracy: 0.0481001 	48
Accuracy: 0.0478977 	49
Accuracy: 0.0476958 	50
Accuracy: 0.047495 	51
Accuracy: 0.0472957 	52
Accuracy: 0.0470982 	53
Accuracy: 0.046901 	54
Accuracy: 0.0467041 	55
Accuracy: 0.0465097 	56
Accuracy: 0.0463175 	57
Accuracy: 0.0461265 	58
Accuracy: 0.0459367 	59
Accuracy: 0.0457481 	60
Accuracy: 0.0455587 	61
Accuracy: 0.0453697 	62
Accuracy: 0.0451807 	63
Accuracy: 0.0449924 	64
Accuracy: 0.0448027 	65
Accuracy: 0.0446134 	66
Accuracy: 0.0444217 	67
Accuracy: 0.044228 	68
Accuracy: 0.0440282 	69
Accuracy: 0.0438251 	70
Accuracy: 0.0436229 	71
Accuracy: 0.0434222 	72
Accuracy: 0.0432213 	73
Accuracy: 0.0430218 	74
Accuracy: 0.0428226 	75
Accuracy: 0.0426241 	76
Accuracy: 0.0424269 	77
Accuracy: 0.0422291 	78
Accuracy: 0.0420319 	79
Accuracy: 0.0418361 	80
Accuracy: 0.0416412 	81
Accuracy: 0.0414477 	82
Accuracy: 0.0412567 	83
Accuracy: 0.0410674 	84
Accuracy: 0.040879 	85
Accuracy: 0.0406933 	86
Accuracy: 0.0405094 	87
Accuracy: 0.0403233 	88
Accuracy: 0.0401367 	89
Accuracy: 0.0399503 	90
Accuracy: 0.0397616 	91
Accuracy: 0.0395707 	92
Accuracy: 0.0393801 	93
Accuracy: 0.0391875 	94
Accuracy: 0.0389951 	95
Accuracy: 0.038806 	96
Accuracy: 0.0386193 	97
Accuracy: 0.0384365 	98
Accuracy: 0.0382572 	99
[(0.01817061, 66), (0.052470807, 16)]
[11, 10]
Accuracy: 0.022526
Accuracy: 0.0200148
Final bead error: 0.0200148
[True, True, True, True, True, True, True, True, True, True, True, True, True]
Accuracy: 0.0191558 	0
Accuracy: 0.0194272 	1
Accuracy: 0.0197014 	2
Accuracy: 0.0199776 	3
Accuracy: 0.0202563 	4
Accuracy: 0.020537 	5
Accuracy: 0.0208198 	6
Accuracy: 0.0210761 	7
Accuracy: 0.0213115 	8
Accuracy: 0.0215393 	9
Accuracy: 0.021752 	10
Accuracy: 0.0219629 	11
Accuracy: 0.0221623 	12
Accuracy: 0.0223472 	13
Accuracy: 0.0225059 	14
Accuracy: 0.0226437 	15
Accuracy: 0.0227734 	16
Accuracy: 0.0228975 	17
Accuracy: 0.0230103 	18
Accuracy: 0.0231151 	19
Accuracy: 0.0231967 	20
Accuracy: 0.0232661 	21
Accuracy: 0.0233295 	22
Accuracy: 0.0233822 	23
Accuracy: 0.0234209 	24
Accuracy: 0.0234526 	25
Accuracy: 0.0234821 	26
Accuracy: 0.023503 	27
Accuracy: 0.023513 	28
Accuracy: 0.0235224 	29
Accuracy: 0.0235126 	30
Accuracy: 0.0234876 	31
Accuracy: 0.0234441 	32
Accuracy: 0.0233943 	33
Accuracy: 0.0233451 	34
Accuracy: 0.0232996 	35
Accuracy: 0.0232464 	36
Accuracy: 0.0231903 	37
Accuracy: 0.0231286 	38
Accuracy: 0.0230628 	39
Accuracy: 0.0229988 	40
Accuracy: 0.0229275 	41
Accuracy: 0.0228444 	42
Accuracy: 0.0227632 	43
Accuracy: 0.0226851 	44
Accuracy: 0.0226059 	45
Accuracy: 0.022529 	46
Accuracy: 0.0224431 	47
Accuracy: 0.0223586 	48
Accuracy: 0.0222726 	49
Accuracy: 0.0221889 	50
Accuracy: 0.0221066 	51
Accuracy: 0.0220271 	52
Accuracy: 0.0219419 	53
Accuracy: 0.0218575 	54
Accuracy: 0.0217751 	55
Accuracy: 0.0216878 	56
Accuracy: 0.0215901 	57
Accuracy: 0.0214969 	58
Accuracy: 0.0214051 	59
Accuracy: 0.0213158 	60
Accuracy: 0.0212203 	61
Accuracy: 0.0211156 	62
Accuracy: 0.021019 	63
Accuracy: 0.0209335 	64
Accuracy: 0.0208444 	65
Accuracy: 0.0207466 	66
Accuracy: 0.020638 	67
Accuracy: 0.0205208 	68
Accuracy: 0.0204029 	69
Accuracy: 0.0202849 	70
Accuracy: 0.0201758 	71
Accuracy: 0.0200691 	72
Accuracy: 0.0199686 	73
Accuracy: 0.0198727 	74
Accuracy: 0.0197744 	75
Accuracy: 0.0196747 	76
Accuracy: 0.0195735 	77
Accuracy: 0.0194814 	78
Accuracy: 0.0193952 	79
Accuracy: 0.0193108 	80
Accuracy: 0.0192344 	81
Accuracy: 0.0191591 	82
Accuracy: 0.0190824 	83
Accuracy: 0.0190134 	84
Accuracy: 0.018952 	85
Accuracy: 0.018894 	86
Accuracy: 0.0188423 	87
Accuracy: 0.018805 	88
Accuracy: 0.0187812 	89
Accuracy: 0.0187638 	90
Accuracy: 0.0187603 	91
Accuracy: 0.0187686 	92
Accuracy: 0.0187927 	93
Accuracy: 0.0188334 	94
Accuracy: 0.0188848 	95
Accuracy: 0.0189461 	96
Accuracy: 0.0190135 	97
Accuracy: 0.0190993 	98
Accuracy: 0.0192016 	99
Accuracy: 0.0192149 	0
Accuracy: 0.0200096 	1
Accuracy: 0.0207131 	2
Accuracy: 0.0212813 	3
Accuracy: 0.0216854 	4
Accuracy: 0.0218927 	5
Accuracy: 0.0220752 	6
Accuracy: 0.0222652 	7
Accuracy: 0.0224574 	8
Accuracy: 0.0226553 	9
Accuracy: 0.0228497 	10
Accuracy: 0.0230452 	11
Accuracy: 0.023236 	12
Accuracy: 0.0234219 	13
Accuracy: 0.0236032 	14
Accuracy: 0.0237798 	15
Accuracy: 0.0239529 	16
Accuracy: 0.0241229 	17
Accuracy: 0.0242901 	18
Accuracy: 0.0244539 	19
Accuracy: 0.02461 	20
Accuracy: 0.0247633 	21
Accuracy: 0.0249128 	22
Accuracy: 0.0250567 	23
Accuracy: 0.0251925 	24
Accuracy: 0.025325 	25
Accuracy: 0.0254566 	26
Accuracy: 0.0255858 	27
Accuracy: 0.0257144 	28
Accuracy: 0.0258444 	29
Accuracy: 0.0259709 	30
Accuracy: 0.026094 	31
Accuracy: 0.026216 	32
Accuracy: 0.0263348 	33
Accuracy: 0.0264476 	34
Accuracy: 0.0265591 	35
Accuracy: 0.0266723 	36
Accuracy: 0.0267862 	37
Accuracy: 0.026904 	38
Accuracy: 0.0270246 	39
Accuracy: 0.0271429 	40
Accuracy: 0.0272666 	41
Accuracy: 0.0273937 	42
Accuracy: 0.0275196 	43
Accuracy: 0.027652 	44
Accuracy: 0.0277873 	45
Accuracy: 0.027925 	46
Accuracy: 0.0280547 	47
Accuracy: 0.028184 	48
Accuracy: 0.0283141 	49
Accuracy: 0.0284467 	50
Accuracy: 0.0285817 	51
Accuracy: 0.0287204 	52
Accuracy: 0.0288626 	53
Accuracy: 0.0290106 	54
Accuracy: 0.0291616 	55
Accuracy: 0.0293151 	56
Accuracy: 0.0294733 	57
Accuracy: 0.0296364 	58
Accuracy: 0.0298001 	59
Accuracy: 0.0299587 	60
Accuracy: 0.0301162 	61
Accuracy: 0.0302722 	62
Accuracy: 0.0304275 	63
Accuracy: 0.0305816 	64
Accuracy: 0.0307434 	65
Accuracy: 0.030913 	66
Accuracy: 0.0310869 	67
Accuracy: 0.0312633 	68
Accuracy: 0.031444 	69
Accuracy: 0.03163 	70
Accuracy: 0.031812 	71
Accuracy: 0.0319936 	72
Accuracy: 0.0321752 	73
Accuracy: 0.0323581 	74
Accuracy: 0.0325451 	75
Accuracy: 0.0327363 	76
Accuracy: 0.0329286 	77
Accuracy: 0.0331253 	78
Accuracy: 0.0333305 	79
Accuracy: 0.0335399 	80
Accuracy: 0.0337531 	81
Accuracy: 0.0339706 	82
Accuracy: 0.0341939 	83
Accuracy: 0.0344211 	84
Accuracy: 0.0346566 	85
Accuracy: 0.0348988 	86
Accuracy: 0.0351448 	87
Accuracy: 0.0353998 	88
Accuracy: 0.035659 	89
Accuracy: 0.0359239 	90
Accuracy: 0.0361913 	91
Accuracy: 0.0364475 	92
Accuracy: 0.0367064 	93
Accuracy: 0.0369727 	94
Accuracy: 0.0372367 	95
Accuracy: 0.0375039 	96
Accuracy: 0.0377767 	97
Accuracy: 0.0380567 	98
Accuracy: 0.0383339 	99
[(0.023522442, 29), (0.038333878, 99)]
1
2
Thresh: 0.04
Comps: 1
***
15.5982346591
0.0689477678388

In [258]:
print len(models)


3

In [261]:
connecteddict[2]


Out[261]:
'not connected'

In [293]:
models[0].params


Out[293]:
([array([[-1.4998107 , -0.23091862,  0.1918727 ,  0.20560187]], dtype=float32),
  array([[-1.17867899,  0.78579026, -0.89930582, -1.24247086],
         [-0.86114722, -0.1602481 , -0.86737674, -1.13779354],
         [ 0.72442532, -0.57136399,  0.78054297, -0.60431153],
         [-0.08741271, -0.26723036,  0.54410225,  0.19356832]], dtype=float32),
  array([[ 0.61557311],
         [ 0.35026345],
         [ 0.14537683],
         [-0.67227656]], dtype=float32)],
 [array([-0.07781173,  0.05719442, -2.12461662, -1.48168921], dtype=float32),
  array([-1.76856709, -0.59562606,  3.31021976, -1.39359915], dtype=float32),
  array([-0.24812987], dtype=float32)])

In [294]:
models[1].params


Out[294]:
([array([[-1.00434339,  0.85087883, -0.4409067 , -0.07457321]], dtype=float32),
  array([[ 0.75883377, -0.20007004,  0.41354284,  0.29582518],
         [ 0.00292377,  1.28025246,  0.57766509,  1.37554467],
         [-1.0842402 ,  1.0441618 , -0.43416622,  0.22762316],
         [-2.10338879, -1.36171842, -0.22631967,  0.58631068]], dtype=float32),
  array([[-0.11565071],
         [ 0.33658049],
         [-1.14827919],
         [ 0.68962663]], dtype=float32)],
 [array([ 0.73745489, -0.77259034, -1.86909771,  1.6973933 ], dtype=float32),
  array([ 1.3530215 , -1.10458112,  0.21234779, -1.07358325], dtype=float32),
  array([ 0.20686743], dtype=float32)])

In [331]:
len(test.AllBeads)


Out[331]:
13

In [335]:
#for b in xrange(len(test.AllBeads)-1):
#    e = InterpBeadError(test.AllBeads[b][0],test.AllBeads[b][1], test.AllBeads[b+1][0], test.AllBeads[b+1][1])
#for b in xrange(len(test.AllBeads)):
#    print test.AllBeads[b][0]

xdat,ydat = generatecandidate4(.5, .25, .1, 1000)
xdat = np.array(xdat)
ydat = np.array(ydat)

def InterpBeadError_SameSet(w1,b1, w2,b2,xdat, ydat, write = False, name = "00"):
    errors = []
    

    
    
    
    
    for tt in xrange(100):
        #print tt
        #accuracy = 0.
        t = tt/100.
        thiserror = 0

        #x0 = tf.placeholder("float", [None, n_input])
        #y0 = tf.placeholder("float", [None, n_classes])
        weights, biases = model_interpolate(w1,b1,w2,b2, t)
        interp_model = multilayer_perceptron(w=weights, b=biases)
        
        with interp_model.g.as_default():
            
            #interp_model.UpdateWeights(weights, biases)


            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = interp_model.predict(x)
            init = tf.initialize_all_variables()


            with tf.Session() as sess:
                sess.run(init)
                correct_prediction = tf.reduce_mean(tf.square(pred-y))
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                print "Accuracy:", accuracy.eval({x: xdat, y: ydat}),"\t",tt
                thiserror = accuracy.eval({x: xdat, y: ydat})


        errors.append(thiserror)

    if write == True:
        with open("f" + str(name) + ".out",'w+') as f:
            for e in errors:
                f.write(str(e) + "\n")
    
    return max(errors), np.argmax(errors)


for b in xrange(len(test.AllBeads)-1):
    e = InterpBeadError_SameSet(test.AllBeads[b][0],test.AllBeads[b][1], test.AllBeads[b+1][0], test.AllBeads[b+1][1], xdat, ydat)


Accuracy: 0.0417373 	0
Accuracy: 0.0417649 	1
Accuracy: 0.0417958 	2
Accuracy: 0.0418295 	3
Accuracy: 0.0418655 	4
Accuracy: 0.0419032 	5
Accuracy: 0.0419423 	6
Accuracy: 0.0419823 	7
Accuracy: 0.0420228 	8
Accuracy: 0.0420632 	9
Accuracy: 0.0421033 	10
Accuracy: 0.0421425 	11
Accuracy: 0.0421806 	12
Accuracy: 0.042217 	13
Accuracy: 0.0422514 	14
Accuracy: 0.0422835 	15
Accuracy: 0.0423129 	16
Accuracy: 0.0423392 	17
Accuracy: 0.0423621 	18
Accuracy: 0.0423813 	19
Accuracy: 0.0423964 	20
Accuracy: 0.0424072 	21
Accuracy: 0.0424134 	22
Accuracy: 0.0424146 	23
Accuracy: 0.0424107 	24
Accuracy: 0.0424014 	25
Accuracy: 0.0423864 	26
Accuracy: 0.0423654 	27
Accuracy: 0.0423383 	28
Accuracy: 0.0423049 	29
Accuracy: 0.0422649 	30
Accuracy: 0.0422182 	31
Accuracy: 0.0421646 	32
Accuracy: 0.042104 	33
Accuracy: 0.0420361 	34
Accuracy: 0.0419609 	35
Accuracy: 0.0418782 	36
Accuracy: 0.0417879 	37
Accuracy: 0.0416899 	38
Accuracy: 0.0415841 	39
Accuracy: 0.0414705 	40
Accuracy: 0.041349 	41
Accuracy: 0.0412195 	42
Accuracy: 0.041082 	43
Accuracy: 0.0409365 	44
Accuracy: 0.040783 	45
Accuracy: 0.0406215 	46
Accuracy: 0.040452 	47
Accuracy: 0.0402744 	48
Accuracy: 0.040089 	49
Accuracy: 0.0398957 	50
Accuracy: 0.0396946 	51
Accuracy: 0.0394857 	52
Accuracy: 0.0392693 	53
Accuracy: 0.0390453 	54
Accuracy: 0.038814 	55
Accuracy: 0.0385754 	56
Accuracy: 0.0383298 	57
Accuracy: 0.0380772 	58
Accuracy: 0.0378179 	59
Accuracy: 0.037552 	60
Accuracy: 0.0372799 	61
Accuracy: 0.0370016 	62
Accuracy: 0.0367175 	63
Accuracy: 0.0364278 	64
Accuracy: 0.0361328 	65
Accuracy: 0.0358327 	66
Accuracy: 0.0355279 	67
Accuracy: 0.0352187 	68
Accuracy: 0.0349053 	69
Accuracy: 0.0345882 	70
Accuracy: 0.0342678 	71
Accuracy: 0.0339443 	72
Accuracy: 0.0336182 	73
Accuracy: 0.0332899 	74
Accuracy: 0.0329598 	75
Accuracy: 0.0326283 	76
Accuracy: 0.032296 	77
Accuracy: 0.0319632 	78
Accuracy: 0.0316305 	79
Accuracy: 0.0312983 	80
Accuracy: 0.0309673 	81
Accuracy: 0.0306378 	82
Accuracy: 0.0302642 	83
Accuracy: 0.0297479 	84
Accuracy: 0.0291486 	85
Accuracy: 0.0285088 	86
Accuracy: 0.0278548 	87
Accuracy: 0.0272019 	88
Accuracy: 0.0265603 	89
Accuracy: 0.0259373 	90
Accuracy: 0.0253339 	91
Accuracy: 0.0247518 	92
Accuracy: 0.0241901 	93
Accuracy: 0.0236501 	94
Accuracy: 0.0231299 	95
Accuracy: 0.022629 	96
Accuracy: 0.0221506 	97
Accuracy: 0.0216911 	98
Accuracy: 0.0212481 	99
Accuracy: 0.0208234 	0
Accuracy: 0.020939 	1
Accuracy: 0.0210577 	2
Accuracy: 0.0211794 	3
Accuracy: 0.0213038 	4
Accuracy: 0.0214307 	5
Accuracy: 0.0215601 	6
Accuracy: 0.0216915 	7
Accuracy: 0.0218248 	8
Accuracy: 0.0219598 	9
Accuracy: 0.0220963 	10
Accuracy: 0.0222341 	11
Accuracy: 0.0223729 	12
Accuracy: 0.0225125 	13
Accuracy: 0.0226527 	14
Accuracy: 0.0227932 	15
Accuracy: 0.0229339 	16
Accuracy: 0.0230746 	17
Accuracy: 0.0232149 	18
Accuracy: 0.0233546 	19
Accuracy: 0.0234936 	20
Accuracy: 0.0236315 	21
Accuracy: 0.0237682 	22
Accuracy: 0.0239035 	23
Accuracy: 0.0240371 	24
Accuracy: 0.0241687 	25
Accuracy: 0.0242982 	26
Accuracy: 0.0244253 	27
Accuracy: 0.0245499 	28
Accuracy: 0.0246716 	29
Accuracy: 0.0247902 	30
Accuracy: 0.0249057 	31
Accuracy: 0.0250176 	32
Accuracy: 0.0251259 	33
Accuracy: 0.0252302 	34
Accuracy: 0.0253305 	35
Accuracy: 0.0254265 	36
Accuracy: 0.0255181 	37
Accuracy: 0.0256049 	38
Accuracy: 0.0256869 	39
Accuracy: 0.0257639 	40
Accuracy: 0.0258356 	41
Accuracy: 0.0259019 	42
Accuracy: 0.0259627 	43
Accuracy: 0.0260178 	44
Accuracy: 0.026067 	45
Accuracy: 0.0261103 	46
Accuracy: 0.0261473 	47
Accuracy: 0.0261781 	48
Accuracy: 0.0262025 	49
Accuracy: 0.0262205 	50
Accuracy: 0.0262318 	51
Accuracy: 0.0262364 	52
Accuracy: 0.0262342 	53
Accuracy: 0.0262252 	54
Accuracy: 0.0262092 	55
Accuracy: 0.0261863 	56
Accuracy: 0.0261564 	57
Accuracy: 0.0261194 	58
Accuracy: 0.0260753 	59
Accuracy: 0.0260241 	60
Accuracy: 0.0259659 	61
Accuracy: 0.0259006 	62
Accuracy: 0.0258283 	63
Accuracy: 0.025749 	64
Accuracy: 0.0256628 	65
Accuracy: 0.0255697 	66
Accuracy: 0.0254699 	67
Accuracy: 0.0253634 	68
Accuracy: 0.0252504 	69
Accuracy: 0.025131 	70
Accuracy: 0.0250053 	71
Accuracy: 0.0248736 	72
Accuracy: 0.0247359 	73
Accuracy: 0.0245926 	74
Accuracy: 0.0244438 	75
Accuracy: 0.0242898 	76
Accuracy: 0.0241308 	77
Accuracy: 0.0239672 	78
Accuracy: 0.0237991 	79
Accuracy: 0.023627 	80
Accuracy: 0.0234513 	81
Accuracy: 0.0232721 	82
Accuracy: 0.02309 	83
Accuracy: 0.0229054 	84
Accuracy: 0.0227186 	85
Accuracy: 0.0225302 	86
Accuracy: 0.0223406 	87
Accuracy: 0.0221504 	88
Accuracy: 0.02196 	89
Accuracy: 0.0217703 	90
Accuracy: 0.021593 	91
Accuracy: 0.0214287 	92
Accuracy: 0.0212835 	93
Accuracy: 0.0211502 	94
Accuracy: 0.021031 	95
Accuracy: 0.0209271 	96
Accuracy: 0.0208377 	97
Accuracy: 0.0207599 	98
Accuracy: 0.0206996 	99
Accuracy: 0.0206582 	0
Accuracy: 0.0206239 	1
Accuracy: 0.0205912 	2
Accuracy: 0.0205598 	3
Accuracy: 0.0205292 	4
Accuracy: 0.0205007 	5
Accuracy: 0.0204735 	6
Accuracy: 0.0204476 	7
Accuracy: 0.020423 	8
Accuracy: 0.0203993 	9
Accuracy: 0.020376 	10
Accuracy: 0.0203534 	11
Accuracy: 0.0203323 	12
Accuracy: 0.0203127 	13
Accuracy: 0.0202942 	14
Accuracy: 0.0202767 	15
Accuracy: 0.0202596 	16
Accuracy: 0.0202437 	17
Accuracy: 0.0202286 	18
Accuracy: 0.0202139 	19
Accuracy: 0.0201999 	20
Accuracy: 0.0201867 	21
Accuracy: 0.020174 	22
Accuracy: 0.0201625 	23
Accuracy: 0.0201516 	24
Accuracy: 0.0201414 	25
Accuracy: 0.0201316 	26
Accuracy: 0.0201221 	27
Accuracy: 0.0201133 	28
Accuracy: 0.0201053 	29
Accuracy: 0.0200981 	30
Accuracy: 0.0200914 	31
Accuracy: 0.0200852 	32
Accuracy: 0.0200802 	33
Accuracy: 0.0200761 	34
Accuracy: 0.0200729 	35
Accuracy: 0.0200708 	36
Accuracy: 0.0200697 	37
Accuracy: 0.0200691 	38
Accuracy: 0.0200686 	39
Accuracy: 0.0200684 	40
Accuracy: 0.0200691 	41
Accuracy: 0.0200709 	42
Accuracy: 0.0200735 	43
Accuracy: 0.0200769 	44
Accuracy: 0.020081 	45
Accuracy: 0.0200853 	46
Accuracy: 0.0200909 	47
Accuracy: 0.0200969 	48
Accuracy: 0.0201037 	49
Accuracy: 0.0201112 	50
Accuracy: 0.0201191 	51
Accuracy: 0.0201276 	52
Accuracy: 0.0201365 	53
Accuracy: 0.0201459 	54
Accuracy: 0.020156 	55
Accuracy: 0.0201665 	56
Accuracy: 0.0201776 	57
Accuracy: 0.0201894 	58
Accuracy: 0.0202019 	59
Accuracy: 0.0202151 	60
Accuracy: 0.020229 	61
Accuracy: 0.0202436 	62
Accuracy: 0.0202588 	63
Accuracy: 0.0202745 	64
Accuracy: 0.0202906 	65
Accuracy: 0.0203071 	66
Accuracy: 0.0203245 	67
Accuracy: 0.0203427 	68
Accuracy: 0.020362 	69
Accuracy: 0.0203819 	70
Accuracy: 0.0204024 	71
Accuracy: 0.020424 	72
Accuracy: 0.020446 	73
Accuracy: 0.0204685 	74
Accuracy: 0.0204918 	75
Accuracy: 0.0205157 	76
Accuracy: 0.0205405 	77
Accuracy: 0.0205659 	78
Accuracy: 0.0205918 	79
Accuracy: 0.0206183 	80
Accuracy: 0.0206458 	81
Accuracy: 0.0206741 	82
Accuracy: 0.0207032 	83
Accuracy: 0.020733 	84
Accuracy: 0.0207636 	85
Accuracy: 0.0207949 	86
Accuracy: 0.0208273 	87
Accuracy: 0.0208607 	88
Accuracy: 0.020895 	89
Accuracy: 0.02093 	90
Accuracy: 0.0209657 	91
Accuracy: 0.0210022 	92
Accuracy: 0.0210394 	93
Accuracy: 0.0210774 	94
Accuracy: 0.0211161 	95
Accuracy: 0.0211555 	96
Accuracy: 0.0211959 	97
Accuracy: 0.0212372 	98
Accuracy: 0.0212796 	99
Accuracy: 0.0213422 	0
Accuracy: 0.0213334 	1
Accuracy: 0.0213246 	2
Accuracy: 0.0213159 	3
Accuracy: 0.0213073 	4
Accuracy: 0.0212988 	5
Accuracy: 0.0212903 	6
Accuracy: 0.021282 	7
Accuracy: 0.0212736 	8
Accuracy: 0.0212654 	9
Accuracy: 0.0212572 	10
Accuracy: 0.0212491 	11
Accuracy: 0.0212411 	12
Accuracy: 0.0212331 	13
Accuracy: 0.0212252 	14
Accuracy: 0.0212174 	15
Accuracy: 0.0212096 	16
Accuracy: 0.021202 	17
Accuracy: 0.0211943 	18
Accuracy: 0.0211868 	19
Accuracy: 0.0211794 	20
Accuracy: 0.021172 	21
Accuracy: 0.0211647 	22
Accuracy: 0.0211575 	23
Accuracy: 0.0211503 	24
Accuracy: 0.0211432 	25
Accuracy: 0.0211361 	26
Accuracy: 0.0211291 	27
Accuracy: 0.0211222 	28
Accuracy: 0.0211153 	29
Accuracy: 0.0211085 	30
Accuracy: 0.0211018 	31
Accuracy: 0.0210951 	32
Accuracy: 0.0210885 	33
Accuracy: 0.021082 	34
Accuracy: 0.0210755 	35
Accuracy: 0.0210691 	36
Accuracy: 0.0210628 	37
Accuracy: 0.0210566 	38
Accuracy: 0.0210504 	39
Accuracy: 0.0210444 	40
Accuracy: 0.0210388 	41
Accuracy: 0.0210334 	42
Accuracy: 0.021028 	43
Accuracy: 0.0210227 	44
Accuracy: 0.0210176 	45
Accuracy: 0.0210125 	46
Accuracy: 0.0210075 	47
Accuracy: 0.0210025 	48
Accuracy: 0.0209976 	49
Accuracy: 0.0209928 	50
Accuracy: 0.020988 	51
Accuracy: 0.0209833 	52
Accuracy: 0.0209787 	53
Accuracy: 0.0209742 	54
Accuracy: 0.0209697 	55
Accuracy: 0.0209652 	56
Accuracy: 0.0209608 	57
Accuracy: 0.0209565 	58
Accuracy: 0.0209522 	59
Accuracy: 0.020948 	60
Accuracy: 0.0209438 	61
Accuracy: 0.0209397 	62
Accuracy: 0.0209357 	63
Accuracy: 0.0209317 	64
Accuracy: 0.0209278 	65
Accuracy: 0.0209239 	66
Accuracy: 0.0209201 	67
Accuracy: 0.0209163 	68
Accuracy: 0.0209126 	69
Accuracy: 0.0209089 	70
Accuracy: 0.0209054 	71
Accuracy: 0.0209018 	72
Accuracy: 0.0208983 	73
Accuracy: 0.0208949 	74
Accuracy: 0.0208916 	75
Accuracy: 0.0208883 	76
Accuracy: 0.0208851 	77
Accuracy: 0.0208819 	78
Accuracy: 0.0208788 	79
Accuracy: 0.0208758 	80
Accuracy: 0.0208727 	81
Accuracy: 0.0208698 	82
Accuracy: 0.0208669 	83
Accuracy: 0.0208641 	84
Accuracy: 0.0208613 	85
Accuracy: 0.0208586 	86
Accuracy: 0.0208559 	87
Accuracy: 0.0208533 	88
Accuracy: 0.0208508 	89
Accuracy: 0.0208483 	90
Accuracy: 0.0208459 	91
Accuracy: 0.0208435 	92
Accuracy: 0.0208412 	93
Accuracy: 0.0208389 	94
Accuracy: 0.0208367 	95
Accuracy: 0.0208345 	96
Accuracy: 0.0208324 	97
Accuracy: 0.0208304 	98
Accuracy: 0.0208284 	99
Accuracy: 0.0208265 	0
Accuracy: 0.0208142 	1
Accuracy: 0.0208018 	2
Accuracy: 0.0207895 	3
Accuracy: 0.0207772 	4
Accuracy: 0.020765 	5
Accuracy: 0.0207527 	6
Accuracy: 0.0207405 	7
Accuracy: 0.0207283 	8
Accuracy: 0.0207161 	9
Accuracy: 0.0207039 	10
Accuracy: 0.0206917 	11
Accuracy: 0.0206795 	12
Accuracy: 0.0206673 	13
Accuracy: 0.0206552 	14
Accuracy: 0.020643 	15
Accuracy: 0.0206325 	16
Accuracy: 0.0206222 	17
Accuracy: 0.0206119 	18
Accuracy: 0.0206017 	19
Accuracy: 0.0205915 	20
Accuracy: 0.0205812 	21
Accuracy: 0.020571 	22
Accuracy: 0.0205608 	23
Accuracy: 0.0205506 	24
Accuracy: 0.0205404 	25
Accuracy: 0.0205302 	26
Accuracy: 0.02052 	27
Accuracy: 0.0205098 	28
Accuracy: 0.0204997 	29
Accuracy: 0.0204895 	30
Accuracy: 0.0204794 	31
Accuracy: 0.0204692 	32
Accuracy: 0.0204591 	33
Accuracy: 0.020449 	34
Accuracy: 0.0204389 	35
Accuracy: 0.0204287 	36
Accuracy: 0.0204186 	37
Accuracy: 0.0204086 	38
Accuracy: 0.0203985 	39
Accuracy: 0.0203884 	40
Accuracy: 0.0203783 	41
Accuracy: 0.0203683 	42
Accuracy: 0.0203582 	43
Accuracy: 0.0203481 	44
Accuracy: 0.0203381 	45
Accuracy: 0.0203281 	46
Accuracy: 0.020318 	47
Accuracy: 0.020308 	48
Accuracy: 0.020298 	49
Accuracy: 0.020288 	50
Accuracy: 0.020278 	51
Accuracy: 0.020268 	52
Accuracy: 0.0202581 	53
Accuracy: 0.0202481 	54
Accuracy: 0.0202382 	55
Accuracy: 0.0202282 	56
Accuracy: 0.0202183 	57
Accuracy: 0.0202084 	58
Accuracy: 0.0201985 	59
Accuracy: 0.0201886 	60
Accuracy: 0.0201787 	61
Accuracy: 0.0201688 	62
Accuracy: 0.0201589 	63
Accuracy: 0.020149 	64
Accuracy: 0.0201391 	65
Accuracy: 0.0201293 	66
Accuracy: 0.0201194 	67
Accuracy: 0.0201096 	68
Accuracy: 0.0200997 	69
Accuracy: 0.0200899 	70
Accuracy: 0.02008 	71
Accuracy: 0.0200702 	72
Accuracy: 0.0200604 	73
Accuracy: 0.0200506 	74
Accuracy: 0.0200408 	75
Accuracy: 0.020031 	76
Accuracy: 0.0200212 	77
Accuracy: 0.0200115 	78
Accuracy: 0.0200017 	79
Accuracy: 0.0199919 	80
Accuracy: 0.0199822 	81
Accuracy: 0.0199725 	82
Accuracy: 0.0199627 	83
Accuracy: 0.019953 	84
Accuracy: 0.0199433 	85
Accuracy: 0.0199336 	86
Accuracy: 0.0199239 	87
Accuracy: 0.0199143 	88
Accuracy: 0.0199046 	89
Accuracy: 0.0198949 	90
Accuracy: 0.0198853 	91
Accuracy: 0.0198756 	92
Accuracy: 0.019866 	93
Accuracy: 0.0198563 	94
Accuracy: 0.0198467 	95
Accuracy: 0.0198371 	96
Accuracy: 0.0198275 	97
Accuracy: 0.0198179 	98
Accuracy: 0.0198083 	99
Accuracy: 0.0197987 	0
Accuracy: 0.0197993 	1
Accuracy: 0.0197999 	2
Accuracy: 0.0198005 	3
Accuracy: 0.0198011 	4
Accuracy: 0.0198017 	5
Accuracy: 0.0198024 	6
Accuracy: 0.019803 	7
Accuracy: 0.0198036 	8
Accuracy: 0.0198043 	9
Accuracy: 0.0198049 	10
Accuracy: 0.0198056 	11
Accuracy: 0.0198062 	12
Accuracy: 0.0198068 	13
Accuracy: 0.0198075 	14
Accuracy: 0.0198081 	15
Accuracy: 0.0198088 	16
Accuracy: 0.0198094 	17
Accuracy: 0.0198101 	18
Accuracy: 0.0198107 	19
Accuracy: 0.0198114 	20
Accuracy: 0.019812 	21
Accuracy: 0.0198127 	22
Accuracy: 0.0198134 	23
Accuracy: 0.019814 	24
Accuracy: 0.0198147 	25
Accuracy: 0.0198153 	26
Accuracy: 0.019816 	27
Accuracy: 0.0198166 	28
Accuracy: 0.0198173 	29
Accuracy: 0.0198179 	30
Accuracy: 0.0198186 	31
Accuracy: 0.0198193 	32
Accuracy: 0.01982 	33
Accuracy: 0.0198207 	34
Accuracy: 0.0198213 	35
Accuracy: 0.019822 	36
Accuracy: 0.0198227 	37
Accuracy: 0.0198233 	38
Accuracy: 0.019824 	39
Accuracy: 0.0198247 	40
Accuracy: 0.0198255 	41
Accuracy: 0.0198262 	42
Accuracy: 0.019827 	43
Accuracy: 0.0198278 	44
Accuracy: 0.0198286 	45
Accuracy: 0.0198294 	46
Accuracy: 0.0198302 	47
Accuracy: 0.019831 	48
Accuracy: 0.0198319 	49
Accuracy: 0.0198327 	50
Accuracy: 0.0198336 	51
Accuracy: 0.0198344 	52
Accuracy: 0.0198353 	53
Accuracy: 0.0198361 	54
Accuracy: 0.019837 	55
Accuracy: 0.0198379 	56
Accuracy: 0.0198388 	57
Accuracy: 0.0198397 	58
Accuracy: 0.0198406 	59
Accuracy: 0.0198415 	60
Accuracy: 0.0198424 	61
Accuracy: 0.0198433 	62
Accuracy: 0.0198443 	63
Accuracy: 0.0198452 	64
Accuracy: 0.0198461 	65
Accuracy: 0.019847 	66
Accuracy: 0.0198479 	67
Accuracy: 0.0198489 	68
Accuracy: 0.0198498 	69
Accuracy: 0.0198508 	70
Accuracy: 0.0198517 	71
Accuracy: 0.0198527 	72
Accuracy: 0.0198536 	73
Accuracy: 0.0198546 	74
Accuracy: 0.0198555 	75
Accuracy: 0.0198564 	76
Accuracy: 0.0198574 	77
Accuracy: 0.0198583 	78
Accuracy: 0.0198593 	79
Accuracy: 0.0198602 	80
Accuracy: 0.0198611 	81
Accuracy: 0.0198621 	82
Accuracy: 0.019863 	83
Accuracy: 0.0198639 	84
Accuracy: 0.0198648 	85
Accuracy: 0.0198658 	86
Accuracy: 0.0198667 	87
Accuracy: 0.0198676 	88
Accuracy: 0.0198685 	89
Accuracy: 0.0198694 	90
Accuracy: 0.0198704 	91
Accuracy: 0.0198713 	92
Accuracy: 0.0198723 	93
Accuracy: 0.0198732 	94
Accuracy: 0.0198742 	95
Accuracy: 0.0198752 	96
Accuracy: 0.0198763 	97
Accuracy: 0.0198774 	98
Accuracy: 0.0198785 	99
Accuracy: 0.0198797 	0
Accuracy: 0.0198789 	1
Accuracy: 0.019878 	2
Accuracy: 0.0198772 	3
Accuracy: 0.0198764 	4
Accuracy: 0.0198756 	5
Accuracy: 0.0198747 	6
Accuracy: 0.0198739 	7
Accuracy: 0.0198731 	8
Accuracy: 0.0198723 	9
Accuracy: 0.0198715 	10
Accuracy: 0.0198707 	11
Accuracy: 0.0198699 	12
Accuracy: 0.019869 	13
Accuracy: 0.0198682 	14
Accuracy: 0.0198674 	15
Accuracy: 0.0198665 	16
Accuracy: 0.0198657 	17
Accuracy: 0.0198648 	18
Accuracy: 0.019864 	19
Accuracy: 0.0198631 	20
Accuracy: 0.0198622 	21
Accuracy: 0.0198614 	22
Accuracy: 0.0198605 	23
Accuracy: 0.0198596 	24
Accuracy: 0.0198588 	25
Accuracy: 0.0198579 	26
Accuracy: 0.019857 	27
Accuracy: 0.0198561 	28
Accuracy: 0.0198552 	29
Accuracy: 0.0198544 	30
Accuracy: 0.0198535 	31
Accuracy: 0.0198526 	32
Accuracy: 0.0198517 	33
Accuracy: 0.0198509 	34
Accuracy: 0.01985 	35
Accuracy: 0.0198491 	36
Accuracy: 0.0198482 	37
Accuracy: 0.0198473 	38
Accuracy: 0.0198464 	39
Accuracy: 0.0198455 	40
Accuracy: 0.0198446 	41
Accuracy: 0.0198437 	42
Accuracy: 0.0198428 	43
Accuracy: 0.0198419 	44
Accuracy: 0.0198409 	45
Accuracy: 0.01984 	46
Accuracy: 0.0198391 	47
Accuracy: 0.0198381 	48
Accuracy: 0.0198372 	49
Accuracy: 0.0198363 	50
Accuracy: 0.0198353 	51
Accuracy: 0.0198344 	52
Accuracy: 0.0198334 	53
Accuracy: 0.0198325 	54
Accuracy: 0.0198316 	55
Accuracy: 0.0198307 	56
Accuracy: 0.0198298 	57
Accuracy: 0.0198289 	58
Accuracy: 0.019828 	59
Accuracy: 0.0198271 	60
Accuracy: 0.0198262 	61
Accuracy: 0.0198252 	62
Accuracy: 0.0198243 	63
Accuracy: 0.0198234 	64
Accuracy: 0.0198225 	65
Accuracy: 0.0198216 	66
Accuracy: 0.0198207 	67
Accuracy: 0.0198199 	68
Accuracy: 0.019819 	69
Accuracy: 0.0198181 	70
Accuracy: 0.0198172 	71
Accuracy: 0.0198164 	72
Accuracy: 0.0198155 	73
Accuracy: 0.0198146 	74
Accuracy: 0.0198137 	75
Accuracy: 0.0198128 	76
Accuracy: 0.0198119 	77
Accuracy: 0.0198109 	78
Accuracy: 0.01981 	79
Accuracy: 0.0198091 	80
Accuracy: 0.0198082 	81
Accuracy: 0.0198072 	82
Accuracy: 0.0198063 	83
Accuracy: 0.0198053 	84
Accuracy: 0.0198044 	85
Accuracy: 0.0198034 	86
Accuracy: 0.0198025 	87
Accuracy: 0.0198016 	88
Accuracy: 0.0198006 	89
Accuracy: 0.0197997 	90
Accuracy: 0.0197988 	91
Accuracy: 0.0197979 	92
Accuracy: 0.019797 	93
Accuracy: 0.0197961 	94
Accuracy: 0.0197952 	95
Accuracy: 0.0197942 	96
Accuracy: 0.0197933 	97
Accuracy: 0.0197924 	98
Accuracy: 0.0197916 	99
Accuracy: 0.0197907 	0
Accuracy: 0.0197939 	1
Accuracy: 0.0197972 	2
Accuracy: 0.0198005 	3
Accuracy: 0.0198039 	4
Accuracy: 0.0198075 	5
Accuracy: 0.019811 	6
Accuracy: 0.0198146 	7
Accuracy: 0.0198182 	8
Accuracy: 0.0198217 	9
Accuracy: 0.0198252 	10
Accuracy: 0.0198288 	11
Accuracy: 0.0198323 	12
Accuracy: 0.0198359 	13
Accuracy: 0.0198395 	14
Accuracy: 0.019843 	15
Accuracy: 0.0198466 	16
Accuracy: 0.0198503 	17
Accuracy: 0.0198541 	18
Accuracy: 0.019858 	19
Accuracy: 0.0198619 	20
Accuracy: 0.0198658 	21
Accuracy: 0.0198697 	22
Accuracy: 0.0198736 	23
Accuracy: 0.0198776 	24
Accuracy: 0.0198815 	25
Accuracy: 0.0198854 	26
Accuracy: 0.0198893 	27
Accuracy: 0.0198932 	28
Accuracy: 0.0198972 	29
Accuracy: 0.0199011 	30
Accuracy: 0.0199051 	31
Accuracy: 0.0199092 	32
Accuracy: 0.0199134 	33
Accuracy: 0.0199175 	34
Accuracy: 0.0199217 	35
Accuracy: 0.0199258 	36
Accuracy: 0.0199299 	37
Accuracy: 0.019934 	38
Accuracy: 0.0199382 	39
Accuracy: 0.0199423 	40
Accuracy: 0.0199465 	41
Accuracy: 0.0199507 	42
Accuracy: 0.0199548 	43
Accuracy: 0.019959 	44
Accuracy: 0.0199632 	45
Accuracy: 0.0199673 	46
Accuracy: 0.0199715 	47
Accuracy: 0.0199756 	48
Accuracy: 0.0199797 	49
Accuracy: 0.0199839 	50
Accuracy: 0.019988 	51
Accuracy: 0.0199922 	52
Accuracy: 0.0199966 	53
Accuracy: 0.0200009 	54
Accuracy: 0.0200053 	55
Accuracy: 0.0200096 	56
Accuracy: 0.020014 	57
Accuracy: 0.0200184 	58
Accuracy: 0.0200228 	59
Accuracy: 0.0200272 	60
Accuracy: 0.0200317 	61
Accuracy: 0.0200362 	62
Accuracy: 0.0200407 	63
Accuracy: 0.0200453 	64
Accuracy: 0.0200501 	65
Accuracy: 0.0200549 	66
Accuracy: 0.0200597 	67
Accuracy: 0.0200645 	68
Accuracy: 0.0200693 	69
Accuracy: 0.0200741 	70
Accuracy: 0.020079 	71
Accuracy: 0.0200839 	72
Accuracy: 0.0200888 	73
Accuracy: 0.0200937 	74
Accuracy: 0.0200986 	75
Accuracy: 0.0201036 	76
Accuracy: 0.0201085 	77
Accuracy: 0.0201134 	78
Accuracy: 0.0201183 	79
Accuracy: 0.0201233 	80
Accuracy: 0.0201282 	81
Accuracy: 0.0201331 	82
Accuracy: 0.020138 	83
Accuracy: 0.0201429 	84
Accuracy: 0.0201478 	85
Accuracy: 0.0201527 	86
Accuracy: 0.0201576 	87
Accuracy: 0.0201625 	88
Accuracy: 0.0201674 	89
Accuracy: 0.0201723 	90
Accuracy: 0.0201771 	91
Accuracy: 0.020182 	92
Accuracy: 0.0201869 	93
Accuracy: 0.0201917 	94
Accuracy: 0.0201966 	95
Accuracy: 0.0202014 	96
Accuracy: 0.0202063 	97
Accuracy: 0.0202111 	98
Accuracy: 0.0202159 	99
Accuracy: 0.0202208 	0
Accuracy: 0.0202238 	1
Accuracy: 0.0202267 	2
Accuracy: 0.0202295 	3
Accuracy: 0.0202322 	4
Accuracy: 0.0202348 	5
Accuracy: 0.0202373 	6
Accuracy: 0.0202397 	7
Accuracy: 0.0202421 	8
Accuracy: 0.0202444 	9
Accuracy: 0.0202465 	10
Accuracy: 0.0202486 	11
Accuracy: 0.0202506 	12
Accuracy: 0.0202526 	13
Accuracy: 0.0202544 	14
Accuracy: 0.0202561 	15
Accuracy: 0.0202578 	16
Accuracy: 0.0202593 	17
Accuracy: 0.0202608 	18
Accuracy: 0.0202622 	19
Accuracy: 0.0202635 	20
Accuracy: 0.0202647 	21
Accuracy: 0.0202658 	22
Accuracy: 0.0202668 	23
Accuracy: 0.0202677 	24
Accuracy: 0.0202686 	25
Accuracy: 0.0202693 	26
Accuracy: 0.02027 	27
Accuracy: 0.0202706 	28
Accuracy: 0.0202711 	29
Accuracy: 0.0202715 	30
Accuracy: 0.0202718 	31
Accuracy: 0.020272 	32
Accuracy: 0.0202722 	33
Accuracy: 0.0202723 	34
Accuracy: 0.0202722 	35
Accuracy: 0.0202721 	36
Accuracy: 0.0202719 	37
Accuracy: 0.0202716 	38
Accuracy: 0.0202713 	39
Accuracy: 0.0202708 	40
Accuracy: 0.0202703 	41
Accuracy: 0.0202696 	42
Accuracy: 0.0202689 	43
Accuracy: 0.020268 	44
Accuracy: 0.020267 	45
Accuracy: 0.0202659 	46
Accuracy: 0.0202648 	47
Accuracy: 0.0202635 	48
Accuracy: 0.0202622 	49
Accuracy: 0.0202607 	50
Accuracy: 0.0202592 	51
Accuracy: 0.0202576 	52
Accuracy: 0.0202559 	53
Accuracy: 0.0202541 	54
Accuracy: 0.0202523 	55
Accuracy: 0.0202503 	56
Accuracy: 0.0202483 	57
Accuracy: 0.0202461 	58
Accuracy: 0.0202439 	59
Accuracy: 0.0202416 	60
Accuracy: 0.0202392 	61
Accuracy: 0.0202367 	62
Accuracy: 0.0202342 	63
Accuracy: 0.0202315 	64
Accuracy: 0.0202287 	65
Accuracy: 0.0202259 	66
Accuracy: 0.020223 	67
Accuracy: 0.02022 	68
Accuracy: 0.0202168 	69
Accuracy: 0.0202136 	70
Accuracy: 0.0202103 	71
Accuracy: 0.0202069 	72
Accuracy: 0.0202034 	73
Accuracy: 0.0201998 	74
Accuracy: 0.0201961 	75
Accuracy: 0.0201924 	76
Accuracy: 0.0201885 	77
Accuracy: 0.0201846 	78
Accuracy: 0.0201805 	79
Accuracy: 0.0201764 	80
Accuracy: 0.0201722 	81
Accuracy: 0.0201679 	82
Accuracy: 0.0201635 	83
Accuracy: 0.020159 	84
Accuracy: 0.0201544 	85
Accuracy: 0.0201497 	86
Accuracy: 0.020145 	87
Accuracy: 0.0201402 	88
Accuracy: 0.0201352 	89
Accuracy: 0.0201302 	90
Accuracy: 0.0201251 	91
Accuracy: 0.02012 	92
Accuracy: 0.0201146 	93
Accuracy: 0.0201093 	94
Accuracy: 0.0201038 	95
Accuracy: 0.0200982 	96
Accuracy: 0.0200926 	97
Accuracy: 0.0200868 	98
Accuracy: 0.0200809 	99
Accuracy: 0.020075 	0
Accuracy: 0.0200753 	1
Accuracy: 0.0200757 	2
Accuracy: 0.020076 	3
Accuracy: 0.0200763 	4
Accuracy: 0.0200767 	5
Accuracy: 0.020077 	6
Accuracy: 0.0200773 	7
Accuracy: 0.0200776 	8
Accuracy: 0.0200779 	9
Accuracy: 0.0200782 	10
Accuracy: 0.0200784 	11
Accuracy: 0.0200787 	12
Accuracy: 0.020079 	13
Accuracy: 0.0200793 	14
Accuracy: 0.0200795 	15
Accuracy: 0.0200798 	16
Accuracy: 0.02008 	17
Accuracy: 0.0200803 	18
Accuracy: 0.0200805 	19
Accuracy: 0.0200807 	20
Accuracy: 0.0200809 	21
Accuracy: 0.0200812 	22
Accuracy: 0.0200814 	23
Accuracy: 0.0200816 	24
Accuracy: 0.0200818 	25
Accuracy: 0.020082 	26
Accuracy: 0.0200822 	27
Accuracy: 0.0200823 	28
Accuracy: 0.0200825 	29
Accuracy: 0.0200827 	30
Accuracy: 0.0200829 	31
Accuracy: 0.020083 	32
Accuracy: 0.0200832 	33
Accuracy: 0.0200833 	34
Accuracy: 0.0200835 	35
Accuracy: 0.0200836 	36
Accuracy: 0.0200837 	37
Accuracy: 0.0200839 	38
Accuracy: 0.020084 	39
Accuracy: 0.0200841 	40
Accuracy: 0.0200842 	41
Accuracy: 0.0200843 	42
Accuracy: 0.0200844 	43
Accuracy: 0.0200845 	44
Accuracy: 0.0200846 	45
Accuracy: 0.0200847 	46
Accuracy: 0.0200848 	47
Accuracy: 0.0200848 	48
Accuracy: 0.0200849 	49
Accuracy: 0.020085 	50
Accuracy: 0.020085 	51
Accuracy: 0.0200851 	52
Accuracy: 0.0200851 	53
Accuracy: 0.0200851 	54
Accuracy: 0.0200852 	55
Accuracy: 0.0200852 	56
Accuracy: 0.0200852 	57
Accuracy: 0.0200852 	58
Accuracy: 0.0200852 	59
Accuracy: 0.0200853 	60
Accuracy: 0.0200853 	61
Accuracy: 0.0200852 	62
Accuracy: 0.0200852 	63
Accuracy: 0.0200852 	64
Accuracy: 0.0200852 	65
Accuracy: 0.0200852 	66
Accuracy: 0.0200851 	67
Accuracy: 0.0200851 	68
Accuracy: 0.020085 	69
Accuracy: 0.020085 	70
Accuracy: 0.0200849 	71
Accuracy: 0.0200849 	72
Accuracy: 0.0200848 	73
Accuracy: 0.0200847 	74
Accuracy: 0.0200847 	75
Accuracy: 0.0200846 	76
Accuracy: 0.0200845 	77
Accuracy: 0.0200844 	78
Accuracy: 0.0200843 	79
Accuracy: 0.0200842 	80
Accuracy: 0.0200841 	81
Accuracy: 0.0200839 	82
Accuracy: 0.0200838 	83
Accuracy: 0.0200837 	84
Accuracy: 0.0200835 	85
Accuracy: 0.0200834 	86
Accuracy: 0.0200833 	87
Accuracy: 0.0200831 	88
Accuracy: 0.0200829 	89
Accuracy: 0.0200828 	90
Accuracy: 0.0200826 	91
Accuracy: 0.0200824 	92
Accuracy: 0.0200822 	93
Accuracy: 0.0200821 	94
Accuracy: 0.0200819 	95
Accuracy: 0.0200817 	96
Accuracy: 0.0200815 	97
Accuracy: 0.0200813 	98
Accuracy: 0.020081 	99
Accuracy: 0.0200808 	0
Accuracy: 0.0203569 	1
Accuracy: 0.0206356 	2
Accuracy: 0.0209164 	3
Accuracy: 0.0211997 	4
Accuracy: 0.0214849 	5
Accuracy: 0.0217685 	6
Accuracy: 0.0220339 	7
Accuracy: 0.0222709 	8
Accuracy: 0.0224911 	9
Accuracy: 0.022706 	10
Accuracy: 0.0229052 	11
Accuracy: 0.0230859 	12
Accuracy: 0.0232484 	13
Accuracy: 0.0233763 	14
Accuracy: 0.0234959 	15
Accuracy: 0.0236023 	16
Accuracy: 0.023705 	17
Accuracy: 0.0238066 	18
Accuracy: 0.0238928 	19
Accuracy: 0.0239619 	20
Accuracy: 0.0240316 	21
Accuracy: 0.0240893 	22
Accuracy: 0.024125 	23
Accuracy: 0.0241582 	24
Accuracy: 0.0241862 	25
Accuracy: 0.024201 	26
Accuracy: 0.0242114 	27
Accuracy: 0.0242118 	28
Accuracy: 0.0242116 	29
Accuracy: 0.0242131 	30
Accuracy: 0.0242146 	31
Accuracy: 0.0241987 	32
Accuracy: 0.024165 	33
Accuracy: 0.0241322 	34
Accuracy: 0.0240934 	35
Accuracy: 0.0240355 	36
Accuracy: 0.0239618 	37
Accuracy: 0.0238864 	38
Accuracy: 0.0238169 	39
Accuracy: 0.023738 	40
Accuracy: 0.0236492 	41
Accuracy: 0.0235559 	42
Accuracy: 0.0234521 	43
Accuracy: 0.0233298 	44
Accuracy: 0.0231992 	45
Accuracy: 0.0230626 	46
Accuracy: 0.0229212 	47
Accuracy: 0.0227835 	48
Accuracy: 0.0226523 	49
Accuracy: 0.0225167 	50
Accuracy: 0.0223724 	51
Accuracy: 0.0222229 	52
Accuracy: 0.0220651 	53
Accuracy: 0.0219167 	54
Accuracy: 0.0217686 	55
Accuracy: 0.0216151 	56
Accuracy: 0.0214671 	57
Accuracy: 0.021325 	58
Accuracy: 0.0211846 	59
Accuracy: 0.0210485 	60
Accuracy: 0.0209184 	61
Accuracy: 0.0207854 	62
Accuracy: 0.0206561 	63
Accuracy: 0.0205331 	64
Accuracy: 0.0204177 	65
Accuracy: 0.020314 	66
Accuracy: 0.0202136 	67
Accuracy: 0.0201161 	68
Accuracy: 0.0200194 	69
Accuracy: 0.0199337 	70
Accuracy: 0.0198545 	71
Accuracy: 0.0197815 	72
Accuracy: 0.0197047 	73
Accuracy: 0.0196339 	74
Accuracy: 0.0195597 	75
Accuracy: 0.0194849 	76
Accuracy: 0.0194102 	77
Accuracy: 0.0193459 	78
Accuracy: 0.0192963 	79
Accuracy: 0.0192578 	80
Accuracy: 0.0192233 	81
Accuracy: 0.0191901 	82
Accuracy: 0.0191636 	83
Accuracy: 0.0191395 	84
Accuracy: 0.0191188 	85
Accuracy: 0.0191088 	86
Accuracy: 0.0191169 	87
Accuracy: 0.0191236 	88
Accuracy: 0.0191427 	89
Accuracy: 0.0191686 	90
Accuracy: 0.0192007 	91
Accuracy: 0.0192401 	92
Accuracy: 0.0192949 	93
Accuracy: 0.0193592 	94
Accuracy: 0.0194373 	95
Accuracy: 0.0195292 	96
Accuracy: 0.0196291 	97
Accuracy: 0.0197447 	98
Accuracy: 0.0198762 	99
Accuracy: 0.0200153 	0
Accuracy: 0.0207024 	1
Accuracy: 0.0213059 	2
Accuracy: 0.0217698 	3
Accuracy: 0.0220383 	4
Accuracy: 0.0222046 	5
Accuracy: 0.0223598 	6
Accuracy: 0.0225158 	7
Accuracy: 0.0226773 	8
Accuracy: 0.0228451 	9
Accuracy: 0.0230167 	10
Accuracy: 0.0231897 	11
Accuracy: 0.0233637 	12
Accuracy: 0.0235379 	13
Accuracy: 0.0237143 	14
Accuracy: 0.0238869 	15
Accuracy: 0.0240567 	16
Accuracy: 0.0242232 	17
Accuracy: 0.0243852 	18
Accuracy: 0.0245452 	19
Accuracy: 0.0247036 	20
Accuracy: 0.0248531 	21
Accuracy: 0.0249961 	22
Accuracy: 0.0251396 	23
Accuracy: 0.0252832 	24
Accuracy: 0.0254231 	25
Accuracy: 0.025559 	26
Accuracy: 0.0256893 	27
Accuracy: 0.0258134 	28
Accuracy: 0.0259358 	29
Accuracy: 0.026053 	30
Accuracy: 0.0261646 	31
Accuracy: 0.026273 	32
Accuracy: 0.0263786 	33
Accuracy: 0.0264854 	34
Accuracy: 0.0265961 	35
Accuracy: 0.0267127 	36
Accuracy: 0.0268361 	37
Accuracy: 0.0269602 	38
Accuracy: 0.0270798 	39
Accuracy: 0.0271998 	40
Accuracy: 0.0273172 	41
Accuracy: 0.0274336 	42
Accuracy: 0.0275477 	43
Accuracy: 0.0276664 	44
Accuracy: 0.0277849 	45
Accuracy: 0.0278976 	46
Accuracy: 0.02801 	47
Accuracy: 0.0281245 	48
Accuracy: 0.0282444 	49
Accuracy: 0.0283646 	50
Accuracy: 0.0284921 	51
Accuracy: 0.0286216 	52
Accuracy: 0.0287513 	53
Accuracy: 0.0288851 	54
Accuracy: 0.0290199 	55
Accuracy: 0.0291578 	56
Accuracy: 0.0292997 	57
Accuracy: 0.0294495 	58
Accuracy: 0.0296061 	59
Accuracy: 0.0297685 	60
Accuracy: 0.0299371 	61
Accuracy: 0.0301057 	62
Accuracy: 0.0302801 	63
Accuracy: 0.0304595 	64
Accuracy: 0.0306355 	65
Accuracy: 0.0308164 	66
Accuracy: 0.031005 	67
Accuracy: 0.0311976 	68
Accuracy: 0.031395 	69
Accuracy: 0.0315953 	70
Accuracy: 0.0318017 	71
Accuracy: 0.0320168 	72
Accuracy: 0.0322377 	73
Accuracy: 0.0324648 	74
Accuracy: 0.0326934 	75
Accuracy: 0.0329222 	76
Accuracy: 0.0331553 	77
Accuracy: 0.0333987 	78
Accuracy: 0.0336513 	79
Accuracy: 0.0339044 	80
Accuracy: 0.0341596 	81
Accuracy: 0.0344184 	82
Accuracy: 0.0346755 	83
Accuracy: 0.0349338 	84
Accuracy: 0.0351925 	85
Accuracy: 0.0354541 	86
Accuracy: 0.0357217 	87
Accuracy: 0.0359995 	88
Accuracy: 0.0362857 	89
Accuracy: 0.0365801 	90
Accuracy: 0.0368773 	91
Accuracy: 0.0371775 	92
Accuracy: 0.0374828 	93
Accuracy: 0.0377883 	94
Accuracy: 0.0380957 	95
Accuracy: 0.03841 	96
Accuracy: 0.0387152 	97
Accuracy: 0.039016 	98
Accuracy: 0.0393312 	99

In [ ]: