In [41]:
#Imports and model parameters
import tensorflow as tf
import numpy as np
#Simple network: given quadratic coefficients a,b,c, the data generator below draws
#random x-values (uniform in [0,1) -- see generatecandidate4) and evaluates
#a*x^2 + b*x + c at them; the network is trained to regress that function.
import copy
alpha,hidden_dim,hidden_dim2 = (.001,4,4)  # legacy hyperparams; alpha not referenced in the visible cells
thresh = .03  # stop training once average cost falls below this
# Parameters
learning_rate = 0.0005
training_epochs = 15
batch_size = 2000
display_step = 1     # print progress every `display_step` epochs
# Network Parameters
n_hidden_1 = 4 # 1st layer number of features
n_hidden_2 = 4 # 2nd layer number of features
n_input = 1 # Guess quadratic function
n_classes = 1 #
#synapses = []
models = []  # trained multilayer_perceptron instances, filled by the model-generation cell
#Testing starting in the same place
#synapse0 = 2*np.random.random((1,hidden_dim)) - 1
#synapse1 = 2*np.random.random((hidden_dim,hidden_dim2)) - 1
#synapse2 = 2*np.random.random((hidden_dim2,1)) - 1
#copy_model = multilayer_perceptron(ind=0)
In [59]:
#Function definitions
def func(x,a,b,c):
    """Evaluate the quadratic a*x^2 + b*x + c at the point x."""
    return a * x**2 + b * x + c
def generatecandidate4(a,b,c,tot):
    """Generate `tot` training pairs for the quadratic a*x^2 + b*x + c.

    Each input is a single-element list [x0] with x0 drawn uniformly from
    [0, 1) via np.random.random(); each target is [func(x0, a, b, c)].
    The (tot, 1) nesting matches the [None, 1] tf placeholders used below.

    Args:
        a, b, c: quadratic coefficients.
        tot: number of samples to generate.

    Returns:
        (candidate, candidatesolutions): parallel lists of length `tot`.
    """
    # range() instead of Python-2-only xrange(); the pointless inner
    # `for x in xrange(1)` loop is collapsed to a direct one-element list.
    # One np.random.random() call per sample, exactly as before.
    candidate = [[np.random.random()] for _ in range(tot)]
    candidatesolutions = [[func(x[0],a,b,c)] for x in candidate]
    return (candidate, candidatesolutions)
def synapse_interpolate(synapse1, synapse2, t):
    """Linearly interpolate between two parameter arrays.

    Returns synapse1 when t == 0 and synapse2 when t == 1; works
    elementwise on numpy arrays as well as on plain scalars.
    """
    delta = synapse2 - synapse1
    return synapse1 + delta * t
def model_interpolate(w1,b1,w2,b2,t):
    """Interpolate between two models' parameters at fraction t.

    Applies synapse_interpolate elementwise across the paired lists of
    weight arrays (w1, w2) and bias arrays (b1, b2).

    Returns:
        (weights, biases): two lists of interpolated arrays.
    """
    interp_weights = [synapse_interpolate(wa, wb, t) for wa, wb in zip(w1, w2)]
    interp_biases = [synapse_interpolate(ba, bb, t) for ba, bb in zip(b1, b2)]
    return interp_weights, interp_biases
def InterpBeadError(w1,b1, w2,b2, write = False, name = "00"):
    """Scan the straight-line interpolation between two trained models.

    For t = 0.00, 0.01, ..., 0.99 builds the model whose parameters are
    model_interpolate(w1,b1,w2,b2,t) and evaluates its mean-squared error
    on a fresh 1000-point sample of the target quadratic (a=.5, b=.25, c=.1).

    Args:
        w1, b1: weight/bias arrays of the first endpoint model.
        w2, b2: weight/bias arrays of the second endpoint model.
        write: if True, also dump the 100 errors to file "f<name>.out".
        name: filename suffix used when write is True.

    Returns:
        (max error along the path, index tt in [0, 99] where it occurs).
    """
    errors = []
    xdat,ydat = generatecandidate4(.5, .25, .1, 1000)
    xdat = np.array(xdat)
    ydat = np.array(ydat)
    for tt in xrange(100):
        #print tt
        #accuracy = 0.
        t = tt/100.  # interpolation fraction in [0.0, 0.99]
        thiserror = 0
        #x0 = tf.placeholder("float", [None, n_input])
        #y0 = tf.placeholder("float", [None, n_classes])
        weights, biases = model_interpolate(w1,b1,w2,b2, t)
        # Each interpolated model gets its own tf.Graph (built inside
        # multilayer_perceptron), so graphs don't accumulate ops.
        interp_model = multilayer_perceptron(w=weights, b=biases)
        with interp_model.g.as_default():
            #interp_model.UpdateWeights(weights, biases)
            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = interp_model.predict(x)
            init = tf.initialize_all_variables()
            with tf.Session() as sess:
                sess.run(init)
                # MSE of the interpolated model on the fixed sample; the
                # reduce_mean/cast wrapper around the scalar MSE is a no-op
                # retained from the original code.
                correct_prediction = tf.reduce_mean(tf.square(pred-y))
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                print "Accuracy:", accuracy.eval({x: xdat, y: ydat}),"\t",tt,weights[0][0][0],weights[0][0][1]
                thiserror = accuracy.eval({x: xdat, y: ydat})
        errors.append(thiserror)
    if write == True:
        # One error value per line, in t order.
        with open("f" + str(name) + ".out",'w+') as f:
            for e in errors:
                f.write(str(e) + "\n")
    return max(errors), np.argmax(errors)
In [43]:
#Class definitions
class multilayer_perceptron():
    """Small regression MLP (two 4-unit ReLU hidden layers, linear output)
    that lives in its own private tf.Graph.

    Parameters can either be random-normal initialized (default) or injected
    from numpy arrays, which is how interpolated "bead" models are built.
    self.params is populated externally (by the training loops) with the
    final (weights, biases) numpy values.
    """
    #weights = {}
    #biases = {}
    def __init__(self, w=0, b=0, ind='00'):
        # w, b: optional lists of numpy arrays [h1, h2, out] / [b1, b2, out]
        #       used as initial Variable values; the default 0 means
        #       random-normal initialization instead.
        # ind: index used to build this model's checkpoint filename.
        self.index = ind #used for reading values from file
        #See the filesystem convention below (is this really necessary?)
        #I'm going to eschew writing to file for now because I'll be generating too many files
        #Currently, the last value of the parameters is stored in self.params to be read
        # NOTE(review): these locals shadow the module-level constants of the
        # same names; only the n_* values are actually used below.
        learning_rate = 0.01
        training_epochs = 15
        batch_size = 1000
        display_step = 1
        # Network Parameters
        n_hidden_1 = 4 # 1st layer number of features
        n_hidden_2 = 4 # 2nd layer number of features
        n_input = 1 # Guess quadratic function
        n_classes = 1 #
        self.g = tf.Graph()  # private graph so multiple models can coexist
        self.params = []
        with self.g.as_default():
            #Note that by default, weights and biases will be initialized to random normal dists
            if w==0:
                self.weights = {
                    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
                    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
                    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
                }
                self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
                self.biases = {
                    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
                    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
                    'out': tf.Variable(tf.random_normal([n_classes]))
                }
                self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
            else:
                # Initialize Variables from the supplied numpy arrays.
                self.weights = {
                    'h1': tf.Variable(w[0]),
                    'h2': tf.Variable(w[1]),
                    'out': tf.Variable(w[2])
                }
                self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
                self.biases = {
                    'b1': tf.Variable(b[0]),
                    'b2': tf.Variable(b[1]),
                    'out': tf.Variable(b[2])
                }
                self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
            self.saver = tf.train.Saver()
    def UpdateWeights(self, w, b):
        # Replace this model's parameter Variables with fresh ones built from
        # the given numpy arrays (adds new Variables to the graph each call).
        with self.g.as_default():
            self.weights = {
                'h1': tf.Variable(w[0]),
                'h2': tf.Variable(w[1]),
                'out': tf.Variable(w[2])
            }
            self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
            self.biases = {
                'b1': tf.Variable(b[0]),
                'b2': tf.Variable(b[1]),
                'out': tf.Variable(b[2])
            }
            self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
    def predict(self, x):
        # Build and return the forward-pass output tensor for input tensor x.
        with self.g.as_default():
            layer_1 = tf.add(tf.matmul(x, self.weights['h1']), self.biases['b1'])
            layer_1 = tf.nn.relu(layer_1)
            # Hidden layer with RELU activation
            layer_2 = tf.add(tf.matmul(layer_1, self.weights['h2']), self.biases['b2'])
            layer_2 = tf.nn.relu(layer_2)
            # Output layer with linear activation
            out_layer = tf.matmul(layer_2, self.weights['out']) + self.biases['out']
            return out_layer
    def ReturnParamsAsList(self):
        # Load this model's checkpoint from disk and return its parameters as
        # ([weight arrays], [bias arrays]) of numpy values.
        # NOTE(review): the checkpoint path is hardcoded and machine-specific.
        with self.g.as_default():
            with tf.Session() as sess:
                # Restore variables from disk
                self.saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model"+str(self.index)+".ckpt")
                return sess.run(self.weightslist), sess.run(self.biaseslist)
class WeightString:
    """A "string" of models (beads) strung between two trained endpoints.

    AllBeads[i] holds [weights, biases] for bead i. Bead 0 and bead -1 are
    the fixed endpoint models; interior beads start on the straight line
    between them (via model_interpolate) and are relaxed by SGDBead.
    """
    def __init__(self, w1, b1, w2, b2, numbeads, threshold):
        # w1,b1 / w2,b2: endpoint model parameters.
        # numbeads: number of interior beads to place on the segment.
        # threshold: stored target error (per-call `thresh` is what SGDBead uses).
        self.w1 = w1
        self.w2 = w2
        self.b1 = b1
        self.b2 = b2
        #self.w2, self.b2 = m2.params
        self.AllBeads = []
        self.threshold = threshold
        self.AllBeads.append([w1,b1])
        # Interior beads at evenly spaced fractions 1/(numbeads+1), 2/(numbeads+1), ...
        for n in xrange(numbeads):
            ws,bs = model_interpolate(w1,b1,w2,b2, (n + 1.)/(numbeads+1.))
            self.AllBeads.append([ws,bs])
        self.AllBeads.append([w2,b2])
        # Endpoints are already trained, so they start out converged.
        self.ConvergedList = [False for f in xrange(len(self.AllBeads))]
        self.ConvergedList[0] = True
        self.ConvergedList[-1] = True
    def SpringNorm(self, order):
        """Total ord-`order` norm of parameter differences between adjacent
        beads -- the 'spring energy' of the string."""
        total = 0.
        #Energy between mobile beads
        for i,b in enumerate(self.AllBeads):
            if i < len(self.AllBeads)-1:
                #print "Tallying energy between bead " + str(i) + " and bead " + str(i+1)
                subtotal = 0.
                # NOTE(review): b is [weights, biases], so len(b) == 2 -- these
                # loops only compare the first two of the three weight/bias
                # arrays per bead. Looks unintended; verify against the model
                # layout (h1, h2, out).
                for j in xrange(len(b)):
                    subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][0][j],self.AllBeads[i+1][0][j]),ord=order)#/len(self.beads[0][j])
                for j in xrange(len(b)):
                    subtotal += np.linalg.norm(np.subtract(self.AllBeads[i][1][j],self.AllBeads[i+1][1][j]),ord=order)#/len(self.beads[0][j])
                total+=subtotal
        return total#/len(self.beads)
    def SGDBead(self, bead, thresh, maxindex):
        """Train bead `bead` by SGD on the target quadratic until its MSE on a
        fresh test sample drops below `thresh`, then store the trained
        parameters back into AllBeads[bead].

        Args:
            bead: index into self.AllBeads.
            thresh: stopping error threshold.
            maxindex: iteration cap -- currently unused (the j > maxindex
                check is commented out below).

        Returns:
            The final test MSE of the bead.
        """
        finalerror = 0.
        #thresh = .05
        # Parameters (local; shadow the module-level training settings)
        learning_rate = 0.01
        training_epochs = 15
        batch_size = 1000
        display_step = 1
        curWeights, curBiases = self.AllBeads[bead]
        # Fresh model (and fresh graph) seeded with this bead's parameters.
        test_model = multilayer_perceptron(w=curWeights, b=curBiases)
        with test_model.g.as_default():
            x = tf.placeholder("float", [None, n_input])
            y = tf.placeholder("float", [None, n_classes])
            pred = test_model.predict(x)
            cost = tf.reduce_mean(tf.square(pred-y))
            optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
            init = tf.initialize_all_variables()
            stopcond = True
            with tf.Session() as sess:
                sess.run(init)
                xtest, ytest = generatecandidate4(.5,.25,.1,1000)
                j = 0
                while stopcond:
                    for epoch in range(training_epochs):
                        avg_cost = 0.
                        total_batch = int(10000/batch_size)
                        # avg_cost == 0. on entry, so this guard admits the
                        # first pass and skips epochs once converged.
                        if (avg_cost > thresh or avg_cost == 0.) and stopcond:
                            # Loop over all batches
                            for i in range(total_batch):
                                batch_x, batch_y = generatecandidate4(.5,.25,.1,batch_size)
                                # Run optimization op (backprop) and cost op (to get loss value)
                                _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                                              y: batch_y})
                                # Compute average loss
                                avg_cost += c / total_batch
                            # Display logs per epoch step
                            #if epoch % display_step == 0:
                            #    print "Epoch:", '%04d' % (epoch+1), "cost=", \
                            #        "{:.9f}".format(avg_cost)
                            if avg_cost < thresh:
                                stopcond = False
                    #print "Optimization Finished!"
                    # Test model
                    #correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                    correct_prediction = tf.reduce_mean(tf.square(pred-y))
                    # Calculate accuracy
                    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                    print "Accuracy:", accuracy.eval({x: xtest, y: ytest})
                    #if (j%5000) == 0:
                    #    print "Error after "+str(j)+" iterations:" + str(accuracy.eval({x: xtest, y: ytest}))
                    finalerror = accuracy.eval({x: xtest, y: ytest})
                    if finalerror < thresh or stopcond==False:# or j > maxindex:
                        #print "Changing stopcond!"
                        stopcond = False
                        #print "Final params:"
                        # Persist the trained parameters back onto the string.
                        test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                        self.AllBeads[bead]=test_model.params
                        print "Final bead error: " + str(finalerror)
                    j+=1
        return finalerror
In [44]:
#Model generation: train 3 independently-initialized MLPs on the quadratic
#(a=.5, b=.25, c=.1) until test MSE < thresh, then checkpoint each to disk.
for ii in xrange(3):
    '''weights = {
        'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
        'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
        'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
    }
    biases = {
        'b1': tf.Variable(tf.random_normal([n_hidden_1])),
        'b2': tf.Variable(tf.random_normal([n_hidden_2])),
        'out': tf.Variable(tf.random_normal([n_classes]))
    }'''
    # Construct model with different initial weights
    test_model = multilayer_perceptron(ind=ii)
    #Construct model with same initial weights
    #test_model = copy.copy(copy_model)
    #test_model.index = ii
    #print test_model.weights
    models.append(test_model)
    with test_model.g.as_default():
        x = tf.placeholder("float", [None, n_input])
        y = tf.placeholder("float", [None, n_classes])
        pred = test_model.predict(x)
        # Define loss and optimizer: plain MSE + SGD (uses the module-level
        # learning_rate/thresh/training_epochs/batch_size configured above).
        #cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
        cost = tf.reduce_mean(tf.square(pred-y))
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
        # Initializing the variables
        init = tf.initialize_all_variables()
        #remove the comment to get random initialization
        stopcond = True
        with tf.Session() as sess:
            sess.run(init)
            xtest, ytest = generatecandidate4(.5,.25,.1,1000)
            while stopcond:
                #print 'epoch:' + str(e)
                #X = []
                #y = []
                # NOTE(review): j is reset on every pass of the while loop, so
                # the (j%5000)==0 check below always fires -- verify intent.
                j = 0
                # Training cycle
                for epoch in range(training_epochs):
                    avg_cost = 0.
                    total_batch = int(10000/batch_size)
                    # avg_cost == 0. on entry admits the first pass; once
                    # converged, stopcond gates out further epochs.
                    if (avg_cost > thresh or avg_cost == 0.) and stopcond:
                        # Loop over all batches
                        for i in range(total_batch):
                            batch_x, batch_y = generatecandidate4(.5,.25,.1,batch_size)
                            # Run optimization op (backprop) and cost op (to get loss value)
                            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
                                                                          y: batch_y})
                            # Compute average loss
                            avg_cost += c / total_batch
                        # Display logs per epoch step
                        if epoch % display_step == 0:
                            print "Epoch:", '%04d' % (epoch+1), "cost=", \
                                "{:.9f}".format(avg_cost)
                        if avg_cost < thresh:
                            stopcond = False
                            #test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                            #save_path = test_model.saver.save(sess,"/home/dfreeman/PythonFun/tmp/model" + str(ii) + ".ckpt")
                print "Optimization Finished!"
                # Test model
                #correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
                correct_prediction = tf.reduce_mean(tf.square(pred-y))
                # Calculate accuracy
                accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
                print "Accuracy:", accuracy.eval({x: xtest, y: ytest})
                if (j%5000) == 0:
                    print "Error after "+str(j)+" iterations:" + str(accuracy.eval({x: xtest, y: ytest}))
                if accuracy.eval({x: xtest, y: ytest}) < thresh or stopcond == False:
                    #print "Changing stopcond!"
                    stopcond = False
                    print "Final params:"
                    # Stash final numpy parameters and checkpoint to disk.
                    # NOTE(review): hardcoded machine-specific checkpoint path.
                    test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
                    save_path = test_model.saver.save(sess,"/home/dfreeman/PythonFun/tmp/model" + str(ii) + ".ckpt")
                j+=1
#remove the comment to get random initialization
#synapses.append([synapse_0,synapse_1,synapse_2
Epoch: 0001 cost= 0.268766576
Epoch: 0002 cost= 0.247447196
Epoch: 0003 cost= 0.231380257
Epoch: 0004 cost= 0.215012711
Epoch: 0005 cost= 0.201905853
Epoch: 0006 cost= 0.191673702
Epoch: 0007 cost= 0.177289271
Epoch: 0008 cost= 0.164127609
Epoch: 0009 cost= 0.157833308
Epoch: 0010 cost= 0.144638336
Epoch: 0011 cost= 0.140677395
Epoch: 0012 cost= 0.133864456
Epoch: 0013 cost= 0.127730995
Epoch: 0014 cost= 0.118851471
Epoch: 0015 cost= 0.111467114
Optimization Finished!
Accuracy: 0.116371
Error after 0 iterations:0.116371
Epoch: 0001 cost= 0.108530270
Epoch: 0002 cost= 0.103610612
Epoch: 0003 cost= 0.097405253
Epoch: 0004 cost= 0.094024596
Epoch: 0005 cost= 0.093340334
Epoch: 0006 cost= 0.088003223
Epoch: 0007 cost= 0.085662736
Epoch: 0008 cost= 0.082675809
Epoch: 0009 cost= 0.078721796
Epoch: 0010 cost= 0.076755011
Epoch: 0011 cost= 0.074962015
Epoch: 0012 cost= 0.073527814
Epoch: 0013 cost= 0.071860832
Epoch: 0014 cost= 0.071171053
Epoch: 0015 cost= 0.069432078
Optimization Finished!
Accuracy: 0.0720659
Error after 0 iterations:0.0720659
Epoch: 0001 cost= 0.068111829
Epoch: 0002 cost= 0.065833879
Epoch: 0003 cost= 0.064144029
Epoch: 0004 cost= 0.063975209
Epoch: 0005 cost= 0.062338471
Epoch: 0006 cost= 0.062348612
Epoch: 0007 cost= 0.061061291
Epoch: 0008 cost= 0.060648439
Epoch: 0009 cost= 0.060553673
Epoch: 0010 cost= 0.060807389
Epoch: 0011 cost= 0.058186249
Epoch: 0012 cost= 0.056882928
Epoch: 0013 cost= 0.058762704
Epoch: 0014 cost= 0.055880670
Epoch: 0015 cost= 0.056201088
Optimization Finished!
Accuracy: 0.0586148
Error after 0 iterations:0.0586148
Epoch: 0001 cost= 0.055328757
Epoch: 0002 cost= 0.054269627
Epoch: 0003 cost= 0.053509530
Epoch: 0004 cost= 0.054387462
Epoch: 0005 cost= 0.053372941
Epoch: 0006 cost= 0.053191369
Epoch: 0007 cost= 0.052677624
Epoch: 0008 cost= 0.052284450
Epoch: 0009 cost= 0.052285809
Epoch: 0010 cost= 0.051524123
Epoch: 0011 cost= 0.051916703
Epoch: 0012 cost= 0.049732319
Epoch: 0013 cost= 0.050014976
Epoch: 0014 cost= 0.052170128
Epoch: 0015 cost= 0.051498075
Optimization Finished!
Accuracy: 0.0529583
Error after 0 iterations:0.0529583
Epoch: 0001 cost= 0.050587058
Epoch: 0002 cost= 0.050166617
Epoch: 0003 cost= 0.048989912
Epoch: 0004 cost= 0.049623383
Epoch: 0005 cost= 0.049638755
Epoch: 0006 cost= 0.049672737
Epoch: 0007 cost= 0.048246326
Epoch: 0008 cost= 0.048316506
Epoch: 0009 cost= 0.048389547
Epoch: 0010 cost= 0.048693530
Epoch: 0011 cost= 0.048748256
Epoch: 0012 cost= 0.047338461
Epoch: 0013 cost= 0.048498962
Epoch: 0014 cost= 0.047443738
Epoch: 0015 cost= 0.046762998
Optimization Finished!
Accuracy: 0.0492356
Error after 0 iterations:0.0492356
Epoch: 0001 cost= 0.047116067
Epoch: 0002 cost= 0.047172682
Epoch: 0003 cost= 0.046057059
Epoch: 0004 cost= 0.046111906
Epoch: 0005 cost= 0.045710032
Epoch: 0006 cost= 0.046000168
Epoch: 0007 cost= 0.045531407
Epoch: 0008 cost= 0.045432775
Epoch: 0009 cost= 0.045454446
Epoch: 0010 cost= 0.043961347
Epoch: 0011 cost= 0.045129541
Epoch: 0012 cost= 0.045020971
Epoch: 0013 cost= 0.044415821
Epoch: 0014 cost= 0.044140649
Epoch: 0015 cost= 0.044298166
Optimization Finished!
Accuracy: 0.0461721
Error after 0 iterations:0.0461721
Epoch: 0001 cost= 0.044352701
Epoch: 0002 cost= 0.044014304
Epoch: 0003 cost= 0.043313056
Epoch: 0004 cost= 0.042851434
Epoch: 0005 cost= 0.043504942
Epoch: 0006 cost= 0.043543687
Epoch: 0007 cost= 0.043619787
Epoch: 0008 cost= 0.042892807
Epoch: 0009 cost= 0.043030921
Epoch: 0010 cost= 0.042539345
Epoch: 0011 cost= 0.042659108
Epoch: 0012 cost= 0.041961554
Epoch: 0013 cost= 0.042019218
Epoch: 0014 cost= 0.041404714
Epoch: 0015 cost= 0.042039464
Optimization Finished!
Accuracy: 0.0434011
Error after 0 iterations:0.0434011
Epoch: 0001 cost= 0.041004475
Epoch: 0002 cost= 0.041322179
Epoch: 0003 cost= 0.041880667
Epoch: 0004 cost= 0.041927729
Epoch: 0005 cost= 0.041055504
Epoch: 0006 cost= 0.041168907
Epoch: 0007 cost= 0.040588091
Epoch: 0008 cost= 0.040658668
Epoch: 0009 cost= 0.040443034
Epoch: 0010 cost= 0.039904846
Epoch: 0011 cost= 0.039756011
Epoch: 0012 cost= 0.039389238
Epoch: 0013 cost= 0.039364843
Epoch: 0014 cost= 0.039190865
Epoch: 0015 cost= 0.038655581
Optimization Finished!
Accuracy: 0.040828
Error after 0 iterations:0.040828
Epoch: 0001 cost= 0.039202049
Epoch: 0002 cost= 0.038811350
Epoch: 0003 cost= 0.039095000
Epoch: 0004 cost= 0.038905483
Epoch: 0005 cost= 0.038412438
Epoch: 0006 cost= 0.038811413
Epoch: 0007 cost= 0.038372810
Epoch: 0008 cost= 0.037580005
Epoch: 0009 cost= 0.037547339
Epoch: 0010 cost= 0.036990172
Epoch: 0011 cost= 0.037374827
Epoch: 0012 cost= 0.037655064
Epoch: 0013 cost= 0.037214465
Epoch: 0014 cost= 0.036333268
Epoch: 0015 cost= 0.036954226
Optimization Finished!
Accuracy: 0.0384154
Error after 0 iterations:0.0384154
Epoch: 0001 cost= 0.037494742
Epoch: 0002 cost= 0.036842323
Epoch: 0003 cost= 0.036692329
Epoch: 0004 cost= 0.036736737
Epoch: 0005 cost= 0.036211418
Epoch: 0006 cost= 0.036561669
Epoch: 0007 cost= 0.036009618
Epoch: 0008 cost= 0.035273619
Epoch: 0009 cost= 0.035483591
Epoch: 0010 cost= 0.035439119
Epoch: 0011 cost= 0.034741855
Epoch: 0012 cost= 0.034994063
Epoch: 0013 cost= 0.034491426
Epoch: 0014 cost= 0.035888284
Epoch: 0015 cost= 0.035026334
Optimization Finished!
Accuracy: 0.0361393
Error after 0 iterations:0.0361393
Epoch: 0001 cost= 0.034863828
Epoch: 0002 cost= 0.033851996
Epoch: 0003 cost= 0.034209137
Epoch: 0004 cost= 0.034110048
Epoch: 0005 cost= 0.033542195
Epoch: 0006 cost= 0.033974506
Epoch: 0007 cost= 0.033605482
Epoch: 0008 cost= 0.033168719
Epoch: 0009 cost= 0.034016981
Epoch: 0010 cost= 0.033600362
Epoch: 0011 cost= 0.033472549
Epoch: 0012 cost= 0.033220584
Epoch: 0013 cost= 0.032923812
Epoch: 0014 cost= 0.032749888
Epoch: 0015 cost= 0.032900994
Optimization Finished!
Accuracy: 0.0340098
Error after 0 iterations:0.0340098
Epoch: 0001 cost= 0.032377514
Epoch: 0002 cost= 0.032921007
Epoch: 0003 cost= 0.031378242
Epoch: 0004 cost= 0.031695091
Epoch: 0005 cost= 0.031689690
Epoch: 0006 cost= 0.031726261
Epoch: 0007 cost= 0.031619641
Epoch: 0008 cost= 0.031474600
Epoch: 0009 cost= 0.031396276
Epoch: 0010 cost= 0.031448537
Epoch: 0011 cost= 0.030709147
Epoch: 0012 cost= 0.030458616
Epoch: 0013 cost= 0.030813820
Epoch: 0014 cost= 0.031159821
Epoch: 0015 cost= 0.030910163
Optimization Finished!
Accuracy: 0.0320158
Error after 0 iterations:0.0320158
Epoch: 0001 cost= 0.030584219
Epoch: 0002 cost= 0.030294308
Epoch: 0003 cost= 0.029682229
Optimization Finished!
Accuracy: 0.0316336
Error after 0 iterations:0.0316336
Final params:
Epoch: 0001 cost= 2.153250885
Epoch: 0002 cost= 1.804174638
Epoch: 0003 cost= 1.539111209
Epoch: 0004 cost= 1.455615973
Epoch: 0005 cost= 1.349163866
Epoch: 0006 cost= 1.331781244
Epoch: 0007 cost= 1.280768108
Epoch: 0008 cost= 1.246967316
Epoch: 0009 cost= 1.197469497
Epoch: 0010 cost= 1.153349710
Epoch: 0011 cost= 1.137086391
Epoch: 0012 cost= 1.109253335
Epoch: 0013 cost= 1.083902597
Epoch: 0014 cost= 1.036632013
Epoch: 0015 cost= 1.032435083
Optimization Finished!
Accuracy: 1.0124
Error after 0 iterations:1.0124
Epoch: 0001 cost= 1.003325987
Epoch: 0002 cost= 0.958921885
Epoch: 0003 cost= 0.947094023
Epoch: 0004 cost= 0.905675697
Epoch: 0005 cost= 0.884653962
Epoch: 0006 cost= 0.855681050
Epoch: 0007 cost= 0.838958263
Epoch: 0008 cost= 0.821513450
Epoch: 0009 cost= 0.796317720
Epoch: 0010 cost= 0.763215446
Epoch: 0011 cost= 0.762239134
Epoch: 0012 cost= 0.729659200
Epoch: 0013 cost= 0.719209826
Epoch: 0014 cost= 0.706056643
Epoch: 0015 cost= 0.686763239
Optimization Finished!
Accuracy: 0.67369
Error after 0 iterations:0.67369
Epoch: 0001 cost= 0.661081719
Epoch: 0002 cost= 0.638005352
Epoch: 0003 cost= 0.629542208
Epoch: 0004 cost= 0.613008332
Epoch: 0005 cost= 0.598001862
Epoch: 0006 cost= 0.589007401
Epoch: 0007 cost= 0.561945367
Epoch: 0008 cost= 0.549934983
Epoch: 0009 cost= 0.534160697
Epoch: 0010 cost= 0.520826197
Epoch: 0011 cost= 0.513096201
Epoch: 0012 cost= 0.503378350
Epoch: 0013 cost= 0.489987469
Epoch: 0014 cost= 0.482842940
Epoch: 0015 cost= 0.471626127
Optimization Finished!
Accuracy: 0.466052
Error after 0 iterations:0.466052
Epoch: 0001 cost= 0.454421574
Epoch: 0002 cost= 0.451425004
Epoch: 0003 cost= 0.444326466
Epoch: 0004 cost= 0.422874475
Epoch: 0005 cost= 0.416678250
Epoch: 0006 cost= 0.404897356
Epoch: 0007 cost= 0.398117685
Epoch: 0008 cost= 0.389852732
Epoch: 0009 cost= 0.385298330
Epoch: 0010 cost= 0.380450100
Epoch: 0011 cost= 0.372862059
Epoch: 0012 cost= 0.358944321
Epoch: 0013 cost= 0.358271712
Epoch: 0014 cost= 0.346893454
Epoch: 0015 cost= 0.338346314
Optimization Finished!
Accuracy: 0.341791
Error after 0 iterations:0.341791
Epoch: 0001 cost= 0.338802487
Epoch: 0002 cost= 0.327329963
Epoch: 0003 cost= 0.326222551
Epoch: 0004 cost= 0.317774421
Epoch: 0005 cost= 0.315134096
Epoch: 0006 cost= 0.309625524
Epoch: 0007 cost= 0.297722125
Epoch: 0008 cost= 0.294333029
Epoch: 0009 cost= 0.291372329
Epoch: 0010 cost= 0.280576414
Epoch: 0011 cost= 0.282590783
Epoch: 0012 cost= 0.280875814
Epoch: 0013 cost= 0.275725561
Epoch: 0014 cost= 0.266715169
Epoch: 0015 cost= 0.263783389
Optimization Finished!
Accuracy: 0.264728
Error after 0 iterations:0.264728
Epoch: 0001 cost= 0.259091711
Epoch: 0002 cost= 0.262297302
Epoch: 0003 cost= 0.250337121
Epoch: 0004 cost= 0.248481354
Epoch: 0005 cost= 0.246383330
Epoch: 0006 cost= 0.236806405
Epoch: 0007 cost= 0.237767890
Epoch: 0008 cost= 0.234377974
Epoch: 0009 cost= 0.234383202
Epoch: 0010 cost= 0.227768120
Epoch: 0011 cost= 0.224253121
Epoch: 0012 cost= 0.226250657
Epoch: 0013 cost= 0.220459929
Epoch: 0014 cost= 0.214459601
Epoch: 0015 cost= 0.215837368
Optimization Finished!
Accuracy: 0.214427
Error after 0 iterations:0.214427
Epoch: 0001 cost= 0.209833369
Epoch: 0002 cost= 0.210850033
Epoch: 0003 cost= 0.202571708
Epoch: 0004 cost= 0.200894102
Epoch: 0005 cost= 0.202147439
Epoch: 0006 cost= 0.199411368
Epoch: 0007 cost= 0.194504476
Epoch: 0008 cost= 0.195573837
Epoch: 0009 cost= 0.189203465
Epoch: 0010 cost= 0.189494348
Epoch: 0011 cost= 0.187952915
Epoch: 0012 cost= 0.186398405
Epoch: 0013 cost= 0.183125234
Epoch: 0014 cost= 0.180573362
Epoch: 0015 cost= 0.178051075
Optimization Finished!
Accuracy: 0.179141
Error after 0 iterations:0.179141
Epoch: 0001 cost= 0.177260846
Epoch: 0002 cost= 0.175429049
Epoch: 0003 cost= 0.171969163
Epoch: 0004 cost= 0.173549345
Epoch: 0005 cost= 0.168656945
Epoch: 0006 cost= 0.169868648
Epoch: 0007 cost= 0.165499943
Epoch: 0008 cost= 0.163690984
Epoch: 0009 cost= 0.162210178
Epoch: 0010 cost= 0.159536850
Epoch: 0011 cost= 0.159727573
Epoch: 0012 cost= 0.158882630
Epoch: 0013 cost= 0.154032478
Epoch: 0014 cost= 0.152304581
Epoch: 0015 cost= 0.153382891
Optimization Finished!
Accuracy: 0.152719
Error after 0 iterations:0.152719
Epoch: 0001 cost= 0.151356986
Epoch: 0002 cost= 0.149712032
Epoch: 0003 cost= 0.147153836
Epoch: 0004 cost= 0.145788825
Epoch: 0005 cost= 0.144554868
Epoch: 0006 cost= 0.142735955
Epoch: 0007 cost= 0.140309894
Epoch: 0008 cost= 0.139752594
Epoch: 0009 cost= 0.138924503
Epoch: 0010 cost= 0.137713605
Epoch: 0011 cost= 0.134516403
Epoch: 0012 cost= 0.135307196
Epoch: 0013 cost= 0.133549860
Epoch: 0014 cost= 0.132527858
Epoch: 0015 cost= 0.129798117
Optimization Finished!
Accuracy: 0.131758
Error after 0 iterations:0.131758
Epoch: 0001 cost= 0.130185041
Epoch: 0002 cost= 0.127352861
Epoch: 0003 cost= 0.128001019
Epoch: 0004 cost= 0.126129331
Epoch: 0005 cost= 0.127352601
Epoch: 0006 cost= 0.124351455
Epoch: 0007 cost= 0.122488108
Epoch: 0008 cost= 0.123026235
Epoch: 0009 cost= 0.120317051
Epoch: 0010 cost= 0.120288672
Epoch: 0011 cost= 0.118210725
Epoch: 0012 cost= 0.117304151
Epoch: 0013 cost= 0.118306108
Epoch: 0014 cost= 0.116077329
Epoch: 0015 cost= 0.113189632
Optimization Finished!
Accuracy: 0.114459
Error after 0 iterations:0.114459
Epoch: 0001 cost= 0.112644327
Epoch: 0002 cost= 0.111335637
Epoch: 0003 cost= 0.110011293
Epoch: 0004 cost= 0.110019462
Epoch: 0005 cost= 0.109837207
Epoch: 0006 cost= 0.107298531
Epoch: 0007 cost= 0.106166454
Epoch: 0008 cost= 0.104861481
Epoch: 0009 cost= 0.106035918
Epoch: 0010 cost= 0.103736031
Epoch: 0011 cost= 0.104341105
Epoch: 0012 cost= 0.102008861
Epoch: 0013 cost= 0.100007924
Epoch: 0014 cost= 0.098622960
Epoch: 0015 cost= 0.100509036
Optimization Finished!
Accuracy: 0.100047
Error after 0 iterations:0.100047
Epoch: 0001 cost= 0.100218531
Epoch: 0002 cost= 0.098027411
Epoch: 0003 cost= 0.096838683
Epoch: 0004 cost= 0.095791672
Epoch: 0005 cost= 0.094068193
Epoch: 0006 cost= 0.093726520
Epoch: 0007 cost= 0.093080890
Epoch: 0008 cost= 0.092311913
Epoch: 0009 cost= 0.090730904
Epoch: 0010 cost= 0.089964806
Epoch: 0011 cost= 0.089241290
Epoch: 0012 cost= 0.089751078
Epoch: 0013 cost= 0.089253578
Epoch: 0014 cost= 0.086382055
Epoch: 0015 cost= 0.087097770
Optimization Finished!
Accuracy: 0.0877949
Error after 0 iterations:0.0877949
Epoch: 0001 cost= 0.087461133
Epoch: 0002 cost= 0.085000692
Epoch: 0003 cost= 0.086684242
Epoch: 0004 cost= 0.084245904
Epoch: 0005 cost= 0.083083582
Epoch: 0006 cost= 0.083913802
Epoch: 0007 cost= 0.082974900
Epoch: 0008 cost= 0.081456026
Epoch: 0009 cost= 0.079525383
Epoch: 0010 cost= 0.080837147
Epoch: 0011 cost= 0.079396878
Epoch: 0012 cost= 0.079075217
Epoch: 0013 cost= 0.078432117
Epoch: 0014 cost= 0.077063277
Epoch: 0015 cost= 0.076468357
Optimization Finished!
Accuracy: 0.0771543
Error after 0 iterations:0.0771543
Epoch: 0001 cost= 0.076073460
Epoch: 0002 cost= 0.076137170
Epoch: 0003 cost= 0.075005180
Epoch: 0004 cost= 0.073025182
Epoch: 0005 cost= 0.073297949
Epoch: 0006 cost= 0.073021975
Epoch: 0007 cost= 0.071994358
Epoch: 0008 cost= 0.070372878
Epoch: 0009 cost= 0.072327453
Epoch: 0010 cost= 0.070391212
Epoch: 0011 cost= 0.069802603
Epoch: 0012 cost= 0.069333269
Epoch: 0013 cost= 0.069305551
Epoch: 0014 cost= 0.067522933
Epoch: 0015 cost= 0.068157957
Optimization Finished!
Accuracy: 0.0679825
Error after 0 iterations:0.0679825
Epoch: 0001 cost= 0.066755553
Epoch: 0002 cost= 0.065708259
Epoch: 0003 cost= 0.065737726
Epoch: 0004 cost= 0.065957114
Epoch: 0005 cost= 0.065175971
Epoch: 0006 cost= 0.063984771
Epoch: 0007 cost= 0.062442797
Epoch: 0008 cost= 0.063970540
Epoch: 0009 cost= 0.062907372
Epoch: 0010 cost= 0.061668003
Epoch: 0011 cost= 0.062060430
Epoch: 0012 cost= 0.060667806
Epoch: 0013 cost= 0.060583036
Epoch: 0014 cost= 0.059652326
Epoch: 0015 cost= 0.059676663
Optimization Finished!
Accuracy: 0.06
Error after 0 iterations:0.06
Epoch: 0001 cost= 0.059430031
Epoch: 0002 cost= 0.058556373
Epoch: 0003 cost= 0.058407460
Epoch: 0004 cost= 0.057874399
Epoch: 0005 cost= 0.057162952
Epoch: 0006 cost= 0.056184979
Epoch: 0007 cost= 0.055471664
Epoch: 0008 cost= 0.055760705
Epoch: 0009 cost= 0.055183241
Epoch: 0010 cost= 0.054489894
Epoch: 0011 cost= 0.054006399
Epoch: 0012 cost= 0.055359671
Epoch: 0013 cost= 0.053854081
Epoch: 0014 cost= 0.053625448
Epoch: 0015 cost= 0.052769984
Optimization Finished!
Accuracy: 0.0530092
Error after 0 iterations:0.0530092
Epoch: 0001 cost= 0.052652207
Epoch: 0002 cost= 0.051520838
Epoch: 0003 cost= 0.051383858
Epoch: 0004 cost= 0.051033665
Epoch: 0005 cost= 0.050339090
Epoch: 0006 cost= 0.050472313
Epoch: 0007 cost= 0.049806403
Epoch: 0008 cost= 0.048389986
Epoch: 0009 cost= 0.048684753
Epoch: 0010 cost= 0.048653018
Epoch: 0011 cost= 0.047735391
Epoch: 0012 cost= 0.047441486
Epoch: 0013 cost= 0.048214061
Epoch: 0014 cost= 0.047692351
Epoch: 0015 cost= 0.046776028
Optimization Finished!
Accuracy: 0.0468665
Error after 0 iterations:0.0468665
Epoch: 0001 cost= 0.046605101
Epoch: 0002 cost= 0.046579813
Epoch: 0003 cost= 0.046035251
Epoch: 0004 cost= 0.044648232
Epoch: 0005 cost= 0.044250334
Epoch: 0006 cost= 0.043821619
Epoch: 0007 cost= 0.044350036
Epoch: 0008 cost= 0.043211949
Epoch: 0009 cost= 0.042740952
Epoch: 0010 cost= 0.043870565
Epoch: 0011 cost= 0.042785692
Epoch: 0012 cost= 0.042190788
Epoch: 0013 cost= 0.041707557
Epoch: 0014 cost= 0.041354738
Epoch: 0015 cost= 0.041569514
Optimization Finished!
Accuracy: 0.0414859
Error after 0 iterations:0.0414859
Epoch: 0001 cost= 0.040888625
Epoch: 0002 cost= 0.040152247
Epoch: 0003 cost= 0.040168446
Epoch: 0004 cost= 0.039678898
Epoch: 0005 cost= 0.039992565
Epoch: 0006 cost= 0.039236027
Epoch: 0007 cost= 0.039269242
Epoch: 0008 cost= 0.038402133
Epoch: 0009 cost= 0.038778602
Epoch: 0010 cost= 0.037979589
Epoch: 0011 cost= 0.038059596
Epoch: 0012 cost= 0.037871503
Epoch: 0013 cost= 0.036991493
Epoch: 0014 cost= 0.036568258
Epoch: 0015 cost= 0.036499316
Optimization Finished!
Accuracy: 0.0367412
Error after 0 iterations:0.0367412
Epoch: 0001 cost= 0.036373124
Epoch: 0002 cost= 0.035998760
Epoch: 0003 cost= 0.035846806
Epoch: 0004 cost= 0.035147348
Epoch: 0005 cost= 0.035115007
Epoch: 0006 cost= 0.034796211
Epoch: 0007 cost= 0.034257598
Epoch: 0008 cost= 0.034307435
Epoch: 0009 cost= 0.033498313
Epoch: 0010 cost= 0.033446287
Epoch: 0011 cost= 0.033885764
Epoch: 0012 cost= 0.033795217
Epoch: 0013 cost= 0.033126388
Epoch: 0014 cost= 0.031955666
Epoch: 0015 cost= 0.032805569
Optimization Finished!
Accuracy: 0.0325858
Error after 0 iterations:0.0325858
Epoch: 0001 cost= 0.031607189
Epoch: 0002 cost= 0.032143378
Epoch: 0003 cost= 0.031577119
Epoch: 0004 cost= 0.031221864
Epoch: 0005 cost= 0.031262278
Epoch: 0006 cost= 0.031359574
Epoch: 0007 cost= 0.030523705
Epoch: 0008 cost= 0.030399058
Epoch: 0009 cost= 0.030055944
Epoch: 0010 cost= 0.029896392
Optimization Finished!
Accuracy: 0.0300677
Error after 0 iterations:0.0300677
Final params:
Epoch: 0001 cost= 5.741192436
Epoch: 0002 cost= 4.328470278
Epoch: 0003 cost= 3.243019962
Epoch: 0004 cost= 2.490483427
Epoch: 0005 cost= 1.940615439
Epoch: 0006 cost= 1.519821858
Epoch: 0007 cost= 1.204393744
Epoch: 0008 cost= 0.959296274
Epoch: 0009 cost= 0.783492732
Epoch: 0010 cost= 0.635659838
Epoch: 0011 cost= 0.536593646
Epoch: 0012 cost= 0.466122472
Epoch: 0013 cost= 0.403418165
Epoch: 0014 cost= 0.356449246
Epoch: 0015 cost= 0.327002120
Optimization Finished!
Accuracy: 0.309406
Error after 0 iterations:0.309406
Epoch: 0001 cost= 0.304701823
Epoch: 0002 cost= 0.277886474
Epoch: 0003 cost= 0.269563997
Epoch: 0004 cost= 0.253170967
Epoch: 0005 cost= 0.247052932
Epoch: 0006 cost= 0.242273822
Epoch: 0007 cost= 0.229419532
Epoch: 0008 cost= 0.225323951
Epoch: 0009 cost= 0.226979306
Epoch: 0010 cost= 0.222576562
Epoch: 0011 cost= 0.216240916
Epoch: 0012 cost= 0.215680283
Epoch: 0013 cost= 0.221135810
Epoch: 0014 cost= 0.213215250
Epoch: 0015 cost= 0.208143440
Optimization Finished!
Accuracy: 0.208432
Error after 0 iterations:0.208432
Epoch: 0001 cost= 0.206150940
Epoch: 0002 cost= 0.208347410
Epoch: 0003 cost= 0.206683290
Epoch: 0004 cost= 0.205578244
Epoch: 0005 cost= 0.202891061
Epoch: 0006 cost= 0.200661618
Epoch: 0007 cost= 0.204632944
Epoch: 0008 cost= 0.203505707
Epoch: 0009 cost= 0.201500592
Epoch: 0010 cost= 0.199385044
Epoch: 0011 cost= 0.199251682
Epoch: 0012 cost= 0.200139517
Epoch: 0013 cost= 0.195589480
Epoch: 0014 cost= 0.193230838
Epoch: 0015 cost= 0.194838408
Optimization Finished!
Accuracy: 0.193655
Error after 0 iterations:0.193655
Epoch: 0001 cost= 0.197029352
Epoch: 0002 cost= 0.191639164
Epoch: 0003 cost= 0.191846141
Epoch: 0004 cost= 0.196893501
Epoch: 0005 cost= 0.193071866
Epoch: 0006 cost= 0.192794749
Epoch: 0007 cost= 0.185641333
Epoch: 0008 cost= 0.188947198
Epoch: 0009 cost= 0.187752390
Epoch: 0010 cost= 0.187636277
Epoch: 0011 cost= 0.186060688
Epoch: 0012 cost= 0.187897381
Epoch: 0013 cost= 0.183320475
Epoch: 0014 cost= 0.183521244
Epoch: 0015 cost= 0.185227045
Optimization Finished!
Accuracy: 0.179585
Error after 0 iterations:0.179585
Epoch: 0001 cost= 0.183741975
Epoch: 0002 cost= 0.183505562
Epoch: 0003 cost= 0.180550224
Epoch: 0004 cost= 0.181009585
Epoch: 0005 cost= 0.177896541
Epoch: 0006 cost= 0.177608660
Epoch: 0007 cost= 0.176532167
Epoch: 0008 cost= 0.173318377
Epoch: 0009 cost= 0.170522118
Epoch: 0010 cost= 0.174733174
Epoch: 0011 cost= 0.170988521
Epoch: 0012 cost= 0.168776470
Epoch: 0013 cost= 0.168679410
Epoch: 0014 cost= 0.167922494
Epoch: 0015 cost= 0.165855485
Optimization Finished!
Accuracy: 0.165268
Error after 0 iterations:0.165268
Epoch: 0001 cost= 0.166007924
Epoch: 0002 cost= 0.165720144
Epoch: 0003 cost= 0.168363506
Epoch: 0004 cost= 0.164487886
Epoch: 0005 cost= 0.160917747
Epoch: 0006 cost= 0.163544056
Epoch: 0007 cost= 0.163262329
Epoch: 0008 cost= 0.162720448
Epoch: 0009 cost= 0.159543234
Epoch: 0010 cost= 0.162844545
Epoch: 0011 cost= 0.156956497
Epoch: 0012 cost= 0.152479869
Epoch: 0013 cost= 0.153822240
Epoch: 0014 cost= 0.155445635
Epoch: 0015 cost= 0.154939011
Optimization Finished!
Accuracy: 0.151101
Error after 0 iterations:0.151101
Epoch: 0001 cost= 0.150459054
Epoch: 0002 cost= 0.147897226
Epoch: 0003 cost= 0.149981683
Epoch: 0004 cost= 0.150396824
Epoch: 0005 cost= 0.150887850
Epoch: 0006 cost= 0.147247946
Epoch: 0007 cost= 0.147623855
Epoch: 0008 cost= 0.146101829
Epoch: 0009 cost= 0.143512848
Epoch: 0010 cost= 0.142704448
Epoch: 0011 cost= 0.145608589
Epoch: 0012 cost= 0.141773400
Epoch: 0013 cost= 0.141302976
Epoch: 0014 cost= 0.142476177
Epoch: 0015 cost= 0.142214906
Optimization Finished!
Accuracy: 0.138078
Error after 0 iterations:0.138078
Epoch: 0001 cost= 0.140412700
Epoch: 0002 cost= 0.139704615
Epoch: 0003 cost= 0.138622338
Epoch: 0004 cost= 0.137251425
Epoch: 0005 cost= 0.136140579
Epoch: 0006 cost= 0.132955071
Epoch: 0007 cost= 0.135899982
Epoch: 0008 cost= 0.134810817
Epoch: 0009 cost= 0.134689048
Epoch: 0010 cost= 0.132358557
Epoch: 0011 cost= 0.131928360
Epoch: 0012 cost= 0.132347536
Epoch: 0013 cost= 0.131165257
Epoch: 0014 cost= 0.132737911
Epoch: 0015 cost= 0.128977618
Optimization Finished!
Accuracy: 0.127068
Error after 0 iterations:0.127068
Epoch: 0001 cost= 0.127690062
Epoch: 0002 cost= 0.129966465
Epoch: 0003 cost= 0.128901069
Epoch: 0004 cost= 0.123621042
Epoch: 0005 cost= 0.127214071
Epoch: 0006 cost= 0.124614839
Epoch: 0007 cost= 0.123723647
Epoch: 0008 cost= 0.123416276
Epoch: 0009 cost= 0.123193866
Epoch: 0010 cost= 0.124040067
Epoch: 0011 cost= 0.123748624
Epoch: 0012 cost= 0.122277167
Epoch: 0013 cost= 0.120833364
Epoch: 0014 cost= 0.119449045
Epoch: 0015 cost= 0.120110938
Optimization Finished!
Accuracy: 0.118485
Error after 0 iterations:0.118485
Epoch: 0001 cost= 0.121541739
Epoch: 0002 cost= 0.118318926
Epoch: 0003 cost= 0.117869441
Epoch: 0004 cost= 0.118248917
Epoch: 0005 cost= 0.119651335
Epoch: 0006 cost= 0.117062913
Epoch: 0007 cost= 0.119337195
Epoch: 0008 cost= 0.116563667
Epoch: 0009 cost= 0.116447046
Epoch: 0010 cost= 0.115081581
Epoch: 0011 cost= 0.116177598
Epoch: 0012 cost= 0.114624722
Epoch: 0013 cost= 0.114598507
Epoch: 0014 cost= 0.114869893
Epoch: 0015 cost= 0.112349288
Optimization Finished!
Accuracy: 0.111549
Error after 0 iterations:0.111549
Epoch: 0001 cost= 0.112975976
Epoch: 0002 cost= 0.111977141
Epoch: 0003 cost= 0.109693715
Epoch: 0004 cost= 0.109649724
Epoch: 0005 cost= 0.113484018
Epoch: 0006 cost= 0.111505154
Epoch: 0007 cost= 0.109586978
Epoch: 0008 cost= 0.110760105
Epoch: 0009 cost= 0.111237790
Epoch: 0010 cost= 0.109943667
Epoch: 0011 cost= 0.109310788
Epoch: 0012 cost= 0.107451957
Epoch: 0013 cost= 0.106609660
Epoch: 0014 cost= 0.107819472
Epoch: 0015 cost= 0.105293433
Optimization Finished!
Accuracy: 0.105454
Error after 0 iterations:0.105454
Epoch: 0001 cost= 0.109315199
Epoch: 0002 cost= 0.106582122
Epoch: 0003 cost= 0.105373015
Epoch: 0004 cost= 0.106213322
Epoch: 0005 cost= 0.104188484
Epoch: 0006 cost= 0.106205036
Epoch: 0007 cost= 0.105159022
Epoch: 0008 cost= 0.104731341
Epoch: 0009 cost= 0.103390452
Epoch: 0010 cost= 0.103943533
Epoch: 0011 cost= 0.102719550
Epoch: 0012 cost= 0.102320577
Epoch: 0013 cost= 0.103059262
Epoch: 0014 cost= 0.102030639
Epoch: 0015 cost= 0.100320600
Optimization Finished!
Accuracy: 0.0998356
Error after 0 iterations:0.0998356
Epoch: 0001 cost= 0.103013316
Epoch: 0002 cost= 0.100530747
Epoch: 0003 cost= 0.099891211
Epoch: 0004 cost= 0.101260872
Epoch: 0005 cost= 0.099099778
Epoch: 0006 cost= 0.099478443
Epoch: 0007 cost= 0.100370869
Epoch: 0008 cost= 0.097378832
Epoch: 0009 cost= 0.098672053
Epoch: 0010 cost= 0.099324477
Epoch: 0011 cost= 0.098703726
Epoch: 0012 cost= 0.097915721
Epoch: 0013 cost= 0.097142673
Epoch: 0014 cost= 0.098467641
Epoch: 0015 cost= 0.094480708
Optimization Finished!
Accuracy: 0.0946278
Error after 0 iterations:0.0946278
Epoch: 0001 cost= 0.096483962
Epoch: 0002 cost= 0.096440910
Epoch: 0003 cost= 0.096042049
Epoch: 0004 cost= 0.094514029
Epoch: 0005 cost= 0.093393709
Epoch: 0006 cost= 0.095028700
Epoch: 0007 cost= 0.092035803
Epoch: 0008 cost= 0.095612793
Epoch: 0009 cost= 0.093491593
Epoch: 0010 cost= 0.093278359
Epoch: 0011 cost= 0.092811555
Epoch: 0012 cost= 0.092212933
Epoch: 0013 cost= 0.089791076
Epoch: 0014 cost= 0.091437611
Epoch: 0015 cost= 0.092694663
Optimization Finished!
Accuracy: 0.0898237
Error after 0 iterations:0.0898237
Epoch: 0001 cost= 0.092278320
Epoch: 0002 cost= 0.090927613
Epoch: 0003 cost= 0.090675627
Epoch: 0004 cost= 0.089924957
Epoch: 0005 cost= 0.088826801
Epoch: 0006 cost= 0.089680360
Epoch: 0007 cost= 0.089585143
Epoch: 0008 cost= 0.088774322
Epoch: 0009 cost= 0.089189430
Epoch: 0010 cost= 0.089606534
Epoch: 0011 cost= 0.087612534
Epoch: 0012 cost= 0.087739767
Epoch: 0013 cost= 0.085951330
Epoch: 0014 cost= 0.087666512
Epoch: 0015 cost= 0.085443063
Optimization Finished!
Accuracy: 0.0853933
Error after 0 iterations:0.0853933
Epoch: 0001 cost= 0.086287552
Epoch: 0002 cost= 0.085192411
Epoch: 0003 cost= 0.085677591
Epoch: 0004 cost= 0.086439715
Epoch: 0005 cost= 0.086851470
Epoch: 0006 cost= 0.085669252
Epoch: 0007 cost= 0.084180009
Epoch: 0008 cost= 0.085652433
Epoch: 0009 cost= 0.083844420
Epoch: 0010 cost= 0.082880823
Epoch: 0011 cost= 0.085252899
Epoch: 0012 cost= 0.083692376
Epoch: 0013 cost= 0.083482680
Epoch: 0014 cost= 0.081775460
Epoch: 0015 cost= 0.083807443
Optimization Finished!
Accuracy: 0.0812771
Error after 0 iterations:0.0812771
Epoch: 0001 cost= 0.080964437
Epoch: 0002 cost= 0.080754551
Epoch: 0003 cost= 0.082616818
Epoch: 0004 cost= 0.081654212
Epoch: 0005 cost= 0.081276731
Epoch: 0006 cost= 0.080849329
Epoch: 0007 cost= 0.081038435
Epoch: 0008 cost= 0.081671730
Epoch: 0009 cost= 0.081521635
Epoch: 0010 cost= 0.081703418
Epoch: 0011 cost= 0.081090088
Epoch: 0012 cost= 0.079997997
Epoch: 0013 cost= 0.079266238
Epoch: 0014 cost= 0.080299020
Epoch: 0015 cost= 0.080089171
Optimization Finished!
Accuracy: 0.0774622
Error after 0 iterations:0.0774622
Epoch: 0001 cost= 0.077604887
Epoch: 0002 cost= 0.077828173
Epoch: 0003 cost= 0.077371141
Epoch: 0004 cost= 0.077488659
Epoch: 0005 cost= 0.078475912
Epoch: 0006 cost= 0.076746677
Epoch: 0007 cost= 0.078260107
Epoch: 0008 cost= 0.076898797
Epoch: 0009 cost= 0.076764241
Epoch: 0010 cost= 0.076349391
Epoch: 0011 cost= 0.076109102
Epoch: 0012 cost= 0.075284286
Epoch: 0013 cost= 0.076386683
Epoch: 0014 cost= 0.075295267
Epoch: 0015 cost= 0.074174370
Optimization Finished!
Accuracy: 0.0739459
Error after 0 iterations:0.0739459
Epoch: 0001 cost= 0.075772457
Epoch: 0002 cost= 0.074158104
Epoch: 0003 cost= 0.074765223
Epoch: 0004 cost= 0.073761031
Epoch: 0005 cost= 0.073083837
Epoch: 0006 cost= 0.073354490
Epoch: 0007 cost= 0.074061257
Epoch: 0008 cost= 0.073813777
Epoch: 0009 cost= 0.073842695
Epoch: 0010 cost= 0.073883100
Epoch: 0011 cost= 0.072857708
Epoch: 0012 cost= 0.071718910
Epoch: 0013 cost= 0.071753292
Epoch: 0014 cost= 0.071738340
Epoch: 0015 cost= 0.070881978
Optimization Finished!
Accuracy: 0.0706663
Error after 0 iterations:0.0706663
Epoch: 0001 cost= 0.071174632
Epoch: 0002 cost= 0.071420929
Epoch: 0003 cost= 0.070769350
Epoch: 0004 cost= 0.070482801
Epoch: 0005 cost= 0.070650136
Epoch: 0006 cost= 0.070454006
Epoch: 0007 cost= 0.070591168
Epoch: 0008 cost= 0.071785472
Epoch: 0009 cost= 0.069226873
Epoch: 0010 cost= 0.070277350
Epoch: 0011 cost= 0.069968003
Epoch: 0012 cost= 0.069619252
Epoch: 0013 cost= 0.068915808
Epoch: 0014 cost= 0.068655133
Epoch: 0015 cost= 0.068309440
Optimization Finished!
Accuracy: 0.0675809
Error after 0 iterations:0.0675809
Epoch: 0001 cost= 0.068498312
Epoch: 0002 cost= 0.067190093
Epoch: 0003 cost= 0.067496227
Epoch: 0004 cost= 0.067331171
Epoch: 0005 cost= 0.067048326
Epoch: 0006 cost= 0.067932622
Epoch: 0007 cost= 0.067398570
Epoch: 0008 cost= 0.067289029
Epoch: 0009 cost= 0.067003274
Epoch: 0010 cost= 0.068066585
Epoch: 0011 cost= 0.067311986
Epoch: 0012 cost= 0.066576275
Epoch: 0013 cost= 0.065896198
Epoch: 0014 cost= 0.066756119
Epoch: 0015 cost= 0.064661129
Optimization Finished!
Accuracy: 0.0646661
Error after 0 iterations:0.0646661
Epoch: 0001 cost= 0.066083761
Epoch: 0002 cost= 0.065779109
Epoch: 0003 cost= 0.064683409
Epoch: 0004 cost= 0.065385781
Epoch: 0005 cost= 0.065024011
Epoch: 0006 cost= 0.064185743
Epoch: 0007 cost= 0.064797069
Epoch: 0008 cost= 0.064738126
Epoch: 0009 cost= 0.064194605
Epoch: 0010 cost= 0.064209320
Epoch: 0011 cost= 0.064311020
Epoch: 0012 cost= 0.064099460
Epoch: 0013 cost= 0.063276039
Epoch: 0014 cost= 0.062171466
Epoch: 0015 cost= 0.063168596
Optimization Finished!
Accuracy: 0.0618866
Error after 0 iterations:0.0618866
Epoch: 0001 cost= 0.063154845
Epoch: 0002 cost= 0.063565573
Epoch: 0003 cost= 0.062337355
Epoch: 0004 cost= 0.062479474
Epoch: 0005 cost= 0.061683150
Epoch: 0006 cost= 0.062617555
Epoch: 0007 cost= 0.061125527
Epoch: 0008 cost= 0.061509126
Epoch: 0009 cost= 0.061696135
Epoch: 0010 cost= 0.061775123
Epoch: 0011 cost= 0.060953636
Epoch: 0012 cost= 0.060301587
Epoch: 0013 cost= 0.061315972
Epoch: 0014 cost= 0.061073454
Epoch: 0015 cost= 0.061036996
Optimization Finished!
Accuracy: 0.0592178
Error after 0 iterations:0.0592178
Epoch: 0001 cost= 0.059831577
Epoch: 0002 cost= 0.061104488
Epoch: 0003 cost= 0.060011961
Epoch: 0004 cost= 0.059060131
Epoch: 0005 cost= 0.059589367
Epoch: 0006 cost= 0.060380357
Epoch: 0007 cost= 0.059803703
Epoch: 0008 cost= 0.059705813
Epoch: 0009 cost= 0.059363095
Epoch: 0010 cost= 0.058945500
Epoch: 0011 cost= 0.059328524
Epoch: 0012 cost= 0.057513339
Epoch: 0013 cost= 0.057813858
Epoch: 0014 cost= 0.057679582
Epoch: 0015 cost= 0.058418155
Optimization Finished!
Accuracy: 0.056651
Error after 0 iterations:0.056651
Epoch: 0001 cost= 0.057420521
Epoch: 0002 cost= 0.056760085
Epoch: 0003 cost= 0.056689204
Epoch: 0004 cost= 0.057182281
Epoch: 0005 cost= 0.056796981
Epoch: 0006 cost= 0.056778309
Epoch: 0007 cost= 0.056232367
Epoch: 0008 cost= 0.056623285
Epoch: 0009 cost= 0.056241795
Epoch: 0010 cost= 0.056221304
Epoch: 0011 cost= 0.056123088
Epoch: 0012 cost= 0.055433755
Epoch: 0013 cost= 0.055372062
Epoch: 0014 cost= 0.055275566
Epoch: 0015 cost= 0.055104521
Optimization Finished!
Accuracy: 0.0542048
Error after 0 iterations:0.0542048
Epoch: 0001 cost= 0.055001517
Epoch: 0002 cost= 0.054406364
Epoch: 0003 cost= 0.054703572
Epoch: 0004 cost= 0.054682212
Epoch: 0005 cost= 0.054018528
Epoch: 0006 cost= 0.054293932
Epoch: 0007 cost= 0.054372363
Epoch: 0008 cost= 0.053278290
Epoch: 0009 cost= 0.054238551
Epoch: 0010 cost= 0.052932516
Epoch: 0011 cost= 0.053422098
Epoch: 0012 cost= 0.052872264
Epoch: 0013 cost= 0.054227617
Epoch: 0014 cost= 0.053556415
Epoch: 0015 cost= 0.053702009
Optimization Finished!
Accuracy: 0.0518444
Error after 0 iterations:0.0518444
Epoch: 0001 cost= 0.052794431
Epoch: 0002 cost= 0.052908505
Epoch: 0003 cost= 0.051839483
Epoch: 0004 cost= 0.052019229
Epoch: 0005 cost= 0.052927566
Epoch: 0006 cost= 0.052053015
Epoch: 0007 cost= 0.050636799
Epoch: 0008 cost= 0.052250235
Epoch: 0009 cost= 0.052194465
Epoch: 0010 cost= 0.051602188
Epoch: 0011 cost= 0.050730994
Epoch: 0012 cost= 0.050770231
Epoch: 0013 cost= 0.050898856
Epoch: 0014 cost= 0.050420872
Epoch: 0015 cost= 0.050450491
Optimization Finished!
Accuracy: 0.0495691
Error after 0 iterations:0.0495691
Epoch: 0001 cost= 0.050729001
Epoch: 0002 cost= 0.050779209
Epoch: 0003 cost= 0.049488357
Epoch: 0004 cost= 0.050094043
Epoch: 0005 cost= 0.050725534
Epoch: 0006 cost= 0.049315766
Epoch: 0007 cost= 0.048694599
Epoch: 0008 cost= 0.049277544
Epoch: 0009 cost= 0.049405044
Epoch: 0010 cost= 0.049055506
Epoch: 0011 cost= 0.049769895
Epoch: 0012 cost= 0.048682372
Epoch: 0013 cost= 0.048106942
Epoch: 0014 cost= 0.048044083
Epoch: 0015 cost= 0.048512507
Optimization Finished!
Accuracy: 0.0473741
Error after 0 iterations:0.0473741
Epoch: 0001 cost= 0.047919007
Epoch: 0002 cost= 0.048575283
Epoch: 0003 cost= 0.047893442
Epoch: 0004 cost= 0.047100134
Epoch: 0005 cost= 0.048453779
Epoch: 0006 cost= 0.047868079
Epoch: 0007 cost= 0.047218053
Epoch: 0008 cost= 0.046631162
Epoch: 0009 cost= 0.046238610
Epoch: 0010 cost= 0.046947575
Epoch: 0011 cost= 0.046840807
Epoch: 0012 cost= 0.046385854
Epoch: 0013 cost= 0.044814575
Epoch: 0014 cost= 0.046587998
Epoch: 0015 cost= 0.046400008
Optimization Finished!
Accuracy: 0.0452656
Error after 0 iterations:0.0452656
Epoch: 0001 cost= 0.045775688
Epoch: 0002 cost= 0.046974214
Epoch: 0003 cost= 0.045438068
Epoch: 0004 cost= 0.045184109
Epoch: 0005 cost= 0.045905033
Epoch: 0006 cost= 0.045146245
Epoch: 0007 cost= 0.045200012
Epoch: 0008 cost= 0.045251518
Epoch: 0009 cost= 0.044873844
Epoch: 0010 cost= 0.044383972
Epoch: 0011 cost= 0.044842315
Epoch: 0012 cost= 0.044971274
Epoch: 0013 cost= 0.043889083
Epoch: 0014 cost= 0.044201162
Epoch: 0015 cost= 0.044824280
Optimization Finished!
Accuracy: 0.0432208
Error after 0 iterations:0.0432208
Epoch: 0001 cost= 0.043698945
Epoch: 0002 cost= 0.043810130
Epoch: 0003 cost= 0.043719260
Epoch: 0004 cost= 0.042919728
Epoch: 0005 cost= 0.044250261
Epoch: 0006 cost= 0.043589570
Epoch: 0007 cost= 0.042902132
Epoch: 0008 cost= 0.043040589
Epoch: 0009 cost= 0.043039836
Epoch: 0010 cost= 0.042936151
Epoch: 0011 cost= 0.043441915
Epoch: 0012 cost= 0.042283104
Epoch: 0013 cost= 0.042313121
Epoch: 0014 cost= 0.041566924
Epoch: 0015 cost= 0.042679968
Optimization Finished!
Accuracy: 0.0412455
Error after 0 iterations:0.0412455
Epoch: 0001 cost= 0.041706633
Epoch: 0002 cost= 0.043156004
Epoch: 0003 cost= 0.041154356
Epoch: 0004 cost= 0.041278708
Epoch: 0005 cost= 0.041543091
Epoch: 0006 cost= 0.041486648
Epoch: 0007 cost= 0.042098775
Epoch: 0008 cost= 0.041211608
Epoch: 0009 cost= 0.041105841
Epoch: 0010 cost= 0.040890056
Epoch: 0011 cost= 0.041252983
Epoch: 0012 cost= 0.040980859
Epoch: 0013 cost= 0.040236490
Epoch: 0014 cost= 0.040952647
Epoch: 0015 cost= 0.040222070
Optimization Finished!
Accuracy: 0.0393389
Error after 0 iterations:0.0393389
Epoch: 0001 cost= 0.040479585
Epoch: 0002 cost= 0.039465898
Epoch: 0003 cost= 0.038978461
Epoch: 0004 cost= 0.040053094
Epoch: 0005 cost= 0.040352893
Epoch: 0006 cost= 0.039285488
Epoch: 0007 cost= 0.039304049
Epoch: 0008 cost= 0.039592886
Epoch: 0009 cost= 0.039255056
Epoch: 0010 cost= 0.037869886
Epoch: 0011 cost= 0.039484539
Epoch: 0012 cost= 0.038755950
Epoch: 0013 cost= 0.038366365
Epoch: 0014 cost= 0.037756750
Epoch: 0015 cost= 0.038489517
Optimization Finished!
Accuracy: 0.0375045
Error after 0 iterations:0.0375045
Epoch: 0001 cost= 0.038218919
Epoch: 0002 cost= 0.037965825
Epoch: 0003 cost= 0.038093860
Epoch: 0004 cost= 0.038115632
Epoch: 0005 cost= 0.038482909
Epoch: 0006 cost= 0.037626555
Epoch: 0007 cost= 0.037460326
Epoch: 0008 cost= 0.037382912
Epoch: 0009 cost= 0.037405121
Epoch: 0010 cost= 0.036703122
Epoch: 0011 cost= 0.037190352
Epoch: 0012 cost= 0.037456324
Epoch: 0013 cost= 0.036033516
Epoch: 0014 cost= 0.036371263
Epoch: 0015 cost= 0.036838007
Optimization Finished!
Accuracy: 0.0357335
Error after 0 iterations:0.0357335
Epoch: 0001 cost= 0.036248718
Epoch: 0002 cost= 0.036096963
Epoch: 0003 cost= 0.035062204
Epoch: 0004 cost= 0.036261660
Epoch: 0005 cost= 0.035783947
Epoch: 0006 cost= 0.035260338
Epoch: 0007 cost= 0.035456808
Epoch: 0008 cost= 0.035777455
Epoch: 0009 cost= 0.035532211
Epoch: 0010 cost= 0.035477246
Epoch: 0011 cost= 0.035029975
Epoch: 0012 cost= 0.035140380
Epoch: 0013 cost= 0.035238191
Epoch: 0014 cost= 0.034741589
Epoch: 0015 cost= 0.034717666
Optimization Finished!
Accuracy: 0.0340711
Error after 0 iterations:0.0340711
Epoch: 0001 cost= 0.035293444
Epoch: 0002 cost= 0.034097296
Epoch: 0003 cost= 0.034749920
Epoch: 0004 cost= 0.034715894
Epoch: 0005 cost= 0.034640325
Epoch: 0006 cost= 0.034573245
Epoch: 0007 cost= 0.034905485
Epoch: 0008 cost= 0.033962733
Epoch: 0009 cost= 0.034233553
Epoch: 0010 cost= 0.034388909
Epoch: 0011 cost= 0.033588481
Epoch: 0012 cost= 0.034044032
Epoch: 0013 cost= 0.033354589
Epoch: 0014 cost= 0.033949519
Epoch: 0015 cost= 0.033558922
Optimization Finished!
Accuracy: 0.0327269
Error after 0 iterations:0.0327269
Epoch: 0001 cost= 0.033897788
Epoch: 0002 cost= 0.033925320
Epoch: 0003 cost= 0.033644528
Epoch: 0004 cost= 0.033424485
Epoch: 0005 cost= 0.033167912
Epoch: 0006 cost= 0.033406701
Epoch: 0007 cost= 0.033191689
Epoch: 0008 cost= 0.033609696
Epoch: 0009 cost= 0.032403889
Epoch: 0010 cost= 0.033151957
Epoch: 0011 cost= 0.033091874
Epoch: 0012 cost= 0.033301407
Epoch: 0013 cost= 0.033126943
Epoch: 0014 cost= 0.032826825
Epoch: 0015 cost= 0.032481287
Optimization Finished!
Accuracy: 0.0316168
Error after 0 iterations:0.0316168
Epoch: 0001 cost= 0.032383410
Epoch: 0002 cost= 0.032674338
Epoch: 0003 cost= 0.032855576
Epoch: 0004 cost= 0.032542764
Epoch: 0005 cost= 0.032405250
Epoch: 0006 cost= 0.032017721
Epoch: 0007 cost= 0.032002686
Epoch: 0008 cost= 0.032286127
Epoch: 0009 cost= 0.031775218
Epoch: 0010 cost= 0.031069607
Epoch: 0011 cost= 0.031287309
Epoch: 0012 cost= 0.031725089
Epoch: 0013 cost= 0.031685343
Epoch: 0014 cost= 0.031730041
Epoch: 0015 cost= 0.031359817
Optimization Finished!
Accuracy: 0.0306308
Error after 0 iterations:0.0306308
Epoch: 0001 cost= 0.031828921
Epoch: 0002 cost= 0.031930040
Epoch: 0003 cost= 0.031385755
Epoch: 0004 cost= 0.030952285
Epoch: 0005 cost= 0.031673913
Epoch: 0006 cost= 0.031075206
Epoch: 0007 cost= 0.031009015
Epoch: 0008 cost= 0.031367686
Epoch: 0009 cost= 0.030735902
Epoch: 0010 cost= 0.031225719
Epoch: 0011 cost= 0.030860461
Epoch: 0012 cost= 0.030864338
Epoch: 0013 cost= 0.030237111
Epoch: 0014 cost= 0.030371800
Epoch: 0015 cost= 0.030202129
Optimization Finished!
Accuracy: 0.0297111
Error after 0 iterations:0.0297111
Final params:
In [47]:
# Connected components search.
# thresh_multiplier softens the training criterion: a little fuzz is required
# because the error on the interpolated path (test-like) differs slightly from
# the training error each bead was optimized to.
thresh_multiplier = 1.1

# Each entry: [i1, i2, spring norm, "Connected"/"Disconnected"] per model pair.
results = []
# Keeps every WeightString path object constructed below.
tests = []
# Component label for each model index; 'not connected' until a low-loss
# path to some other model is found.
connecteddict = {model_idx: 'not connected' for model_idx in xrange(len(models))}
# Pairwise search: for every pair of trained models (i1, i2) not already known
# to share a component label, try to build a low-loss "string of beads" path
# between them in weight space; on success, merge their component labels.
for i1 in xrange(len(models)):
    print i1
    for i2 in xrange(len(models)):
        # Test each unordered pair once (i2 > i1), skipping pairs whose labels
        # already agree (and are real labels, not the 'not connected' placeholder).
        if i2 > i1 and ((connecteddict[i1] != connecteddict[i2]) or (connecteddict[i1] == 'not connected' or connecteddict[i2] == 'not connected')) :
            #print "slow1?"
            #print i1,i2
            #print models[0]
            #print models[1]
            #print models[0].params
            #print models[1].params
            # WeightString holds the chain of beads (weight/bias snapshots)
            # between the two endpoint models. NOTE(review): defined in an
            # earlier cell not visible here — confirm constructor argument order.
            test = WeightString(models[i1].params[0],models[i1].params[1],models[i2].params[0],models[i2].params[1],1,1)
            training_threshold = thresh
            depth = 0
            d_max = 10  # maximum number of refinement passes over the path
            #Check error between beads
            #Alg: for each bead at depth i, SGD until converged.
            #For beads with max error along path too large, add another bead between them, repeat
            #Keeps track of which indices to check the interpbeaderror between
            newindices = [0,1]
            while (depth < d_max):
                print newindices
                #print "slow2?"
                #X, y = GenTest(X,y)
                counter = 0
                # Train every not-yet-converged bead down to half the threshold.
                for i,c in enumerate(test.ConvergedList):
                    if c == False:
                        #print "slow3?"
                        error = test.SGDBead(i, .5*training_threshold, 20)
                        #print "slow4?"
                        #if counter%5000==0:
                        #    print counter
                        #    print error
                        test.ConvergedList[i] = True
                print test.ConvergedList
                # Measure the worst interpolation error on each segment that was
                # touched this pass (segments listed in newindices).
                interperrors = []
                interp_bead_indices = []
                for b in xrange(len(test.AllBeads)-1):
                    if b in newindices:
                        e = InterpBeadError(test.AllBeads[b][0],test.AllBeads[b][1], test.AllBeads[b+1][0], test.AllBeads[b+1][1])
                        interperrors.append(e)
                        interp_bead_indices.append(b)
                print interperrors
                print "Interp bead indices: "
                print interp_bead_indices
                # e[0] is the max error along a segment; e[1] is (presumably) the
                # t-index (out of 100) where it occurs — TODO confirm against
                # InterpBeadError's return in the earlier cell.
                if max([ee[0] for ee in interperrors]) < thresh_multiplier*training_threshold:
                    # Whole path is below threshold: mark success with the
                    # sentinel depth == 2*d_max, which also exits the while loop.
                    depth = 2*d_max
                    #print test.ConvergedList
                    #print test.SpringNorm(2)
                    #print "Done!"
                else:
                    del newindices[:]
                    #Interperrors stores the maximum error on the path between beads
                    #shift index to account for added beads
                    # Each insertion shifts every later bead index up by one, so
                    # `shift` tracks how many beads were inserted so far this pass.
                    shift = 0
                    for i, ie in enumerate(interperrors):
                        if ie[0] > thresh_multiplier*training_threshold:
                            k = interp_bead_indices[i]
                            # Insert a new bead at the worst point of the bad
                            # segment (t = ie[1]/100) and mark it unconverged.
                            ws,bs = model_interpolate(test.AllBeads[k+shift][0],test.AllBeads[k+shift][1],\
                                                      test.AllBeads[k+shift+1][0],test.AllBeads[k+shift+1][1],\
                                                      ie[1]/100.)
                            test.AllBeads.insert(k+shift+1,[ws,bs])
                            test.ConvergedList.insert(k+shift+1, False)
                            # Both halves of the split segment need re-checking
                            # next pass.
                            newindices.append(k+shift)
                            newindices.append(k+shift+1)
                            shift+=1
                    #print test.ConvergedList
                    #print test.SpringNorm(2)
                    #print d_max
                    depth += 1
            # Sentinel check: depth == 2*d_max only if the path converged above;
            # running out of refinement passes leaves depth == d_max.
            if depth == 2*d_max:
                results.append([i1,i2,test.SpringNorm(2),"Connected"])
                # Merge component labels (ad-hoc union of labeled sets):
                # neither labeled -> both get fresh label i1;
                # one labeled -> copy its label to the other;
                # both labeled -> relabel every model carrying i2's label to i1's.
                if connecteddict[i1] == 'not connected' and connecteddict[i2] == 'not connected':
                    connecteddict[i1] = i1
                    connecteddict[i2] = i1
                if connecteddict[i1] == 'not connected':
                    connecteddict[i1] = connecteddict[i2]
                else:
                    if connecteddict[i2] == 'not connected':
                        connecteddict[i2] = connecteddict[i1]
                    else:
                        if connecteddict[i1] != 'not connected' and connecteddict[i2] != 'not connected':
                            hold = connecteddict[i2]
                            connecteddict[i2] = connecteddict[i1]
                            for h in xrange(len(models)):
                                if connecteddict[h] == hold:
                                    connecteddict[h] = connecteddict[i1]
            else:
                results.append([i1,i2,test.SpringNorm(2),"Disconnected"])
            #print results[-1]
            tests.append(test)
uniquecomps = []
totalcomps = 0
for i in xrange(len(models)):
if not (connecteddict[i] in uniquecomps):
uniquecomps.append(connecteddict[i])
if connecteddict[i] == 'not connected':
totalcomps += 1
#print i,connecteddict[i]
notconoffset = 0
if 'not connected' in uniquecomps:
notconoffset = -1
print "Thresh: " + str(thresh)
print "Comps: " + str(len(uniquecomps) + notconoffset + totalcomps)
#for i in xrange(len(synapses)):
# print connecteddict[i]
connsum = []
for r in results:
if r[3] == "Connected":
connsum.append(r[2])
#print r[2]
print "***"
print np.average(connsum)
print np.std(connsum)
0
[0, 1]
Accuracy: 0.0141108
Final bead error: 0.0141108
[True, True, True]
Accuracy: 0.0297765 0
Accuracy: 0.031562 1
Accuracy: 0.0349438 2
Accuracy: 0.0400737 3
Accuracy: 0.0469294 4
Accuracy: 0.055473 5
Accuracy: 0.0655853 6
Accuracy: 0.0772344 7
Accuracy: 0.0905034 8
Accuracy: 0.105379 9
Accuracy: 0.121862 10
Accuracy: 0.139814 11
Accuracy: 0.159099 12
Accuracy: 0.179746 13
Accuracy: 0.201743 14
Accuracy: 0.224993 15
Accuracy: 0.249388 16
Accuracy: 0.274853 17
Accuracy: 0.301352 18
Accuracy: 0.328792 19
Accuracy: 0.357109 20
Accuracy: 0.386159 21
Accuracy: 0.415949 22
Accuracy: 0.446417 23
Accuracy: 0.477417 24
Accuracy: 0.508893 25
Accuracy: 0.540697 26
Accuracy: 0.572815 27
Accuracy: 0.605149 28
Accuracy: 0.637665 29
Accuracy: 0.670133 30
Accuracy: 0.702353 31
Accuracy: 0.734242 32
Accuracy: 0.765723 33
Accuracy: 0.796727 34
Accuracy: 0.827188 35
Accuracy: 0.857047 36
Accuracy: 0.88625 37
Accuracy: 0.914749 38
Accuracy: 0.942497 39
Accuracy: 0.969456 40
Accuracy: 0.995591 41
Accuracy: 1.02087 42
Accuracy: 1.04527 43
Accuracy: 1.06876 44
Accuracy: 1.09132 45
Accuracy: 1.11295 46
Accuracy: 1.13362 47
Accuracy: 1.15333 48
Accuracy: 1.17207 49
Accuracy: 1.18984 50
Accuracy: 1.20663 51
Accuracy: 1.22246 52
Accuracy: 1.22168 53
Accuracy: 1.19704 54
Accuracy: 1.17113 55
Accuracy: 1.14402 56
Accuracy: 1.11579 57
Accuracy: 1.08651 58
Accuracy: 1.05627 59
Accuracy: 1.02516 60
Accuracy: 0.993243 61
Accuracy: 0.960619 62
Accuracy: 0.92737 63
Accuracy: 0.893581 64
Accuracy: 0.859347 65
Accuracy: 0.82476 66
Accuracy: 0.789913 67
Accuracy: 0.754891 68
Accuracy: 0.719789 69
Accuracy: 0.684691 70
Accuracy: 0.649682 71
Accuracy: 0.61485 72
Accuracy: 0.580278 73
Accuracy: 0.546057 74
Accuracy: 0.512267 75
Accuracy: 0.478991 76
Accuracy: 0.446308 77
Accuracy: 0.41429 78
Accuracy: 0.383018 79
Accuracy: 0.352569 80
Accuracy: 0.32301 81
Accuracy: 0.294412 82
Accuracy: 0.266847 83
Accuracy: 0.240381 84
Accuracy: 0.215078 85
Accuracy: 0.191003 86
Accuracy: 0.168219 87
Accuracy: 0.146789 88
Accuracy: 0.126765 89
Accuracy: 0.108206 90
Accuracy: 0.0911623 91
Accuracy: 0.0756858 92
Accuracy: 0.0618248 93
Accuracy: 0.0496274 94
Accuracy: 0.0391368 95
Accuracy: 0.0303955 96
Accuracy: 0.0234425 97
Accuracy: 0.0183162 98
Accuracy: 0.0150518 99
Accuracy: 0.0135378 0
Accuracy: 0.0140632 1
Accuracy: 0.0153402 2
Accuracy: 0.0173452 3
Accuracy: 0.0200535 4
Accuracy: 0.0234391 5
Accuracy: 0.0274738 6
Accuracy: 0.0321171 7
Accuracy: 0.0373357 8
Accuracy: 0.0431048 9
Accuracy: 0.0494026 10
Accuracy: 0.0562066 11
Accuracy: 0.0634935 12
Accuracy: 0.0712424 13
Accuracy: 0.079437 14
Accuracy: 0.0880638 15
Accuracy: 0.097114 16
Accuracy: 0.106575 17
Accuracy: 0.116411 18
Accuracy: 0.126618 19
Accuracy: 0.137179 20
Accuracy: 0.148091 21
Accuracy: 0.159328 22
Accuracy: 0.170913 23
Accuracy: 0.182787 24
Accuracy: 0.194982 25
Accuracy: 0.207489 26
Accuracy: 0.220308 27
Accuracy: 0.233478 28
Accuracy: 0.246969 29
Accuracy: 0.260815 30
Accuracy: 0.274996 31
Accuracy: 0.289221 32
Accuracy: 0.303428 33
Accuracy: 0.317578 34
Accuracy: 0.331628 35
Accuracy: 0.345539 36
Accuracy: 0.359269 37
Accuracy: 0.372775 38
Accuracy: 0.386017 39
Accuracy: 0.398957 40
Accuracy: 0.41155 41
Accuracy: 0.423753 42
Accuracy: 0.435527 43
Accuracy: 0.446833 44
Accuracy: 0.457632 45
Accuracy: 0.467889 46
Accuracy: 0.477561 47
Accuracy: 0.486614 48
Accuracy: 0.495013 49
Accuracy: 0.502721 50
Accuracy: 0.509702 51
Accuracy: 0.515921 52
Accuracy: 0.521356 53
Accuracy: 0.525972 54
Accuracy: 0.529738 55
Accuracy: 0.532635 56
Accuracy: 0.534635 57
Accuracy: 0.535728 58
Accuracy: 0.535894 59
Accuracy: 0.535117 60
Accuracy: 0.533381 61
Accuracy: 0.530677 62
Accuracy: 0.526996 63
Accuracy: 0.522327 64
Accuracy: 0.516663 65
Accuracy: 0.510002 66
Accuracy: 0.502343 67
Accuracy: 0.4937 68
Accuracy: 0.484107 69
Accuracy: 0.473567 70
Accuracy: 0.462108 71
Accuracy: 0.449763 72
Accuracy: 0.436539 73
Accuracy: 0.422472 74
Accuracy: 0.407617 75
Accuracy: 0.392026 76
Accuracy: 0.375738 77
Accuracy: 0.358795 78
Accuracy: 0.34128 79
Accuracy: 0.323267 80
Accuracy: 0.304835 81
Accuracy: 0.286063 82
Accuracy: 0.267038 83
Accuracy: 0.247852 84
Accuracy: 0.228618 85
Accuracy: 0.209435 86
Accuracy: 0.190419 87
Accuracy: 0.171706 88
Accuracy: 0.153432 89
Accuracy: 0.135737 90
Accuracy: 0.118781 91
Accuracy: 0.10273 92
Accuracy: 0.0877537 93
Accuracy: 0.0740367 94
Accuracy: 0.0617751 95
Accuracy: 0.0511752 96
Accuracy: 0.0424457 97
Accuracy: 0.0358068 98
Accuracy: 0.0314941 99
[(1.2224602, 52), (0.53589422, 59)]
Interp bead indices:
[0, 1]
[0, 1, 2, 3]
Accuracy: 0.0375192
Accuracy: 0.0118964
Final bead error: 0.0118964
Accuracy: 0.0134421
Final bead error: 0.0134421
[True, True, True, True, True]
Accuracy: 0.028936 0
Accuracy: 0.0286747 1
Accuracy: 0.0284315 2
Accuracy: 0.0282061 3
Accuracy: 0.0279991 4
Accuracy: 0.0278085 5
Accuracy: 0.0276307 6
Accuracy: 0.0274681 7
Accuracy: 0.0273212 8
Accuracy: 0.0271919 9
Accuracy: 0.0270703 10
Accuracy: 0.0269597 11
Accuracy: 0.0268652 12
Accuracy: 0.0267859 13
Accuracy: 0.026714 14
Accuracy: 0.0266507 15
Accuracy: 0.0266013 16
Accuracy: 0.0265645 17
Accuracy: 0.0265381 18
Accuracy: 0.0265188 19
Accuracy: 0.0265031 20
Accuracy: 0.0264903 21
Accuracy: 0.0264844 22
Accuracy: 0.0264889 23
Accuracy: 0.026501 24
Accuracy: 0.0265184 25
Accuracy: 0.0265373 26
Accuracy: 0.0265591 27
Accuracy: 0.0265836 28
Accuracy: 0.0266109 29
Accuracy: 0.0266429 30
Accuracy: 0.026681 31
Accuracy: 0.0267261 32
Accuracy: 0.0267755 33
Accuracy: 0.0268318 34
Accuracy: 0.0268881 35
Accuracy: 0.0269453 36
Accuracy: 0.0270066 37
Accuracy: 0.0270694 38
Accuracy: 0.0271324 39
Accuracy: 0.027196 40
Accuracy: 0.0272594 41
Accuracy: 0.027321 42
Accuracy: 0.0273794 43
Accuracy: 0.0274342 44
Accuracy: 0.0274854 45
Accuracy: 0.0275317 46
Accuracy: 0.0275728 47
Accuracy: 0.0276082 48
Accuracy: 0.0276369 49
Accuracy: 0.0276584 50
Accuracy: 0.0276724 51
Accuracy: 0.0276776 52
Accuracy: 0.0276739 53
Accuracy: 0.027659 54
Accuracy: 0.0276339 55
Accuracy: 0.0275997 56
Accuracy: 0.0275519 57
Accuracy: 0.0274902 58
Accuracy: 0.0274142 59
Accuracy: 0.0273246 60
Accuracy: 0.0272183 61
Accuracy: 0.0270933 62
Accuracy: 0.0269517 63
Accuracy: 0.0267901 64
Accuracy: 0.0266086 65
Accuracy: 0.0264116 66
Accuracy: 0.0261955 67
Accuracy: 0.0259563 68
Accuracy: 0.0256974 69
Accuracy: 0.0254182 70
Accuracy: 0.0251221 71
Accuracy: 0.0248087 72
Accuracy: 0.0244698 73
Accuracy: 0.0241043 74
Accuracy: 0.0237144 75
Accuracy: 0.0233018 76
Accuracy: 0.0228692 77
Accuracy: 0.0224173 78
Accuracy: 0.021945 79
Accuracy: 0.0214461 80
Accuracy: 0.0209218 81
Accuracy: 0.0203794 82
Accuracy: 0.0198445 83
Accuracy: 0.0193231 84
Accuracy: 0.0188147 85
Accuracy: 0.018319 86
Accuracy: 0.0178354 87
Accuracy: 0.0173635 88
Accuracy: 0.0169029 89
Accuracy: 0.0164533 90
Accuracy: 0.0160141 91
Accuracy: 0.0155851 92
Accuracy: 0.015166 93
Accuracy: 0.0147563 94
Accuracy: 0.0143559 95
Accuracy: 0.0139645 96
Accuracy: 0.0135818 97
Accuracy: 0.0132074 98
Accuracy: 0.0128414 99
Accuracy: 0.0127498 0
Accuracy: 0.0138134 1
Accuracy: 0.0160641 2
Accuracy: 0.0194453 3
Accuracy: 0.0239024 4
Accuracy: 0.029382 5
Accuracy: 0.0358324 6
Accuracy: 0.0432033 7
Accuracy: 0.0514456 8
Accuracy: 0.060512 9
Accuracy: 0.0703563 10
Accuracy: 0.0809337 11
Accuracy: 0.0922009 12
Accuracy: 0.104106 13
Accuracy: 0.116478 14
Accuracy: 0.129167 15
Accuracy: 0.142055 16
Accuracy: 0.155083 17
Accuracy: 0.168151 18
Accuracy: 0.18113 19
Accuracy: 0.193923 20
Accuracy: 0.20643 21
Accuracy: 0.218505 22
Accuracy: 0.230016 23
Accuracy: 0.240799 24
Accuracy: 0.250591 25
Accuracy: 0.259071 26
Accuracy: 0.26628 27
Accuracy: 0.272116 28
Accuracy: 0.276117 29
Accuracy: 0.27936 30
Accuracy: 0.282243 31
Accuracy: 0.284764 32
Accuracy: 0.286921 33
Accuracy: 0.28871 34
Accuracy: 0.290133 35
Accuracy: 0.291189 36
Accuracy: 0.291879 37
Accuracy: 0.292204 38
Accuracy: 0.292168 39
Accuracy: 0.291773 40
Accuracy: 0.291023 41
Accuracy: 0.289923 42
Accuracy: 0.288478 43
Accuracy: 0.286694 44
Accuracy: 0.284576 45
Accuracy: 0.282133 46
Accuracy: 0.279372 47
Accuracy: 0.2763 48
Accuracy: 0.272928 49
Accuracy: 0.269263 50
Accuracy: 0.265315 51
Accuracy: 0.261096 52
Accuracy: 0.256614 53
Accuracy: 0.251882 54
Accuracy: 0.246911 55
Accuracy: 0.241712 56
Accuracy: 0.236298 57
Accuracy: 0.230682 58
Accuracy: 0.224877 59
Accuracy: 0.218895 60
Accuracy: 0.212751 61
Accuracy: 0.206458 62
Accuracy: 0.20003 63
Accuracy: 0.193482 64
Accuracy: 0.186829 65
Accuracy: 0.180085 66
Accuracy: 0.173266 67
Accuracy: 0.166386 68
Accuracy: 0.159461 69
Accuracy: 0.152508 70
Accuracy: 0.145541 71
Accuracy: 0.138576 72
Accuracy: 0.13163 73
Accuracy: 0.124721 74
Accuracy: 0.11788 75
Accuracy: 0.111134 76
Accuracy: 0.104505 77
Accuracy: 0.0980068 78
Accuracy: 0.0916559 79
Accuracy: 0.085458 80
Accuracy: 0.0794262 81
Accuracy: 0.0735683 82
Accuracy: 0.0678973 83
Accuracy: 0.0624326 84
Accuracy: 0.0571905 85
Accuracy: 0.052176 86
Accuracy: 0.0474026 87
Accuracy: 0.0428882 88
Accuracy: 0.0386473 89
Accuracy: 0.0346963 90
Accuracy: 0.0310439 91
Accuracy: 0.0276993 92
Accuracy: 0.0246764 93
Accuracy: 0.0219881 94
Accuracy: 0.0196502 95
Accuracy: 0.0176742 96
Accuracy: 0.0160727 97
Accuracy: 0.0148591 98
Accuracy: 0.0140445 99
Accuracy: 0.0139125 0
Accuracy: 0.013939 1
Accuracy: 0.0140888 2
Accuracy: 0.0143557 3
Accuracy: 0.0147337 4
Accuracy: 0.0152165 5
Accuracy: 0.0157979 6
Accuracy: 0.0164716 7
Accuracy: 0.0172314 8
Accuracy: 0.0180709 9
Accuracy: 0.0189839 10
Accuracy: 0.0199582 11
Accuracy: 0.0209831 12
Accuracy: 0.0220589 13
Accuracy: 0.0231771 14
Accuracy: 0.0243317 15
Accuracy: 0.0255181 16
Accuracy: 0.0267307 17
Accuracy: 0.0279657 18
Accuracy: 0.0292187 19
Accuracy: 0.0304853 20
Accuracy: 0.0317617 21
Accuracy: 0.0330446 22
Accuracy: 0.0343301 23
Accuracy: 0.0356141 24
Accuracy: 0.0368949 25
Accuracy: 0.0381697 26
Accuracy: 0.0394333 27
Accuracy: 0.0406822 28
Accuracy: 0.0419106 29
Accuracy: 0.0431171 30
Accuracy: 0.0443017 31
Accuracy: 0.0454634 32
Accuracy: 0.0466029 33
Accuracy: 0.0477125 34
Accuracy: 0.0487875 35
Accuracy: 0.0498302 36
Accuracy: 0.0508435 37
Accuracy: 0.051831 38
Accuracy: 0.0527866 39
Accuracy: 0.0537095 40
Accuracy: 0.0546064 41
Accuracy: 0.0554606 42
Accuracy: 0.0562814 43
Accuracy: 0.0570567 44
Accuracy: 0.0577873 45
Accuracy: 0.0584773 46
Accuracy: 0.0591233 47
Accuracy: 0.0597238 48
Accuracy: 0.0602845 49
Accuracy: 0.0608103 50
Accuracy: 0.0613049 51
Accuracy: 0.0617678 52
Accuracy: 0.0621894 53
Accuracy: 0.0625243 54
Accuracy: 0.0627697 55
Accuracy: 0.0629231 56
Accuracy: 0.0629842 57
Accuracy: 0.0629511 58
Accuracy: 0.0628236 59
Accuracy: 0.0626004 60
Accuracy: 0.0622809 61
Accuracy: 0.0618655 62
Accuracy: 0.061354 63
Accuracy: 0.0607468 64
Accuracy: 0.0600446 65
Accuracy: 0.0592483 66
Accuracy: 0.0583603 67
Accuracy: 0.057382 68
Accuracy: 0.056316 69
Accuracy: 0.0551649 70
Accuracy: 0.0539318 71
Accuracy: 0.0526196 72
Accuracy: 0.0512328 73
Accuracy: 0.0497756 74
Accuracy: 0.0482527 75
Accuracy: 0.0466685 76
Accuracy: 0.045029 77
Accuracy: 0.0433403 78
Accuracy: 0.0416086 79
Accuracy: 0.0398417 80
Accuracy: 0.0380468 81
Accuracy: 0.036232 82
Accuracy: 0.0344062 83
Accuracy: 0.0325783 84
Accuracy: 0.0307584 85
Accuracy: 0.0289566 86
Accuracy: 0.0271842 87
Accuracy: 0.0254525 88
Accuracy: 0.023774 89
Accuracy: 0.0221614 90
Accuracy: 0.0206296 91
Accuracy: 0.0191917 92
Accuracy: 0.0178635 93
Accuracy: 0.0166612 94
Accuracy: 0.015601 95
Accuracy: 0.0146984 96
Accuracy: 0.0139729 97
Accuracy: 0.0134436 98
Accuracy: 0.0131301 99
Accuracy: 0.0131809 0
Accuracy: 0.0133396 1
Accuracy: 0.0135305 2
Accuracy: 0.0137522 3
Accuracy: 0.0140031 4
Accuracy: 0.0142819 5
Accuracy: 0.0145872 6
Accuracy: 0.0149174 7
Accuracy: 0.0152712 8
Accuracy: 0.015647 9
Accuracy: 0.0160437 10
Accuracy: 0.0164599 11
Accuracy: 0.0168942 12
Accuracy: 0.0173451 13
Accuracy: 0.0178111 14
Accuracy: 0.0182909 15
Accuracy: 0.0187832 16
Accuracy: 0.0192866 17
Accuracy: 0.0197999 18
Accuracy: 0.0203218 19
Accuracy: 0.0208508 20
Accuracy: 0.0213856 21
Accuracy: 0.021925 22
Accuracy: 0.0224676 23
Accuracy: 0.0230123 24
Accuracy: 0.0235579 25
Accuracy: 0.0241034 26
Accuracy: 0.0246472 27
Accuracy: 0.0251882 28
Accuracy: 0.0257254 29
Accuracy: 0.0262578 30
Accuracy: 0.0267843 31
Accuracy: 0.0273038 32
Accuracy: 0.0278157 33
Accuracy: 0.0283188 34
Accuracy: 0.0288117 35
Accuracy: 0.0292935 36
Accuracy: 0.0297631 37
Accuracy: 0.0302196 38
Accuracy: 0.0306626 39
Accuracy: 0.0310915 40
Accuracy: 0.0315052 41
Accuracy: 0.0319028 42
Accuracy: 0.0322836 43
Accuracy: 0.032647 44
Accuracy: 0.0329923 45
Accuracy: 0.0333187 46
Accuracy: 0.033626 47
Accuracy: 0.0339137 48
Accuracy: 0.0341816 49
Accuracy: 0.0344289 50
Accuracy: 0.0346551 51
Accuracy: 0.0348603 52
Accuracy: 0.0350446 53
Accuracy: 0.0352071 54
Accuracy: 0.0353472 55
Accuracy: 0.0354652 56
Accuracy: 0.0355608 57
Accuracy: 0.0356343 58
Accuracy: 0.0356858 59
Accuracy: 0.0357159 60
Accuracy: 0.035724 61
Accuracy: 0.0357109 62
Accuracy: 0.0356765 63
Accuracy: 0.035621 64
Accuracy: 0.0355453 65
Accuracy: 0.0354502 66
Accuracy: 0.0353362 67
Accuracy: 0.0352037 68
Accuracy: 0.0350532 69
Accuracy: 0.0348864 70
Accuracy: 0.0347043 71
Accuracy: 0.0345072 72
Accuracy: 0.0342967 73
Accuracy: 0.0340738 74
Accuracy: 0.0338391 75
Accuracy: 0.0335938 76
Accuracy: 0.0333396 77
Accuracy: 0.0330773 78
Accuracy: 0.0328082 79
Accuracy: 0.0325338 80
Accuracy: 0.0322565 81
Accuracy: 0.031978 82
Accuracy: 0.0317001 83
Accuracy: 0.0314243 84
Accuracy: 0.0311544 85
Accuracy: 0.0308928 86
Accuracy: 0.0306403 87
Accuracy: 0.0304015 88
Accuracy: 0.0301776 89
Accuracy: 0.0299698 90
Accuracy: 0.0297812 91
Accuracy: 0.0296146 92
Accuracy: 0.0294728 93
Accuracy: 0.0293581 94
Accuracy: 0.0292738 95
Accuracy: 0.0292235 96
Accuracy: 0.0292122 97
Accuracy: 0.0292439 98
Accuracy: 0.0293233 99
[(0.02893603, 0), (0.29220423, 38), (0.062984161, 57), (0.035724018, 61)]
Interp bead indices:
[0, 1, 2, 3]
[1, 2, 3, 4, 5, 6]
Accuracy: 0.0146754
Final bead error: 0.0146754
Accuracy: 0.00920634
Final bead error: 0.00920634
Accuracy: 0.010424
Final bead error: 0.010424
[True, True, True, True, True, True, True, True]
Accuracy: 0.0131544 0
Accuracy: 0.0133263 1
Accuracy: 0.0135704 2
Accuracy: 0.0138848 3
Accuracy: 0.0142675 4
Accuracy: 0.0147165 5
Accuracy: 0.01523 6
Accuracy: 0.0158059 7
Accuracy: 0.0164425 8
Accuracy: 0.0171379 9
Accuracy: 0.0178903 10
Accuracy: 0.0186979 11
Accuracy: 0.0195588 12
Accuracy: 0.0204715 13
Accuracy: 0.0214341 14
Accuracy: 0.0224451 15
Accuracy: 0.0235026 16
Accuracy: 0.024605 17
Accuracy: 0.0257509 18
Accuracy: 0.0269384 19
Accuracy: 0.0281662 20
Accuracy: 0.0294327 21
Accuracy: 0.0307362 22
Accuracy: 0.0320754 23
Accuracy: 0.0334488 24
Accuracy: 0.0348548 25
Accuracy: 0.0362922 26
Accuracy: 0.0377594 27
Accuracy: 0.0392495 28
Accuracy: 0.0407521 29
Accuracy: 0.0422667 30
Accuracy: 0.0437874 31
Accuracy: 0.0453105 32
Accuracy: 0.0468304 33
Accuracy: 0.0483451 34
Accuracy: 0.0498507 35
Accuracy: 0.0513437 36
Accuracy: 0.0528102 37
Accuracy: 0.0542428 38
Accuracy: 0.0556349 39
Accuracy: 0.0569766 40
Accuracy: 0.0582591 41
Accuracy: 0.0594776 42
Accuracy: 0.0606291 43
Accuracy: 0.0616991 44
Accuracy: 0.0626719 45
Accuracy: 0.0635059 46
Accuracy: 0.0641655 47
Accuracy: 0.0646595 48
Accuracy: 0.0649589 49
Accuracy: 0.0650117 50
Accuracy: 0.0647993 51
Accuracy: 0.0642944 52
Accuracy: 0.0634049 53
Accuracy: 0.0622773 54
Accuracy: 0.0611309 55
Accuracy: 0.0599686 56
Accuracy: 0.0587916 57
Accuracy: 0.0576011 58
Accuracy: 0.0563985 59
Accuracy: 0.0551849 60
Accuracy: 0.0539616 61
Accuracy: 0.0527299 62
Accuracy: 0.0514911 63
Accuracy: 0.0502467 64
Accuracy: 0.0489979 65
Accuracy: 0.0477463 66
Accuracy: 0.0464932 67
Accuracy: 0.04524 68
Accuracy: 0.0439883 69
Accuracy: 0.0427396 70
Accuracy: 0.0414954 71
Accuracy: 0.0402571 72
Accuracy: 0.0390265 73
Accuracy: 0.0378051 74
Accuracy: 0.0365945 75
Accuracy: 0.0353963 76
Accuracy: 0.0342123 77
Accuracy: 0.033044 78
Accuracy: 0.0318932 79
Accuracy: 0.0307616 80
Accuracy: 0.029651 81
Accuracy: 0.0285631 82
Accuracy: 0.0274997 83
Accuracy: 0.0264626 84
Accuracy: 0.0254536 85
Accuracy: 0.0244747 86
Accuracy: 0.0235276 87
Accuracy: 0.0226142 88
Accuracy: 0.0217365 89
Accuracy: 0.0208964 90
Accuracy: 0.0200958 91
Accuracy: 0.0193367 92
Accuracy: 0.018621 93
Accuracy: 0.0179508 94
Accuracy: 0.0173281 95
Accuracy: 0.0167549 96
Accuracy: 0.0162334 97
Accuracy: 0.0157655 98
Accuracy: 0.0153533 99
Accuracy: 0.0137387 0
Accuracy: 0.0138952 1
Accuracy: 0.0140927 2
Accuracy: 0.0143277 3
Accuracy: 0.0145968 4
Accuracy: 0.0148967 5
Accuracy: 0.0152244 6
Accuracy: 0.0155767 7
Accuracy: 0.0159507 8
Accuracy: 0.0163437 9
Accuracy: 0.0167529 10
Accuracy: 0.0171756 11
Accuracy: 0.0176094 12
Accuracy: 0.018052 13
Accuracy: 0.0185008 14
Accuracy: 0.0189539 15
Accuracy: 0.019409 16
Accuracy: 0.0198641 17
Accuracy: 0.0203173 18
Accuracy: 0.0207668 19
Accuracy: 0.0212109 20
Accuracy: 0.0216478 21
Accuracy: 0.0220761 22
Accuracy: 0.0224942 23
Accuracy: 0.0229008 24
Accuracy: 0.0232945 25
Accuracy: 0.0236742 26
Accuracy: 0.0240386 27
Accuracy: 0.0243867 28
Accuracy: 0.0247176 29
Accuracy: 0.0250303 30
Accuracy: 0.025324 31
Accuracy: 0.0255978 32
Accuracy: 0.0258531 33
Accuracy: 0.0260896 34
Accuracy: 0.0263068 35
Accuracy: 0.0265044 36
Accuracy: 0.0266821 37
Accuracy: 0.026838 38
Accuracy: 0.0269716 39
Accuracy: 0.0270833 40
Accuracy: 0.0271729 41
Accuracy: 0.0272412 42
Accuracy: 0.0272882 43
Accuracy: 0.0273133 44
Accuracy: 0.0273176 45
Accuracy: 0.0273015 46
Accuracy: 0.0272665 47
Accuracy: 0.0272095 48
Accuracy: 0.0271314 49
Accuracy: 0.0270326 50
Accuracy: 0.0269141 51
Accuracy: 0.0267788 52
Accuracy: 0.0266262 53
Accuracy: 0.0264566 54
Accuracy: 0.0262684 55
Accuracy: 0.0260622 56
Accuracy: 0.0258398 57
Accuracy: 0.0256013 58
Accuracy: 0.0253466 59
Accuracy: 0.0250764 60
Accuracy: 0.0247911 61
Accuracy: 0.0244911 62
Accuracy: 0.0241771 63
Accuracy: 0.0238512 64
Accuracy: 0.0235135 65
Accuracy: 0.0231651 66
Accuracy: 0.0228072 67
Accuracy: 0.0224409 68
Accuracy: 0.0220658 69
Accuracy: 0.021683 70
Accuracy: 0.0212933 71
Accuracy: 0.0208984 72
Accuracy: 0.0205003 73
Accuracy: 0.0200995 74
Accuracy: 0.0196973 75
Accuracy: 0.0192956 76
Accuracy: 0.0188943 77
Accuracy: 0.0184941 78
Accuracy: 0.0180966 79
Accuracy: 0.0177028 80
Accuracy: 0.0173143 81
Accuracy: 0.0169322 82
Accuracy: 0.0165577 83
Accuracy: 0.0161917 84
Accuracy: 0.0158363 85
Accuracy: 0.0154928 86
Accuracy: 0.0151618 87
Accuracy: 0.0148443 88
Accuracy: 0.0145416 89
Accuracy: 0.0142555 90
Accuracy: 0.0139872 91
Accuracy: 0.013738 92
Accuracy: 0.0135097 93
Accuracy: 0.0133033 94
Accuracy: 0.0131203 95
Accuracy: 0.0129618 96
Accuracy: 0.0128293 97
Accuracy: 0.012724 98
Accuracy: 0.0126471 99
Accuracy: 0.014121 0
Accuracy: 0.0140783 1
Accuracy: 0.0140532 2
Accuracy: 0.0140449 3
Accuracy: 0.0140523 4
Accuracy: 0.0140745 5
Accuracy: 0.0141107 6
Accuracy: 0.0141599 7
Accuracy: 0.0142211 8
Accuracy: 0.0142935 9
Accuracy: 0.0143761 10
Accuracy: 0.0144682 11
Accuracy: 0.0145688 12
Accuracy: 0.014677 13
Accuracy: 0.0147921 14
Accuracy: 0.0149132 15
Accuracy: 0.0150395 16
Accuracy: 0.0151701 17
Accuracy: 0.0153043 18
Accuracy: 0.0154396 19
Accuracy: 0.0155741 20
Accuracy: 0.0157065 21
Accuracy: 0.0158361 22
Accuracy: 0.0159636 23
Accuracy: 0.0160878 24
Accuracy: 0.0162083 25
Accuracy: 0.016323 26
Accuracy: 0.0164332 27
Accuracy: 0.0165379 28
Accuracy: 0.0166366 29
Accuracy: 0.0167285 30
Accuracy: 0.0168136 31
Accuracy: 0.0168913 32
Accuracy: 0.0169614 33
Accuracy: 0.0170236 34
Accuracy: 0.0170777 35
Accuracy: 0.0171237 36
Accuracy: 0.0171611 37
Accuracy: 0.0171896 38
Accuracy: 0.0172092 39
Accuracy: 0.0172195 40
Accuracy: 0.0172205 41
Accuracy: 0.0172119 42
Accuracy: 0.0171939 43
Accuracy: 0.0171661 44
Accuracy: 0.0171285 45
Accuracy: 0.0170809 46
Accuracy: 0.0170234 47
Accuracy: 0.0169559 48
Accuracy: 0.0168784 49
Accuracy: 0.0167907 50
Accuracy: 0.0166929 51
Accuracy: 0.0165849 52
Accuracy: 0.0164686 53
Accuracy: 0.0163435 54
Accuracy: 0.0162096 55
Accuracy: 0.0160685 56
Accuracy: 0.0159194 57
Accuracy: 0.0157622 58
Accuracy: 0.0155983 59
Accuracy: 0.0154269 60
Accuracy: 0.015248 61
Accuracy: 0.0150626 62
Accuracy: 0.0148708 63
Accuracy: 0.0146734 64
Accuracy: 0.01447 65
Accuracy: 0.014262 66
Accuracy: 0.0140492 67
Accuracy: 0.0138313 68
Accuracy: 0.013609 69
Accuracy: 0.0133838 70
Accuracy: 0.0131546 71
Accuracy: 0.0129217 72
Accuracy: 0.0126878 73
Accuracy: 0.012454 74
Accuracy: 0.0122212 75
Accuracy: 0.0119888 76
Accuracy: 0.0117569 77
Accuracy: 0.0115268 78
Accuracy: 0.0112991 79
Accuracy: 0.0110745 80
Accuracy: 0.0108539 81
Accuracy: 0.0106357 82
Accuracy: 0.0104217 83
Accuracy: 0.0102125 84
Accuracy: 0.0100098 85
Accuracy: 0.00981385 86
Accuracy: 0.00962579 87
Accuracy: 0.00944694 88
Accuracy: 0.00927819 89
Accuracy: 0.00911833 90
Accuracy: 0.00897102 91
Accuracy: 0.00883628 92
Accuracy: 0.00871432 93
Accuracy: 0.0086061 94
Accuracy: 0.00850878 95
Accuracy: 0.00842275 96
Accuracy: 0.00834914 97
Accuracy: 0.00828926 98
Accuracy: 0.00824475 99
Accuracy: 0.00777537 0
Accuracy: 0.0078309 1
Accuracy: 0.00789198 2
Accuracy: 0.00795831 3
Accuracy: 0.00802961 4
Accuracy: 0.0081056 5
Accuracy: 0.00818603 6
Accuracy: 0.00827059 7
Accuracy: 0.00835903 8
Accuracy: 0.00845109 9
Accuracy: 0.00854648 10
Accuracy: 0.00864499 11
Accuracy: 0.00874629 12
Accuracy: 0.00885018 13
Accuracy: 0.00895638 14
Accuracy: 0.00906466 15
Accuracy: 0.00917477 16
Accuracy: 0.00928646 17
Accuracy: 0.00939952 18
Accuracy: 0.0095137 19
Accuracy: 0.00962876 20
Accuracy: 0.00974448 21
Accuracy: 0.00986067 22
Accuracy: 0.00997706 23
Accuracy: 0.0100935 24
Accuracy: 0.0102097 25
Accuracy: 0.0103255 26
Accuracy: 0.0104407 27
Accuracy: 0.0105551 28
Accuracy: 0.0106686 29
Accuracy: 0.0107809 30
Accuracy: 0.0108919 31
Accuracy: 0.0110013 32
Accuracy: 0.0111091 33
Accuracy: 0.0112151 34
Accuracy: 0.0113191 35
Accuracy: 0.0114209 36
Accuracy: 0.0115205 37
Accuracy: 0.0116176 38
Accuracy: 0.0117123 39
Accuracy: 0.0118042 40
Accuracy: 0.0118933 41
Accuracy: 0.0119796 42
Accuracy: 0.0120628 43
Accuracy: 0.012143 44
Accuracy: 0.01222 45
Accuracy: 0.0122937 46
Accuracy: 0.012364 47
Accuracy: 0.0124309 48
Accuracy: 0.0124944 49
Accuracy: 0.0125543 50
Accuracy: 0.0126107 51
Accuracy: 0.0126635 52
Accuracy: 0.0127126 53
Accuracy: 0.0127581 54
Accuracy: 0.0127999 55
Accuracy: 0.0128381 56
Accuracy: 0.0128726 57
Accuracy: 0.0129035 58
Accuracy: 0.0129308 59
Accuracy: 0.0129546 60
Accuracy: 0.0129749 61
Accuracy: 0.0129919 62
Accuracy: 0.0130053 63
Accuracy: 0.0130155 64
Accuracy: 0.0130223 65
Accuracy: 0.0130259 66
Accuracy: 0.0130265 67
Accuracy: 0.0130242 68
Accuracy: 0.013019 69
Accuracy: 0.0130111 70
Accuracy: 0.0130007 71
Accuracy: 0.0129878 72
Accuracy: 0.0129727 73
Accuracy: 0.0129555 74
Accuracy: 0.0129364 75
Accuracy: 0.0129156 76
Accuracy: 0.0128936 77
Accuracy: 0.0128703 78
Accuracy: 0.0128461 79
Accuracy: 0.0128213 80
Accuracy: 0.012796 81
Accuracy: 0.0127707 82
Accuracy: 0.0127456 83
Accuracy: 0.0127211 84
Accuracy: 0.0126973 85
Accuracy: 0.0126747 86
Accuracy: 0.0126537 87
Accuracy: 0.0126345 88
Accuracy: 0.0126176 89
Accuracy: 0.0126037 90
Accuracy: 0.0125929 91
Accuracy: 0.0125854 92
Accuracy: 0.0125818 93
Accuracy: 0.0125826 94
Accuracy: 0.0125881 95
Accuracy: 0.0125986 96
Accuracy: 0.0126148 97
Accuracy: 0.0126372 98
Accuracy: 0.0126664 99
Accuracy: 0.0136726 0
Accuracy: 0.0136621 1
Accuracy: 0.0136557 2
Accuracy: 0.0136531 3
Accuracy: 0.0136543 4
Accuracy: 0.0136593 5
Accuracy: 0.0136673 6
Accuracy: 0.0136783 7
Accuracy: 0.0136921 8
Accuracy: 0.0137084 9
Accuracy: 0.0137269 10
Accuracy: 0.0137475 11
Accuracy: 0.0137699 12
Accuracy: 0.0137941 13
Accuracy: 0.0138199 14
Accuracy: 0.0138469 15
Accuracy: 0.013875 16
Accuracy: 0.013904 17
Accuracy: 0.0139338 18
Accuracy: 0.0139641 19
Accuracy: 0.0139949 20
Accuracy: 0.0140262 21
Accuracy: 0.0140575 22
Accuracy: 0.0140888 23
Accuracy: 0.0141195 24
Accuracy: 0.0141495 25
Accuracy: 0.0141786 26
Accuracy: 0.0142068 27
Accuracy: 0.0142337 28
Accuracy: 0.0142593 29
Accuracy: 0.0142835 30
Accuracy: 0.014306 31
Accuracy: 0.014327 32
Accuracy: 0.0143463 33
Accuracy: 0.0143641 34
Accuracy: 0.01438 35
Accuracy: 0.0143936 36
Accuracy: 0.014405 37
Accuracy: 0.014414 38
Accuracy: 0.0144205 39
Accuracy: 0.0144247 40
Accuracy: 0.0144265 41
Accuracy: 0.0144255 42
Accuracy: 0.0144218 43
Accuracy: 0.0144151 44
Accuracy: 0.0144053 45
Accuracy: 0.0143925 46
Accuracy: 0.0143767 47
Accuracy: 0.0143577 48
Accuracy: 0.0143357 49
Accuracy: 0.0143104 50
Accuracy: 0.0142825 51
Accuracy: 0.0142516 52
Accuracy: 0.0142176 53
Accuracy: 0.0141807 54
Accuracy: 0.0141409 55
Accuracy: 0.0140982 56
Accuracy: 0.0140523 57
Accuracy: 0.0140034 58
Accuracy: 0.013952 59
Accuracy: 0.013898 60
Accuracy: 0.0138408 61
Accuracy: 0.0137808 62
Accuracy: 0.0137179 63
Accuracy: 0.0136528 64
Accuracy: 0.013585 65
Accuracy: 0.0135148 66
Accuracy: 0.0134421 67
Accuracy: 0.0133669 68
Accuracy: 0.0132892 69
Accuracy: 0.013209 70
Accuracy: 0.0131267 71
Accuracy: 0.0130423 72
Accuracy: 0.0129564 73
Accuracy: 0.0128694 74
Accuracy: 0.0127808 75
Accuracy: 0.0126909 76
Accuracy: 0.0126003 77
Accuracy: 0.0125092 78
Accuracy: 0.0124174 79
Accuracy: 0.0123252 80
Accuracy: 0.0122331 81
Accuracy: 0.012141 82
Accuracy: 0.0120492 83
Accuracy: 0.0119582 84
Accuracy: 0.0118683 85
Accuracy: 0.0117805 86
Accuracy: 0.0116943 87
Accuracy: 0.0116101 88
Accuracy: 0.0115278 89
Accuracy: 0.0114478 90
Accuracy: 0.0113712 91
Accuracy: 0.0112984 92
Accuracy: 0.011229 93
Accuracy: 0.0111638 94
Accuracy: 0.0111033 95
Accuracy: 0.0110473 96
Accuracy: 0.0109962 97
Accuracy: 0.0109521 98
Accuracy: 0.0109147 99
Accuracy: 0.0102219 0
Accuracy: 0.0103512 1
Accuracy: 0.0104827 2
Accuracy: 0.0106164 3
Accuracy: 0.0107522 4
Accuracy: 0.01089 5
Accuracy: 0.0110298 6
Accuracy: 0.0111715 7
Accuracy: 0.0113151 8
Accuracy: 0.0114607 9
Accuracy: 0.0116081 10
Accuracy: 0.0117571 11
Accuracy: 0.011908 12
Accuracy: 0.0120604 13
Accuracy: 0.0122144 14
Accuracy: 0.01237 15
Accuracy: 0.0125272 16
Accuracy: 0.0126858 17
Accuracy: 0.012846 18
Accuracy: 0.0130075 19
Accuracy: 0.0131704 20
Accuracy: 0.0133347 21
Accuracy: 0.0135003 22
Accuracy: 0.0136672 23
Accuracy: 0.0138353 24
Accuracy: 0.0140045 25
Accuracy: 0.0141751 26
Accuracy: 0.0143468 27
Accuracy: 0.0145197 28
Accuracy: 0.0146937 29
Accuracy: 0.0148686 30
Accuracy: 0.0150446 31
Accuracy: 0.0152215 32
Accuracy: 0.0153994 33
Accuracy: 0.0155782 34
Accuracy: 0.015758 35
Accuracy: 0.0159387 36
Accuracy: 0.0161202 37
Accuracy: 0.0163026 38
Accuracy: 0.0164857 39
Accuracy: 0.0166696 40
Accuracy: 0.0168543 41
Accuracy: 0.0170397 42
Accuracy: 0.0172259 43
Accuracy: 0.0174129 44
Accuracy: 0.0176005 45
Accuracy: 0.0177889 46
Accuracy: 0.017978 47
Accuracy: 0.0181677 48
Accuracy: 0.0183581 49
Accuracy: 0.0185492 50
Accuracy: 0.0187409 51
Accuracy: 0.0189332 52
Accuracy: 0.0191262 53
Accuracy: 0.0193198 54
Accuracy: 0.0195141 55
Accuracy: 0.0197092 56
Accuracy: 0.0199051 57
Accuracy: 0.0201016 58
Accuracy: 0.0202988 59
Accuracy: 0.0204967 60
Accuracy: 0.0206953 61
Accuracy: 0.0208945 62
Accuracy: 0.0210944 63
Accuracy: 0.021295 64
Accuracy: 0.0214964 65
Accuracy: 0.0216984 66
Accuracy: 0.0219013 67
Accuracy: 0.0221048 68
Accuracy: 0.0223092 69
Accuracy: 0.0225143 70
Accuracy: 0.0227203 71
Accuracy: 0.0229271 72
Accuracy: 0.0231349 73
Accuracy: 0.0233436 74
Accuracy: 0.0235533 75
Accuracy: 0.023764 76
Accuracy: 0.0239756 77
Accuracy: 0.0241884 78
Accuracy: 0.0244024 79
Accuracy: 0.0246174 80
Accuracy: 0.0248334 81
Accuracy: 0.0250507 82
Accuracy: 0.0252692 83
Accuracy: 0.0254889 84
Accuracy: 0.0257099 85
Accuracy: 0.0259322 86
Accuracy: 0.026156 87
Accuracy: 0.0263811 88
Accuracy: 0.0266078 89
Accuracy: 0.0268359 90
Accuracy: 0.0270657 91
Accuracy: 0.0272971 92
Accuracy: 0.0275304 93
Accuracy: 0.0277655 94
Accuracy: 0.0280024 95
Accuracy: 0.0282411 96
Accuracy: 0.0284819 97
Accuracy: 0.0287247 98
Accuracy: 0.0289697 99
[(0.065011717, 50), (0.027317617, 45), (0.017220482, 41), (0.013026549, 67), (0.014426506, 41), (0.028969711, 99)]
Interp bead indices:
[1, 2, 3, 4, 5, 6]
[1, 2]
Accuracy: 0.0125008
Final bead error: 0.0125008
[True, True, True, True, True, True, True, True, True]
Accuracy: 0.0127198 0
Accuracy: 0.0127155 1
Accuracy: 0.0127113 2
Accuracy: 0.0127073 3
Accuracy: 0.0127033 4
Accuracy: 0.0126994 5
Accuracy: 0.0126955 6
Accuracy: 0.0126917 7
Accuracy: 0.0126878 8
Accuracy: 0.012684 9
Accuracy: 0.01268 10
Accuracy: 0.012676 11
Accuracy: 0.0126719 12
Accuracy: 0.0126676 13
Accuracy: 0.0126632 14
Accuracy: 0.0126587 15
Accuracy: 0.012654 16
Accuracy: 0.012649 17
Accuracy: 0.0126439 18
Accuracy: 0.0126385 19
Accuracy: 0.0126328 20
Accuracy: 0.0126269 21
Accuracy: 0.0126207 22
Accuracy: 0.0126141 23
Accuracy: 0.0126073 24
Accuracy: 0.0126001 25
Accuracy: 0.0125925 26
Accuracy: 0.0125846 27
Accuracy: 0.0125762 28
Accuracy: 0.0125675 29
Accuracy: 0.0125583 30
Accuracy: 0.0125487 31
Accuracy: 0.0125387 32
Accuracy: 0.0125282 33
Accuracy: 0.0125172 34
Accuracy: 0.0125058 35
Accuracy: 0.0124938 36
Accuracy: 0.0124814 37
Accuracy: 0.0124684 38
Accuracy: 0.0124549 39
Accuracy: 0.0124409 40
Accuracy: 0.0124263 41
Accuracy: 0.0124111 42
Accuracy: 0.0123954 43
Accuracy: 0.0123791 44
Accuracy: 0.0123623 45
Accuracy: 0.0123448 46
Accuracy: 0.0123268 47
Accuracy: 0.0123081 48
Accuracy: 0.0122889 49
Accuracy: 0.012269 50
Accuracy: 0.0122485 51
Accuracy: 0.0122274 52
Accuracy: 0.0122057 53
Accuracy: 0.0121833 54
Accuracy: 0.0121603 55
Accuracy: 0.0121366 56
Accuracy: 0.0121123 57
Accuracy: 0.0120874 58
Accuracy: 0.0120618 59
Accuracy: 0.0120355 60
Accuracy: 0.0120086 61
Accuracy: 0.0119811 62
Accuracy: 0.0119528 63
Accuracy: 0.011924 64
Accuracy: 0.0118944 65
Accuracy: 0.0118643 66
Accuracy: 0.0118334 67
Accuracy: 0.0118032 68
Accuracy: 0.011777 69
Accuracy: 0.0117536 70
Accuracy: 0.011732 71
Accuracy: 0.0117124 72
Accuracy: 0.0116962 73
Accuracy: 0.0116851 74
Accuracy: 0.0116787 75
Accuracy: 0.0116761 76
Accuracy: 0.0116774 77
Accuracy: 0.0116827 78
Accuracy: 0.0116917 79
Accuracy: 0.0117049 80
Accuracy: 0.0117231 81
Accuracy: 0.0117465 82
Accuracy: 0.011774 83
Accuracy: 0.0118059 84
Accuracy: 0.011842 85
Accuracy: 0.0118823 86
Accuracy: 0.0119288 87
Accuracy: 0.0119804 88
Accuracy: 0.012037 89
Accuracy: 0.0121002 90
Accuracy: 0.01217 91
Accuracy: 0.0122464 92
Accuracy: 0.0123285 93
Accuracy: 0.0124175 94
Accuracy: 0.0125127 95
Accuracy: 0.0126134 96
Accuracy: 0.0127196 97
Accuracy: 0.0128334 98
Accuracy: 0.012954 99
Accuracy: 0.0126421 0
Accuracy: 0.0129922 1
Accuracy: 0.0133729 2
Accuracy: 0.0137797 3
Accuracy: 0.0142096 4
Accuracy: 0.0146592 5
Accuracy: 0.0151245 6
Accuracy: 0.0156021 7
Accuracy: 0.0160885 8
Accuracy: 0.0165784 9
Accuracy: 0.017066 10
Accuracy: 0.0175482 11
Accuracy: 0.0180206 12
Accuracy: 0.0184742 13
Accuracy: 0.0189079 14
Accuracy: 0.0193102 15
Accuracy: 0.0196717 16
Accuracy: 0.0199945 17
Accuracy: 0.0202728 18
Accuracy: 0.0204863 19
Accuracy: 0.0206434 20
Accuracy: 0.0207277 21
Accuracy: 0.020746 22
Accuracy: 0.0207598 23
Accuracy: 0.0207708 24
Accuracy: 0.0207788 25
Accuracy: 0.0207839 26
Accuracy: 0.0207859 27
Accuracy: 0.0207848 28
Accuracy: 0.0207805 29
Accuracy: 0.0207731 30
Accuracy: 0.0207623 31
Accuracy: 0.0207483 32
Accuracy: 0.020731 33
Accuracy: 0.0207103 34
Accuracy: 0.0206862 35
Accuracy: 0.0206587 36
Accuracy: 0.0206278 37
Accuracy: 0.0205935 38
Accuracy: 0.0205557 39
Accuracy: 0.0205145 40
Accuracy: 0.0204698 41
Accuracy: 0.0204217 42
Accuracy: 0.0203702 43
Accuracy: 0.0203153 44
Accuracy: 0.0202569 45
Accuracy: 0.0201952 46
Accuracy: 0.0201301 47
Accuracy: 0.0200618 48
Accuracy: 0.0199901 49
Accuracy: 0.0199152 50
Accuracy: 0.0198372 51
Accuracy: 0.0197559 52
Accuracy: 0.0196716 53
Accuracy: 0.0195842 54
Accuracy: 0.0194939 55
Accuracy: 0.0194006 56
Accuracy: 0.0193045 57
Accuracy: 0.0192057 58
Accuracy: 0.0191041 59
Accuracy: 0.0189999 60
Accuracy: 0.0188932 61
Accuracy: 0.0187841 62
Accuracy: 0.0186726 63
Accuracy: 0.0185589 64
Accuracy: 0.018443 65
Accuracy: 0.0183251 66
Accuracy: 0.0182052 67
Accuracy: 0.0180835 68
Accuracy: 0.0179602 69
Accuracy: 0.0178352 70
Accuracy: 0.0177088 71
Accuracy: 0.017581 72
Accuracy: 0.0174521 73
Accuracy: 0.0173221 74
Accuracy: 0.0171912 75
Accuracy: 0.0170595 76
Accuracy: 0.0169272 77
Accuracy: 0.0167944 78
Accuracy: 0.0166612 79
Accuracy: 0.0165279 80
Accuracy: 0.0163946 81
Accuracy: 0.0162615 82
Accuracy: 0.0161287 83
Accuracy: 0.0159964 84
Accuracy: 0.0158648 85
Accuracy: 0.0157341 86
Accuracy: 0.0156044 87
Accuracy: 0.0154759 88
Accuracy: 0.0153489 89
Accuracy: 0.0152235 90
Accuracy: 0.0150999 91
Accuracy: 0.0149783 92
Accuracy: 0.014859 93
Accuracy: 0.0147421 94
Accuracy: 0.0146278 95
Accuracy: 0.0145165 96
Accuracy: 0.0144082 97
Accuracy: 0.0143032 98
Accuracy: 0.0142017 99
[(0.012954038, 99), (0.020785892, 27)]
Interp bead indices:
[1, 2]
[0, 1]
Accuracy: 0.0482437
Accuracy: 0.0367408
Accuracy: 0.0286126
Accuracy: 0.022156
Accuracy: 0.0169639
Accuracy: 0.0166682
Final bead error: 0.0166682
[True, True, True]
Accuracy: 0.0280968 0
Accuracy: 0.028844 1
Accuracy: 0.0297405 2
Accuracy: 0.0307732 3
Accuracy: 0.0319293 4
Accuracy: 0.0331967 5
Accuracy: 0.0345633 6
Accuracy: 0.0360175 7
Accuracy: 0.0375483 8
Accuracy: 0.0391448 9
Accuracy: 0.0407966 10
Accuracy: 0.0424936 11
Accuracy: 0.0442261 12
Accuracy: 0.0459848 13
Accuracy: 0.0477417 14
Accuracy: 0.0494275 15
Accuracy: 0.0510577 16
Accuracy: 0.0526191 17
Accuracy: 0.0541144 18
Accuracy: 0.0555378 19
Accuracy: 0.0568486 20
Accuracy: 0.0580986 21
Accuracy: 0.0593072 22
Accuracy: 0.0604586 23
Accuracy: 0.0615637 24
Accuracy: 0.0626253 25
Accuracy: 0.0636206 26
Accuracy: 0.0645503 27
Accuracy: 0.0654014 28
Accuracy: 0.0661952 29
Accuracy: 0.0669411 30
Accuracy: 0.0676306 31
Accuracy: 0.0682497 32
Accuracy: 0.0687928 33
Accuracy: 0.0692573 34
Accuracy: 0.0696419 35
Accuracy: 0.0699493 36
Accuracy: 0.0701771 37
Accuracy: 0.0703285 38
Accuracy: 0.0704028 39
Accuracy: 0.0704033 40
Accuracy: 0.0703326 41
Accuracy: 0.0701936 42
Accuracy: 0.0699869 43
Accuracy: 0.0697108 44
Accuracy: 0.0693655 45
Accuracy: 0.0689538 46
Accuracy: 0.0684775 47
Accuracy: 0.0679401 48
Accuracy: 0.0673422 49
Accuracy: 0.0666856 50
Accuracy: 0.0659699 51
Accuracy: 0.0652114 52
Accuracy: 0.0644003 53
Accuracy: 0.0635561 54
Accuracy: 0.0626678 55
Accuracy: 0.0617315 56
Accuracy: 0.0607592 57
Accuracy: 0.0597494 58
Accuracy: 0.0587049 59
Accuracy: 0.0576398 60
Accuracy: 0.0565717 61
Accuracy: 0.0554991 62
Accuracy: 0.0544385 63
Accuracy: 0.0534032 64
Accuracy: 0.0524098 65
Accuracy: 0.0514488 66
Accuracy: 0.050545 67
Accuracy: 0.0497086 68
Accuracy: 0.0489416 69
Accuracy: 0.0482551 70
Accuracy: 0.0476637 71
Accuracy: 0.0471913 72
Accuracy: 0.0468549 73
Accuracy: 0.0466837 74
Accuracy: 0.0466948 75
Accuracy: 0.0469092 76
Accuracy: 0.0473446 77
Accuracy: 0.0480156 78
Accuracy: 0.0489498 79
Accuracy: 0.0501743 80
Accuracy: 0.0512694 81
Accuracy: 0.0510074 82
Accuracy: 0.0498297 83
Accuracy: 0.0480183 84
Accuracy: 0.0453246 85
Accuracy: 0.0419889 86
Accuracy: 0.0383431 87
Accuracy: 0.0345218 88
Accuracy: 0.030756 89
Accuracy: 0.0272045 90
Accuracy: 0.0239605 91
Accuracy: 0.0211293 92
Accuracy: 0.0187845 93
Accuracy: 0.0170152 94
Accuracy: 0.0157674 95
Accuracy: 0.0148696 96
Accuracy: 0.014271 97
Accuracy: 0.0139642 98
Accuracy: 0.0139389 99
Accuracy: 0.0158617 0
Accuracy: 0.0181162 1
Accuracy: 0.0227436 2
Accuracy: 0.0297573 3
Accuracy: 0.0391656 4
Accuracy: 0.0509707 5
Accuracy: 0.0651694 6
Accuracy: 0.0817544 7
Accuracy: 0.100714 8
Accuracy: 0.12203 9
Accuracy: 0.145684 10
Accuracy: 0.171645 11
Accuracy: 0.199879 12
Accuracy: 0.230352 13
Accuracy: 0.263025 14
Accuracy: 0.297848 15
Accuracy: 0.334774 16
Accuracy: 0.373746 17
Accuracy: 0.414705 18
Accuracy: 0.457586 19
Accuracy: 0.502321 20
Accuracy: 0.548759 21
Accuracy: 0.596278 22
Accuracy: 0.644753 23
Accuracy: 0.694117 24
Accuracy: 0.744031 25
Accuracy: 0.794272 26
Accuracy: 0.844718 27
Accuracy: 0.895574 28
Accuracy: 0.946779 29
Accuracy: 0.99819 30
Accuracy: 1.04966 31
Accuracy: 1.10104 32
Accuracy: 1.15218 33
Accuracy: 1.20294 34
Accuracy: 1.25314 35
Accuracy: 1.30266 36
Accuracy: 1.35132 37
Accuracy: 1.39898 38
Accuracy: 1.44548 39
Accuracy: 1.49067 40
Accuracy: 1.5344 41
Accuracy: 1.57654 42
Accuracy: 1.61691 43
Accuracy: 1.65539 44
Accuracy: 1.69182 45
Accuracy: 1.72606 46
Accuracy: 1.75799 47
Accuracy: 1.78746 48
Accuracy: 1.81436 49
Accuracy: 1.83856 50
Accuracy: 1.85994 51
Accuracy: 1.8784 52
Accuracy: 1.89384 53
Accuracy: 1.90615 54
Accuracy: 1.91525 55
Accuracy: 1.92106 56
Accuracy: 1.9235 57
Accuracy: 1.92251 58
Accuracy: 1.91804 59
Accuracy: 1.91004 60
Accuracy: 1.89848 61
Accuracy: 1.88335 62
Accuracy: 1.86461 63
Accuracy: 1.84229 64
Accuracy: 1.81638 65
Accuracy: 1.78691 66
Accuracy: 1.75392 67
Accuracy: 1.71747 68
Accuracy: 1.67762 69
Accuracy: 1.63446 70
Accuracy: 1.58809 71
Accuracy: 1.53862 72
Accuracy: 1.48619 73
Accuracy: 1.43095 74
Accuracy: 1.37308 75
Accuracy: 1.31276 76
Accuracy: 1.25021 77
Accuracy: 1.18567 78
Accuracy: 1.11938 79
Accuracy: 1.05163 80
Accuracy: 0.982722 81
Accuracy: 0.913162 82
Accuracy: 0.843564 83
Accuracy: 0.774233 84
Accuracy: 0.705571 85
Accuracy: 0.637864 86
Accuracy: 0.57145 87
Accuracy: 0.506788 88
Accuracy: 0.444232 89
Accuracy: 0.384188 90
Accuracy: 0.327083 91
Accuracy: 0.273397 92
Accuracy: 0.223613 93
Accuracy: 0.178282 94
Accuracy: 0.137961 95
Accuracy: 0.103171 96
Accuracy: 0.0745247 97
Accuracy: 0.0526102 98
Accuracy: 0.0380634 99
[(0.070403285, 40), (1.9234983, 57)]
Interp bead indices:
[0, 1]
[0, 1, 2, 3]
Accuracy: 0.0132413
Final bead error: 0.0132413
Accuracy: 0.0330774
Accuracy: 0.0244054
Accuracy: 0.0177034
Accuracy: 0.0154029
Final bead error: 0.0154029
[True, True, True, True, True]
Accuracy: 0.0311707 0
Accuracy: 0.0311778 1
Accuracy: 0.0311871 2
Accuracy: 0.0311986 3
Accuracy: 0.0312121 4
Accuracy: 0.0312276 5
Accuracy: 0.0312451 6
Accuracy: 0.0312643 7
Accuracy: 0.0312853 8
Accuracy: 0.031308 9
Accuracy: 0.0313323 10
Accuracy: 0.0313582 11
Accuracy: 0.0313855 12
Accuracy: 0.0314143 13
Accuracy: 0.0314445 14
Accuracy: 0.0314761 15
Accuracy: 0.0315089 16
Accuracy: 0.0315429 17
Accuracy: 0.0315782 18
Accuracy: 0.0316146 19
Accuracy: 0.0316522 20
Accuracy: 0.0316909 21
Accuracy: 0.0317306 22
Accuracy: 0.0317713 23
Accuracy: 0.0318131 24
Accuracy: 0.0318559 25
Accuracy: 0.0318996 26
Accuracy: 0.0319443 27
Accuracy: 0.0319899 28
Accuracy: 0.0320364 29
Accuracy: 0.0320822 30
Accuracy: 0.0321021 31
Accuracy: 0.0321013 32
Accuracy: 0.0320907 33
Accuracy: 0.0320519 34
Accuracy: 0.0319889 35
Accuracy: 0.0318991 36
Accuracy: 0.0317988 37
Accuracy: 0.0316749 38
Accuracy: 0.0315202 39
Accuracy: 0.031333 40
Accuracy: 0.0311223 41
Accuracy: 0.0308994 42
Accuracy: 0.0306621 43
Accuracy: 0.0304105 44
Accuracy: 0.0301515 45
Accuracy: 0.0298776 46
Accuracy: 0.0295997 47
Accuracy: 0.0293279 48
Accuracy: 0.0290473 49
Accuracy: 0.0287586 50
Accuracy: 0.0284447 51
Accuracy: 0.0281214 52
Accuracy: 0.0277964 53
Accuracy: 0.0274594 54
Accuracy: 0.0271297 55
Accuracy: 0.0268031 56
Accuracy: 0.0264776 57
Accuracy: 0.0261419 58
Accuracy: 0.0257945 59
Accuracy: 0.0254488 60
Accuracy: 0.0250987 61
Accuracy: 0.0247413 62
Accuracy: 0.0243863 63
Accuracy: 0.0240326 64
Accuracy: 0.0236735 65
Accuracy: 0.0233122 66
Accuracy: 0.0229482 67
Accuracy: 0.022592 68
Accuracy: 0.0222356 69
Accuracy: 0.0218831 70
Accuracy: 0.0215375 71
Accuracy: 0.0211968 72
Accuracy: 0.0208596 73
Accuracy: 0.0205153 74
Accuracy: 0.0201697 75
Accuracy: 0.0198176 76
Accuracy: 0.0194726 77
Accuracy: 0.0191343 78
Accuracy: 0.0188036 79
Accuracy: 0.0184764 80
Accuracy: 0.0181533 81
Accuracy: 0.0178377 82
Accuracy: 0.01753 83
Accuracy: 0.0172276 84
Accuracy: 0.0169294 85
Accuracy: 0.0166355 86
Accuracy: 0.016345 87
Accuracy: 0.0160643 88
Accuracy: 0.0157897 89
Accuracy: 0.0155222 90
Accuracy: 0.0152568 91
Accuracy: 0.0149935 92
Accuracy: 0.0147319 93
Accuracy: 0.0144762 94
Accuracy: 0.0142232 95
Accuracy: 0.0139735 96
Accuracy: 0.0137266 97
Accuracy: 0.0134872 98
Accuracy: 0.0132553 99
Accuracy: 0.0129494 0
Accuracy: 0.0130881 1
Accuracy: 0.0132627 2
Accuracy: 0.0134724 3
Accuracy: 0.0137165 4
Accuracy: 0.0139932 5
Accuracy: 0.014303 6
Accuracy: 0.0146403 7
Accuracy: 0.0150014 8
Accuracy: 0.0153865 9
Accuracy: 0.0157958 10
Accuracy: 0.0162243 11
Accuracy: 0.0166706 12
Accuracy: 0.0171316 13
Accuracy: 0.0176056 14
Accuracy: 0.0180918 15
Accuracy: 0.0185895 16
Accuracy: 0.0190959 17
Accuracy: 0.0196096 18
Accuracy: 0.0201295 19
Accuracy: 0.020654 20
Accuracy: 0.021182 21
Accuracy: 0.0217122 22
Accuracy: 0.0222438 23
Accuracy: 0.0227749 24
Accuracy: 0.0233021 25
Accuracy: 0.0238242 26
Accuracy: 0.0243415 27
Accuracy: 0.0248554 28
Accuracy: 0.0253671 29
Accuracy: 0.025876 30
Accuracy: 0.0263817 31
Accuracy: 0.0268818 32
Accuracy: 0.0273761 33
Accuracy: 0.0278634 34
Accuracy: 0.028345 35
Accuracy: 0.0288189 36
Accuracy: 0.0292859 37
Accuracy: 0.0297532 38
Accuracy: 0.0302198 39
Accuracy: 0.0306843 40
Accuracy: 0.0311493 41
Accuracy: 0.0316153 42
Accuracy: 0.0320786 43
Accuracy: 0.0325427 44
Accuracy: 0.0330136 45
Accuracy: 0.0335026 46
Accuracy: 0.034003 47
Accuracy: 0.0345161 48
Accuracy: 0.0350443 49
Accuracy: 0.0355868 50
Accuracy: 0.0361488 51
Accuracy: 0.0367348 52
Accuracy: 0.0373446 53
Accuracy: 0.0379858 54
Accuracy: 0.0386651 55
Accuracy: 0.0393777 56
Accuracy: 0.0401334 57
Accuracy: 0.0409395 58
Accuracy: 0.0417962 59
Accuracy: 0.0427082 60
Accuracy: 0.0436788 61
Accuracy: 0.0447134 62
Accuracy: 0.0458185 63
Accuracy: 0.0469955 64
Accuracy: 0.048247 65
Accuracy: 0.0495803 66
Accuracy: 0.050953 67
Accuracy: 0.0519837 68
Accuracy: 0.0524297 69
Accuracy: 0.0523659 70
Accuracy: 0.0518886 71
Accuracy: 0.050988 72
Accuracy: 0.0497108 73
Accuracy: 0.0482378 74
Accuracy: 0.04656 75
Accuracy: 0.0446857 76
Accuracy: 0.042639 77
Accuracy: 0.0404938 78
Accuracy: 0.0382701 79
Accuracy: 0.0360083 80
Accuracy: 0.0337577 81
Accuracy: 0.0315517 82
Accuracy: 0.029413 83
Accuracy: 0.0273745 84
Accuracy: 0.0254532 85
Accuracy: 0.0236716 86
Accuracy: 0.0220473 87
Accuracy: 0.0205902 88
Accuracy: 0.019318 89
Accuracy: 0.0182454 90
Accuracy: 0.017364 91
Accuracy: 0.016624 92
Accuracy: 0.0159955 93
Accuracy: 0.0154772 94
Accuracy: 0.0150685 95
Accuracy: 0.0147683 96
Accuracy: 0.014575 97
Accuracy: 0.0144853 98
Accuracy: 0.0144974 99
Accuracy: 0.015518 0
Accuracy: 0.0160568 1
Accuracy: 0.0169062 2
Accuracy: 0.0180724 3
Accuracy: 0.0195599 4
Accuracy: 0.0213714 5
Accuracy: 0.0235104 6
Accuracy: 0.0259794 7
Accuracy: 0.0287816 8
Accuracy: 0.0319175 9
Accuracy: 0.0353872 10
Accuracy: 0.0391932 11
Accuracy: 0.043337 12
Accuracy: 0.047818 13
Accuracy: 0.0526368 14
Accuracy: 0.0577935 15
Accuracy: 0.0632889 16
Accuracy: 0.0691231 17
Accuracy: 0.0752849 18
Accuracy: 0.0814811 19
Accuracy: 0.0876903 20
Accuracy: 0.0939351 21
Accuracy: 0.100219 22
Accuracy: 0.106589 23
Accuracy: 0.113052 24
Accuracy: 0.119596 25
Accuracy: 0.126209 26
Accuracy: 0.132871 27
Accuracy: 0.139561 28
Accuracy: 0.14627 29
Accuracy: 0.152984 30
Accuracy: 0.159686 31
Accuracy: 0.166357 32
Accuracy: 0.172977 33
Accuracy: 0.179528 34
Accuracy: 0.185991 35
Accuracy: 0.192348 36
Accuracy: 0.198577 37
Accuracy: 0.204662 38
Accuracy: 0.210582 39
Accuracy: 0.216319 40
Accuracy: 0.221854 41
Accuracy: 0.227169 42
Accuracy: 0.232245 43
Accuracy: 0.237065 44
Accuracy: 0.241612 45
Accuracy: 0.245867 46
Accuracy: 0.249814 47
Accuracy: 0.253437 48
Accuracy: 0.256722 49
Accuracy: 0.259652 50
Accuracy: 0.262214 51
Accuracy: 0.264394 52
Accuracy: 0.26618 53
Accuracy: 0.26756 54
Accuracy: 0.268524 55
Accuracy: 0.269062 56
Accuracy: 0.269163 57
Accuracy: 0.268823 58
Accuracy: 0.268033 59
Accuracy: 0.266788 60
Accuracy: 0.265084 61
Accuracy: 0.262918 62
Accuracy: 0.260289 63
Accuracy: 0.257198 64
Accuracy: 0.253646 65
Accuracy: 0.249638 66
Accuracy: 0.245179 67
Accuracy: 0.240276 68
Accuracy: 0.234937 69
Accuracy: 0.229174 70
Accuracy: 0.222998 71
Accuracy: 0.216426 72
Accuracy: 0.209474 73
Accuracy: 0.202163 74
Accuracy: 0.194514 75
Accuracy: 0.186552 76
Accuracy: 0.178303 77
Accuracy: 0.169796 78
Accuracy: 0.161064 79
Accuracy: 0.152141 80
Accuracy: 0.143065 81
Accuracy: 0.133877 82
Accuracy: 0.124619 83
Accuracy: 0.115338 84
Accuracy: 0.106084 85
Accuracy: 0.0969099 86
Accuracy: 0.0878724 87
Accuracy: 0.0790311 88
Accuracy: 0.0704497 89
Accuracy: 0.0621954 90
Accuracy: 0.0543386 91
Accuracy: 0.0469546 92
Accuracy: 0.0401219 93
Accuracy: 0.0339232 94
Accuracy: 0.0284455 95
Accuracy: 0.0237798 96
Accuracy: 0.0200216 97
Accuracy: 0.0172708 98
Accuracy: 0.0156318 99
Accuracy: 0.0152345 0
Accuracy: 0.0158829 1
Accuracy: 0.0167198 2
Accuracy: 0.0177345 3
Accuracy: 0.0189163 4
Accuracy: 0.0202547 5
Accuracy: 0.0217394 6
Accuracy: 0.0233602 7
Accuracy: 0.025107 8
Accuracy: 0.0269701 9
Accuracy: 0.0289396 10
Accuracy: 0.0310059 11
Accuracy: 0.0331598 12
Accuracy: 0.035392 13
Accuracy: 0.0376933 14
Accuracy: 0.0400551 15
Accuracy: 0.0424684 16
Accuracy: 0.0449248 17
Accuracy: 0.047416 18
Accuracy: 0.0499338 19
Accuracy: 0.0524702 20
Accuracy: 0.0550174 21
Accuracy: 0.057568 22
Accuracy: 0.0601143 23
Accuracy: 0.0626494 24
Accuracy: 0.0651662 25
Accuracy: 0.0676577 26
Accuracy: 0.0701177 27
Accuracy: 0.0725396 28
Accuracy: 0.0749174 29
Accuracy: 0.0772449 30
Accuracy: 0.0795167 31
Accuracy: 0.0817271 32
Accuracy: 0.0838709 33
Accuracy: 0.0859432 34
Accuracy: 0.087939 35
Accuracy: 0.0898537 36
Accuracy: 0.0916832 37
Accuracy: 0.0934233 38
Accuracy: 0.09507 39
Accuracy: 0.09662 40
Accuracy: 0.0980698 41
Accuracy: 0.0994162 42
Accuracy: 0.100657 43
Accuracy: 0.101788 44
Accuracy: 0.102809 45
Accuracy: 0.103717 46
Accuracy: 0.10451 47
Accuracy: 0.105186 48
Accuracy: 0.105745 49
Accuracy: 0.106186 50
Accuracy: 0.106507 51
Accuracy: 0.106709 52
Accuracy: 0.106791 53
Accuracy: 0.106754 54
Accuracy: 0.106598 55
Accuracy: 0.106323 56
Accuracy: 0.105928 57
Accuracy: 0.105415 58
Accuracy: 0.104785 59
Accuracy: 0.104038 60
Accuracy: 0.103178 61
Accuracy: 0.102207 62
Accuracy: 0.101125 63
Accuracy: 0.0999368 64
Accuracy: 0.0986445 65
Accuracy: 0.0972513 66
Accuracy: 0.0957605 67
Accuracy: 0.0941755 68
Accuracy: 0.0925009 69
Accuracy: 0.0907408 70
Accuracy: 0.0888995 71
Accuracy: 0.0869821 72
Accuracy: 0.0849939 73
Accuracy: 0.0829402 74
Accuracy: 0.0808267 75
Accuracy: 0.0786596 76
Accuracy: 0.0764449 77
Accuracy: 0.0741884 78
Accuracy: 0.0718974 79
Accuracy: 0.0695797 80
Accuracy: 0.0672434 81
Accuracy: 0.0648933 82
Accuracy: 0.0625362 83
Accuracy: 0.0601828 84
Accuracy: 0.0578417 85
Accuracy: 0.0555208 86
Accuracy: 0.0532268 87
Accuracy: 0.0509688 88
Accuracy: 0.0487554 89
Accuracy: 0.046594 90
Accuracy: 0.0444992 91
Accuracy: 0.042491 92
Accuracy: 0.0405847 93
Accuracy: 0.0387785 94
Accuracy: 0.0370847 95
Accuracy: 0.0355141 96
Accuracy: 0.0340909 97
Accuracy: 0.0328226 98
Accuracy: 0.0318411 99
[(0.032102142, 31), (0.052429724, 69), (0.26916346, 57), (0.10679145, 53)]
Interp bead indices:
[0, 1, 2, 3]
[1, 2, 3, 4, 5, 6]
Accuracy: 0.0122218
Final bead error: 0.0122218
Accuracy: 0.0142399
Final bead error: 0.0142399
Accuracy: 0.0332163
Accuracy: 0.0167764
Accuracy: 0.0148501
Final bead error: 0.0148501
[True, True, True, True, True, True, True, True]
Accuracy: 0.0136479 0
Accuracy: 0.0136881 1
Accuracy: 0.0137406 2
Accuracy: 0.013805 3
Accuracy: 0.0138794 4
Accuracy: 0.0139648 5
Accuracy: 0.0140625 6
Accuracy: 0.0141688 7
Accuracy: 0.0142853 8
Accuracy: 0.0144125 9
Accuracy: 0.014547 10
Accuracy: 0.0146876 11
Accuracy: 0.0148352 12
Accuracy: 0.0149899 13
Accuracy: 0.0151516 14
Accuracy: 0.0153195 15
Accuracy: 0.0154927 16
Accuracy: 0.0156713 17
Accuracy: 0.0158551 18
Accuracy: 0.0160432 19
Accuracy: 0.0162353 20
Accuracy: 0.0164311 21
Accuracy: 0.0166298 22
Accuracy: 0.0168314 23
Accuracy: 0.0170354 24
Accuracy: 0.0172413 25
Accuracy: 0.017449 26
Accuracy: 0.0176587 27
Accuracy: 0.0178698 28
Accuracy: 0.0180822 29
Accuracy: 0.0182957 30
Accuracy: 0.0185094 31
Accuracy: 0.0187239 32
Accuracy: 0.0189391 33
Accuracy: 0.0191537 34
Accuracy: 0.0193684 35
Accuracy: 0.0195832 36
Accuracy: 0.0197989 37
Accuracy: 0.0200147 38
Accuracy: 0.0202284 39
Accuracy: 0.0204406 40
Accuracy: 0.0206528 41
Accuracy: 0.0208651 42
Accuracy: 0.0210773 43
Accuracy: 0.02129 44
Accuracy: 0.0215019 45
Accuracy: 0.021717 46
Accuracy: 0.0219354 47
Accuracy: 0.0221521 48
Accuracy: 0.0223689 49
Accuracy: 0.0225849 50
Accuracy: 0.0228001 51
Accuracy: 0.0230179 52
Accuracy: 0.0232374 53
Accuracy: 0.0234583 54
Accuracy: 0.0236789 55
Accuracy: 0.0238986 56
Accuracy: 0.0241238 57
Accuracy: 0.0243533 58
Accuracy: 0.0245861 59
Accuracy: 0.024822 60
Accuracy: 0.0250602 61
Accuracy: 0.0253044 62
Accuracy: 0.025553 63
Accuracy: 0.0258028 64
Accuracy: 0.0260547 65
Accuracy: 0.0263126 66
Accuracy: 0.0265763 67
Accuracy: 0.0268476 68
Accuracy: 0.0271259 69
Accuracy: 0.0274104 70
Accuracy: 0.0277011 71
Accuracy: 0.0280018 72
Accuracy: 0.0283123 73
Accuracy: 0.0286346 74
Accuracy: 0.0289704 75
Accuracy: 0.0293231 76
Accuracy: 0.0296916 77
Accuracy: 0.0300744 78
Accuracy: 0.0304691 79
Accuracy: 0.0308803 80
Accuracy: 0.0313144 81
Accuracy: 0.0317 82
Accuracy: 0.0317539 83
Accuracy: 0.0314942 84
Accuracy: 0.0309851 85
Accuracy: 0.0302896 86
Accuracy: 0.0293618 87
Accuracy: 0.028284 88
Accuracy: 0.0271255 89
Accuracy: 0.0258671 90
Accuracy: 0.0245367 91
Accuracy: 0.0231717 92
Accuracy: 0.0217773 93
Accuracy: 0.0203686 94
Accuracy: 0.0189703 95
Accuracy: 0.0176029 96
Accuracy: 0.016284 97
Accuracy: 0.0150313 98
Accuracy: 0.01386 99
Accuracy: 0.0125521 0
Accuracy: 0.0124631 1
Accuracy: 0.0123748 2
Accuracy: 0.0122874 3
Accuracy: 0.0122011 4
Accuracy: 0.012116 5
Accuracy: 0.0120317 6
Accuracy: 0.011948 7
Accuracy: 0.0118665 8
Accuracy: 0.0117863 9
Accuracy: 0.0117072 10
Accuracy: 0.0116298 11
Accuracy: 0.0115544 12
Accuracy: 0.0114812 13
Accuracy: 0.011412 14
Accuracy: 0.0113447 15
Accuracy: 0.0112779 16
Accuracy: 0.0112144 17
Accuracy: 0.0111525 18
Accuracy: 0.0110925 19
Accuracy: 0.0110351 20
Accuracy: 0.0109809 21
Accuracy: 0.0109291 22
Accuracy: 0.0108804 23
Accuracy: 0.0108346 24
Accuracy: 0.0107912 25
Accuracy: 0.01075 26
Accuracy: 0.010713 27
Accuracy: 0.0106814 28
Accuracy: 0.0106542 29
Accuracy: 0.0106297 30
Accuracy: 0.0106111 31
Accuracy: 0.0105973 32
Accuracy: 0.0105894 33
Accuracy: 0.0105852 34
Accuracy: 0.0105853 35
Accuracy: 0.0105899 36
Accuracy: 0.0105971 37
Accuracy: 0.0106073 38
Accuracy: 0.0106234 39
Accuracy: 0.0106431 40
Accuracy: 0.0106671 41
Accuracy: 0.0106945 42
Accuracy: 0.0107248 43
Accuracy: 0.0107593 44
Accuracy: 0.0107968 45
Accuracy: 0.0108387 46
Accuracy: 0.0108858 47
Accuracy: 0.0109402 48
Accuracy: 0.0109982 49
Accuracy: 0.0110592 50
Accuracy: 0.0111234 51
Accuracy: 0.011193 52
Accuracy: 0.011268 53
Accuracy: 0.0113526 54
Accuracy: 0.0114424 55
Accuracy: 0.0115324 56
Accuracy: 0.0116222 57
Accuracy: 0.0117119 58
Accuracy: 0.0118013 59
Accuracy: 0.0118905 60
Accuracy: 0.0119795 61
Accuracy: 0.0120683 62
Accuracy: 0.0121569 63
Accuracy: 0.0122453 64
Accuracy: 0.0123334 65
Accuracy: 0.0124214 66
Accuracy: 0.0125091 67
Accuracy: 0.0125967 68
Accuracy: 0.0126841 69
Accuracy: 0.0127714 70
Accuracy: 0.0128586 71
Accuracy: 0.0129457 72
Accuracy: 0.0130327 73
Accuracy: 0.0131196 74
Accuracy: 0.0132064 75
Accuracy: 0.0132931 76
Accuracy: 0.0133797 77
Accuracy: 0.0134664 78
Accuracy: 0.013553 79
Accuracy: 0.0136396 80
Accuracy: 0.0137262 81
Accuracy: 0.0138128 82
Accuracy: 0.0138994 83
Accuracy: 0.0139862 84
Accuracy: 0.0140733 85
Accuracy: 0.0141605 86
Accuracy: 0.0142479 87
Accuracy: 0.0143355 88
Accuracy: 0.0144238 89
Accuracy: 0.0145125 90
Accuracy: 0.0146012 91
Accuracy: 0.0146898 92
Accuracy: 0.0147783 93
Accuracy: 0.0148668 94
Accuracy: 0.0149552 95
Accuracy: 0.0150436 96
Accuracy: 0.0151319 97
Accuracy: 0.0152201 98
Accuracy: 0.0153082 99
Accuracy: 0.0145191 0
Accuracy: 0.0146791 1
Accuracy: 0.014854 2
Accuracy: 0.015045 3
Accuracy: 0.0152537 4
Accuracy: 0.0154797 5
Accuracy: 0.0157228 6
Accuracy: 0.0159829 7
Accuracy: 0.0162605 8
Accuracy: 0.0165565 9
Accuracy: 0.0168711 10
Accuracy: 0.0172047 11
Accuracy: 0.0175576 12
Accuracy: 0.0179302 13
Accuracy: 0.0183221 14
Accuracy: 0.018734 15
Accuracy: 0.0191659 16
Accuracy: 0.0196184 17
Accuracy: 0.0200919 18
Accuracy: 0.0205868 19
Accuracy: 0.0211035 20
Accuracy: 0.0216422 21
Accuracy: 0.0222038 22
Accuracy: 0.0227882 23
Accuracy: 0.0233955 24
Accuracy: 0.0239559 25
Accuracy: 0.024408 26
Accuracy: 0.0247719 27
Accuracy: 0.025149 28
Accuracy: 0.0255305 29
Accuracy: 0.0259064 30
Accuracy: 0.0262754 31
Accuracy: 0.0266366 32
Accuracy: 0.0269889 33
Accuracy: 0.0273313 34
Accuracy: 0.0276629 35
Accuracy: 0.0279828 36
Accuracy: 0.0282902 37
Accuracy: 0.0285843 38
Accuracy: 0.0288644 39
Accuracy: 0.0291295 40
Accuracy: 0.029379 41
Accuracy: 0.0296118 42
Accuracy: 0.0298274 43
Accuracy: 0.0300249 44
Accuracy: 0.0302036 45
Accuracy: 0.0303629 46
Accuracy: 0.030502 47
Accuracy: 0.0306202 48
Accuracy: 0.030717 49
Accuracy: 0.0307918 50
Accuracy: 0.0308442 51
Accuracy: 0.0308736 52
Accuracy: 0.0308796 53
Accuracy: 0.0308618 54
Accuracy: 0.03082 55
Accuracy: 0.0307537 56
Accuracy: 0.0306629 57
Accuracy: 0.0305473 58
Accuracy: 0.0304068 59
Accuracy: 0.0302414 60
Accuracy: 0.030051 61
Accuracy: 0.0298357 62
Accuracy: 0.0295958 63
Accuracy: 0.0293314 64
Accuracy: 0.0290428 65
Accuracy: 0.0287305 66
Accuracy: 0.0283948 67
Accuracy: 0.0280364 68
Accuracy: 0.0276559 69
Accuracy: 0.0272539 70
Accuracy: 0.0268315 71
Accuracy: 0.0263894 72
Accuracy: 0.0259287 73
Accuracy: 0.0254505 74
Accuracy: 0.0249562 75
Accuracy: 0.024447 76
Accuracy: 0.0239244 77
Accuracy: 0.0233901 78
Accuracy: 0.0228456 79
Accuracy: 0.022293 80
Accuracy: 0.0217342 81
Accuracy: 0.0211712 82
Accuracy: 0.0206063 83
Accuracy: 0.0200419 84
Accuracy: 0.0194806 85
Accuracy: 0.0189249 86
Accuracy: 0.0183779 87
Accuracy: 0.0178423 88
Accuracy: 0.0173215 89
Accuracy: 0.0168186 90
Accuracy: 0.0163372 91
Accuracy: 0.015881 92
Accuracy: 0.0154537 93
Accuracy: 0.0150594 94
Accuracy: 0.0147022 95
Accuracy: 0.0143864 96
Accuracy: 0.0141167 97
Accuracy: 0.0138978 98
Accuracy: 0.0137346 99
Accuracy: 0.0141445 0
Accuracy: 0.0141175 1
Accuracy: 0.0141433 2
Accuracy: 0.0142194 3
Accuracy: 0.0143431 4
Accuracy: 0.0145119 5
Accuracy: 0.0147232 6
Accuracy: 0.0149744 7
Accuracy: 0.0152631 8
Accuracy: 0.0155867 9
Accuracy: 0.0159428 10
Accuracy: 0.016329 11
Accuracy: 0.0167428 12
Accuracy: 0.0171819 13
Accuracy: 0.0176439 14
Accuracy: 0.0181266 15
Accuracy: 0.0186276 16
Accuracy: 0.0191447 17
Accuracy: 0.0196757 18
Accuracy: 0.0202184 19
Accuracy: 0.0207707 20
Accuracy: 0.0213306 21
Accuracy: 0.0218958 22
Accuracy: 0.0224644 23
Accuracy: 0.0230345 24
Accuracy: 0.0236042 25
Accuracy: 0.0241714 26
Accuracy: 0.0247345 27
Accuracy: 0.0252915 28
Accuracy: 0.0258408 29
Accuracy: 0.0263806 30
Accuracy: 0.0269093 31
Accuracy: 0.0274254 32
Accuracy: 0.0279273 33
Accuracy: 0.0284134 34
Accuracy: 0.0288825 35
Accuracy: 0.0293331 36
Accuracy: 0.0297639 37
Accuracy: 0.0301736 38
Accuracy: 0.0305612 39
Accuracy: 0.0309253 40
Accuracy: 0.031265 41
Accuracy: 0.0315792 42
Accuracy: 0.0318672 43
Accuracy: 0.0321278 44
Accuracy: 0.0323605 45
Accuracy: 0.0325644 46
Accuracy: 0.0327389 47
Accuracy: 0.0328834 48
Accuracy: 0.0329974 49
Accuracy: 0.0330806 50
Accuracy: 0.0331324 51
Accuracy: 0.0331527 52
Accuracy: 0.0331412 53
Accuracy: 0.0330979 54
Accuracy: 0.0330227 55
Accuracy: 0.0329156 56
Accuracy: 0.0327768 57
Accuracy: 0.0326064 58
Accuracy: 0.0324049 59
Accuracy: 0.0321726 60
Accuracy: 0.03191 61
Accuracy: 0.0316177 62
Accuracy: 0.0312963 63
Accuracy: 0.0309466 64
Accuracy: 0.0305695 65
Accuracy: 0.0301659 66
Accuracy: 0.029737 67
Accuracy: 0.0292837 68
Accuracy: 0.0288075 69
Accuracy: 0.0283098 70
Accuracy: 0.0277918 71
Accuracy: 0.0272554 72
Accuracy: 0.026702 73
Accuracy: 0.0261336 74
Accuracy: 0.025552 75
Accuracy: 0.0249593 76
Accuracy: 0.0243577 77
Accuracy: 0.0237492 78
Accuracy: 0.0231363 79
Accuracy: 0.0225216 80
Accuracy: 0.0219077 81
Accuracy: 0.0212972 82
Accuracy: 0.020693 83
Accuracy: 0.0200981 84
Accuracy: 0.0195156 85
Accuracy: 0.0189486 86
Accuracy: 0.0184007 87
Accuracy: 0.0178752 88
Accuracy: 0.0173759 89
Accuracy: 0.0169064 90
Accuracy: 0.0164706 91
Accuracy: 0.0160727 92
Accuracy: 0.0157167 93
Accuracy: 0.0154069 94
Accuracy: 0.0151479 95
Accuracy: 0.0149441 96
Accuracy: 0.0148003 97
Accuracy: 0.0147214 98
Accuracy: 0.0147125 99
Accuracy: 0.0144933 0
Accuracy: 0.0147898 1
Accuracy: 0.0151562 2
Accuracy: 0.0155882 3
Accuracy: 0.0160816 4
Accuracy: 0.0166323 5
Accuracy: 0.0172362 6
Accuracy: 0.0178894 7
Accuracy: 0.018588 8
Accuracy: 0.0193281 9
Accuracy: 0.020106 10
Accuracy: 0.0209179 11
Accuracy: 0.0217602 12
Accuracy: 0.0226294 13
Accuracy: 0.0235219 14
Accuracy: 0.0244344 15
Accuracy: 0.0253634 16
Accuracy: 0.0263058 17
Accuracy: 0.0272584 18
Accuracy: 0.0282179 19
Accuracy: 0.0291815 20
Accuracy: 0.0301461 21
Accuracy: 0.0311088 22
Accuracy: 0.0320669 23
Accuracy: 0.0330176 24
Accuracy: 0.0339582 25
Accuracy: 0.0348863 26
Accuracy: 0.0357993 27
Accuracy: 0.0366949 28
Accuracy: 0.0375706 29
Accuracy: 0.0384244 30
Accuracy: 0.0392541 31
Accuracy: 0.0400575 32
Accuracy: 0.0408328 33
Accuracy: 0.0415782 34
Accuracy: 0.0422916 35
Accuracy: 0.0429715 36
Accuracy: 0.0436164 37
Accuracy: 0.0442245 38
Accuracy: 0.0447946 39
Accuracy: 0.0453254 40
Accuracy: 0.0458155 41
Accuracy: 0.0462637 42
Accuracy: 0.0466692 43
Accuracy: 0.047031 44
Accuracy: 0.0473481 45
Accuracy: 0.0476198 46
Accuracy: 0.0478455 47
Accuracy: 0.0480247 48
Accuracy: 0.0481568 49
Accuracy: 0.0482415 50
Accuracy: 0.0482785 51
Accuracy: 0.0482678 52
Accuracy: 0.0482094 53
Accuracy: 0.048103 54
Accuracy: 0.0479492 55
Accuracy: 0.047748 56
Accuracy: 0.0474999 57
Accuracy: 0.0472054 58
Accuracy: 0.046865 59
Accuracy: 0.0464795 60
Accuracy: 0.0460497 61
Accuracy: 0.0455766 62
Accuracy: 0.0450611 63
Accuracy: 0.0445045 64
Accuracy: 0.0439081 65
Accuracy: 0.0432732 66
Accuracy: 0.0426013 67
Accuracy: 0.041894 68
Accuracy: 0.0411532 69
Accuracy: 0.0403806 70
Accuracy: 0.0395774 71
Accuracy: 0.038745 72
Accuracy: 0.0378855 73
Accuracy: 0.037001 74
Accuracy: 0.0360936 75
Accuracy: 0.0351655 76
Accuracy: 0.034219 77
Accuracy: 0.0332562 78
Accuracy: 0.0322805 79
Accuracy: 0.0312943 80
Accuracy: 0.0302993 81
Accuracy: 0.029298 82
Accuracy: 0.0282934 83
Accuracy: 0.0272891 84
Accuracy: 0.0262877 85
Accuracy: 0.025292 86
Accuracy: 0.0243055 87
Accuracy: 0.0233314 88
Accuracy: 0.0223733 89
Accuracy: 0.021435 90
Accuracy: 0.0205207 91
Accuracy: 0.0196346 92
Accuracy: 0.0187808 93
Accuracy: 0.0179646 94
Accuracy: 0.0171945 95
Accuracy: 0.0164768 96
Accuracy: 0.0158208 97
Accuracy: 0.0152392 98
Accuracy: 0.0147391 99
Accuracy: 0.014176 0
Accuracy: 0.0142865 1
Accuracy: 0.0143977 2
Accuracy: 0.0145096 3
Accuracy: 0.0146222 4
Accuracy: 0.0147356 5
Accuracy: 0.0148496 6
Accuracy: 0.0149644 7
Accuracy: 0.0150799 8
Accuracy: 0.0151961 9
Accuracy: 0.015313 10
Accuracy: 0.0154307 11
Accuracy: 0.015549 12
Accuracy: 0.0156681 13
Accuracy: 0.0157879 14
Accuracy: 0.0159084 15
Accuracy: 0.0160297 16
Accuracy: 0.0161517 17
Accuracy: 0.0162744 18
Accuracy: 0.0163979 19
Accuracy: 0.016522 20
Accuracy: 0.0166469 21
Accuracy: 0.0167726 22
Accuracy: 0.016899 23
Accuracy: 0.0170261 24
Accuracy: 0.017154 25
Accuracy: 0.0172826 26
Accuracy: 0.0174119 27
Accuracy: 0.017542 28
Accuracy: 0.0176729 29
Accuracy: 0.0178045 30
Accuracy: 0.0179369 31
Accuracy: 0.01807 32
Accuracy: 0.0182038 33
Accuracy: 0.0183385 34
Accuracy: 0.0184739 35
Accuracy: 0.01861 36
Accuracy: 0.018747 37
Accuracy: 0.0188847 38
Accuracy: 0.0190232 39
Accuracy: 0.0191624 40
Accuracy: 0.0193025 41
Accuracy: 0.0194433 42
Accuracy: 0.0195849 43
Accuracy: 0.0197273 44
Accuracy: 0.0198704 45
Accuracy: 0.0200144 46
Accuracy: 0.0201592 47
Accuracy: 0.0203048 48
Accuracy: 0.0204511 49
Accuracy: 0.0205983 50
Accuracy: 0.0207463 51
Accuracy: 0.0208951 52
Accuracy: 0.0210447 53
Accuracy: 0.0211952 54
Accuracy: 0.0213465 55
Accuracy: 0.0214985 56
Accuracy: 0.0216515 57
Accuracy: 0.0218052 58
Accuracy: 0.0219598 59
Accuracy: 0.0221153 60
Accuracy: 0.0222715 61
Accuracy: 0.0224287 62
Accuracy: 0.0225867 63
Accuracy: 0.0227455 64
Accuracy: 0.0229052 65
Accuracy: 0.0230658 66
Accuracy: 0.0232272 67
Accuracy: 0.0233895 68
Accuracy: 0.0235535 69
Accuracy: 0.023719 70
Accuracy: 0.0238869 71
Accuracy: 0.0240571 72
Accuracy: 0.0242296 73
Accuracy: 0.0244038 74
Accuracy: 0.0245791 75
Accuracy: 0.0247561 76
Accuracy: 0.0249346 77
Accuracy: 0.0251152 78
Accuracy: 0.0252985 79
Accuracy: 0.0254828 80
Accuracy: 0.0256687 81
Accuracy: 0.025857 82
Accuracy: 0.0260465 83
Accuracy: 0.0262383 84
Accuracy: 0.0264316 85
Accuracy: 0.0266263 86
Accuracy: 0.0268221 87
Accuracy: 0.0270191 88
Accuracy: 0.0272175 89
Accuracy: 0.0274176 90
Accuracy: 0.0276215 91
Accuracy: 0.0278284 92
Accuracy: 0.0280369 93
Accuracy: 0.0282466 94
Accuracy: 0.0284579 95
Accuracy: 0.0286707 96
Accuracy: 0.0288851 97
Accuracy: 0.0291017 98
Accuracy: 0.0293195 99
[(0.03175395, 83), (0.015308196, 99), (0.030879561, 53), (0.033152718, 52), (0.048278533, 51), (0.029319521, 99)]
Interp bead indices:
[1, 2, 3, 4, 5, 6]
[4, 5, 6, 7]
Accuracy: 0.0108723
Final bead error: 0.0108723
Accuracy: 0.0198618
Accuracy: 0.0154325
Accuracy: 0.0143733
Final bead error: 0.0143733
[True, True, True, True, True, True, True, True, True, True]
Accuracy: 0.0137014 0
Accuracy: 0.0136473 1
Accuracy: 0.0135972 2
Accuracy: 0.013551 3
Accuracy: 0.0135085 4
Accuracy: 0.0134695 5
Accuracy: 0.0134338 6
Accuracy: 0.0134012 7
Accuracy: 0.0133716 8
Accuracy: 0.0133447 9
Accuracy: 0.0133204 10
Accuracy: 0.0132986 11
Accuracy: 0.013279 12
Accuracy: 0.0132615 13
Accuracy: 0.013246 14
Accuracy: 0.0132323 15
Accuracy: 0.0132201 16
Accuracy: 0.0132094 17
Accuracy: 0.0132001 18
Accuracy: 0.0131919 19
Accuracy: 0.0131848 20
Accuracy: 0.0131786 21
Accuracy: 0.0131731 22
Accuracy: 0.0131682 23
Accuracy: 0.0131639 24
Accuracy: 0.0131599 25
Accuracy: 0.0131562 26
Accuracy: 0.0131527 27
Accuracy: 0.0131491 28
Accuracy: 0.0131455 29
Accuracy: 0.0131416 30
Accuracy: 0.0131375 31
Accuracy: 0.0131329 32
Accuracy: 0.0131278 33
Accuracy: 0.0131222 34
Accuracy: 0.0131159 35
Accuracy: 0.0131089 36
Accuracy: 0.013101 37
Accuracy: 0.0130922 38
Accuracy: 0.0130824 39
Accuracy: 0.0130715 40
Accuracy: 0.0130596 41
Accuracy: 0.0130465 42
Accuracy: 0.0130322 43
Accuracy: 0.0130166 44
Accuracy: 0.0129997 45
Accuracy: 0.0129814 46
Accuracy: 0.0129617 47
Accuracy: 0.0129407 48
Accuracy: 0.0129181 49
Accuracy: 0.0128942 50
Accuracy: 0.0128687 51
Accuracy: 0.0128418 52
Accuracy: 0.0128134 53
Accuracy: 0.0127834 54
Accuracy: 0.012752 55
Accuracy: 0.0127191 56
Accuracy: 0.0126847 57
Accuracy: 0.0126489 58
Accuracy: 0.0126117 59
Accuracy: 0.012573 60
Accuracy: 0.012533 61
Accuracy: 0.0124916 62
Accuracy: 0.012449 63
Accuracy: 0.0124052 64
Accuracy: 0.0123603 65
Accuracy: 0.0123142 66
Accuracy: 0.0122671 67
Accuracy: 0.012219 68
Accuracy: 0.0121701 69
Accuracy: 0.0121204 70
Accuracy: 0.01207 71
Accuracy: 0.012019 72
Accuracy: 0.0119676 73
Accuracy: 0.0119158 74
Accuracy: 0.0118637 75
Accuracy: 0.0118115 76
Accuracy: 0.0117593 77
Accuracy: 0.0117072 78
Accuracy: 0.0116554 79
Accuracy: 0.0116039 80
Accuracy: 0.0115531 81
Accuracy: 0.011503 82
Accuracy: 0.0114539 83
Accuracy: 0.0114058 84
Accuracy: 0.0113589 85
Accuracy: 0.0113135 86
Accuracy: 0.0112698 87
Accuracy: 0.011228 88
Accuracy: 0.0111882 89
Accuracy: 0.0111506 90
Accuracy: 0.0111156 91
Accuracy: 0.0110833 92
Accuracy: 0.0110539 93
Accuracy: 0.0110278 94
Accuracy: 0.0110051 95
Accuracy: 0.0109862 96
Accuracy: 0.0109713 97
Accuracy: 0.0109606 98
Accuracy: 0.0109545 99
Accuracy: 0.0111349 0
Accuracy: 0.011131 1
Accuracy: 0.0111315 2
Accuracy: 0.0111362 3
Accuracy: 0.0111449 4
Accuracy: 0.0111574 5
Accuracy: 0.0111735 6
Accuracy: 0.0111929 7
Accuracy: 0.0112156 8
Accuracy: 0.0112412 9
Accuracy: 0.0112696 10
Accuracy: 0.0113006 11
Accuracy: 0.011334 12
Accuracy: 0.0113697 13
Accuracy: 0.0114075 14
Accuracy: 0.0114471 15
Accuracy: 0.0114885 16
Accuracy: 0.0115314 17
Accuracy: 0.0115758 18
Accuracy: 0.0116214 19
Accuracy: 0.0116681 20
Accuracy: 0.0117157 21
Accuracy: 0.0117642 22
Accuracy: 0.0118134 23
Accuracy: 0.0118632 24
Accuracy: 0.0119134 25
Accuracy: 0.0119638 26
Accuracy: 0.0120145 27
Accuracy: 0.0120652 28
Accuracy: 0.0121159 29
Accuracy: 0.0121665 30
Accuracy: 0.0122169 31
Accuracy: 0.0122669 32
Accuracy: 0.0123165 33
Accuracy: 0.0123655 34
Accuracy: 0.012414 35
Accuracy: 0.0124618 36
Accuracy: 0.0125088 37
Accuracy: 0.012555 38
Accuracy: 0.0126003 39
Accuracy: 0.0126447 40
Accuracy: 0.0126881 41
Accuracy: 0.0127305 42
Accuracy: 0.0127717 43
Accuracy: 0.0128118 44
Accuracy: 0.0128508 45
Accuracy: 0.0128886 46
Accuracy: 0.0129251 47
Accuracy: 0.0129605 48
Accuracy: 0.0129946 49
Accuracy: 0.0130274 50
Accuracy: 0.013059 51
Accuracy: 0.0130894 52
Accuracy: 0.0131185 53
Accuracy: 0.0131464 54
Accuracy: 0.013173 55
Accuracy: 0.0131985 56
Accuracy: 0.0132228 57
Accuracy: 0.013246 58
Accuracy: 0.0132681 59
Accuracy: 0.0132892 60
Accuracy: 0.0133094 61
Accuracy: 0.0133286 62
Accuracy: 0.0133469 63
Accuracy: 0.0133645 64
Accuracy: 0.0133813 65
Accuracy: 0.0133975 66
Accuracy: 0.0134132 67
Accuracy: 0.0134284 68
Accuracy: 0.0134432 69
Accuracy: 0.0134578 70
Accuracy: 0.0134722 71
Accuracy: 0.0134866 72
Accuracy: 0.0135011 73
Accuracy: 0.0135158 74
Accuracy: 0.0135308 75
Accuracy: 0.0135464 76
Accuracy: 0.0135626 77
Accuracy: 0.0135796 78
Accuracy: 0.0135976 79
Accuracy: 0.0136167 80
Accuracy: 0.013637 81
Accuracy: 0.0136589 82
Accuracy: 0.0136824 83
Accuracy: 0.0137077 84
Accuracy: 0.0137351 85
Accuracy: 0.0137647 86
Accuracy: 0.0137968 87
Accuracy: 0.0138316 88
Accuracy: 0.0138693 89
Accuracy: 0.0139101 90
Accuracy: 0.0139543 91
Accuracy: 0.0140022 92
Accuracy: 0.014054 93
Accuracy: 0.0141099 94
Accuracy: 0.0141702 95
Accuracy: 0.0142352 96
Accuracy: 0.0143051 97
Accuracy: 0.0143803 98
Accuracy: 0.014461 99
Accuracy: 0.0147008 0
Accuracy: 0.0147764 1
Accuracy: 0.0148611 2
Accuracy: 0.0149541 3
Accuracy: 0.0150549 4
Accuracy: 0.0151629 5
Accuracy: 0.0152775 6
Accuracy: 0.0153982 7
Accuracy: 0.0155245 8
Accuracy: 0.0156558 9
Accuracy: 0.0157916 10
Accuracy: 0.0159313 11
Accuracy: 0.0160745 12
Accuracy: 0.0162207 13
Accuracy: 0.0163694 14
Accuracy: 0.0165201 15
Accuracy: 0.0166723 16
Accuracy: 0.0168256 17
Accuracy: 0.0169796 18
Accuracy: 0.0171338 19
Accuracy: 0.0172878 20
Accuracy: 0.0174412 21
Accuracy: 0.0175937 22
Accuracy: 0.0177447 23
Accuracy: 0.017894 24
Accuracy: 0.0180412 25
Accuracy: 0.0181858 26
Accuracy: 0.0183277 27
Accuracy: 0.0184665 28
Accuracy: 0.0186018 29
Accuracy: 0.0187333 30
Accuracy: 0.0188608 31
Accuracy: 0.0189839 32
Accuracy: 0.0191025 33
Accuracy: 0.0192161 34
Accuracy: 0.0193247 35
Accuracy: 0.019428 36
Accuracy: 0.0195257 37
Accuracy: 0.0196176 38
Accuracy: 0.0197035 39
Accuracy: 0.0197834 40
Accuracy: 0.0198569 41
Accuracy: 0.0199239 42
Accuracy: 0.0199844 43
Accuracy: 0.020038 44
Accuracy: 0.0200849 45
Accuracy: 0.0201248 46
Accuracy: 0.0201576 47
Accuracy: 0.0201833 48
Accuracy: 0.0202017 49
Accuracy: 0.020213 50
Accuracy: 0.0202169 51
Accuracy: 0.0202136 52
Accuracy: 0.0202028 53
Accuracy: 0.0201848 54
Accuracy: 0.0201595 55
Accuracy: 0.020127 56
Accuracy: 0.0200872 57
Accuracy: 0.0200402 58
Accuracy: 0.0199861 59
Accuracy: 0.019925 60
Accuracy: 0.0198571 61
Accuracy: 0.0197823 62
Accuracy: 0.0197009 63
Accuracy: 0.019613 64
Accuracy: 0.0195188 65
Accuracy: 0.0194183 66
Accuracy: 0.019312 67
Accuracy: 0.0191998 68
Accuracy: 0.019082 69
Accuracy: 0.018959 70
Accuracy: 0.0188309 71
Accuracy: 0.018698 72
Accuracy: 0.0185606 73
Accuracy: 0.0184189 74
Accuracy: 0.0182734 75
Accuracy: 0.0181243 76
Accuracy: 0.017972 77
Accuracy: 0.0178168 78
Accuracy: 0.0176591 79
Accuracy: 0.0174993 80
Accuracy: 0.0173378 81
Accuracy: 0.0171751 82
Accuracy: 0.0170115 83
Accuracy: 0.0168476 84
Accuracy: 0.0166838 85
Accuracy: 0.0165206 86
Accuracy: 0.0163585 87
Accuracy: 0.016198 88
Accuracy: 0.0160398 89
Accuracy: 0.0158843 90
Accuracy: 0.015732 91
Accuracy: 0.0155838 92
Accuracy: 0.01544 93
Accuracy: 0.0153014 94
Accuracy: 0.0151686 95
Accuracy: 0.0150422 96
Accuracy: 0.014923 97
Accuracy: 0.0148116 98
Accuracy: 0.0147088 99
Accuracy: 0.0147082 0
Accuracy: 0.0148162 1
Accuracy: 0.014925 2
Accuracy: 0.0150342 3
Accuracy: 0.0151439 4
Accuracy: 0.0152539 5
Accuracy: 0.015364 6
Accuracy: 0.0154741 7
Accuracy: 0.015584 8
Accuracy: 0.0156937 9
Accuracy: 0.015803 10
Accuracy: 0.0159118 11
Accuracy: 0.01602 12
Accuracy: 0.0161274 13
Accuracy: 0.0162339 14
Accuracy: 0.0163395 15
Accuracy: 0.0164439 16
Accuracy: 0.0165472 17
Accuracy: 0.0166492 18
Accuracy: 0.0167498 19
Accuracy: 0.0168489 20
Accuracy: 0.0169464 21
Accuracy: 0.0170422 22
Accuracy: 0.0171363 23
Accuracy: 0.0172285 24
Accuracy: 0.0173188 25
Accuracy: 0.017407 26
Accuracy: 0.0174932 27
Accuracy: 0.0175773 28
Accuracy: 0.0176591 29
Accuracy: 0.0177386 30
Accuracy: 0.0178157 31
Accuracy: 0.0178904 32
Accuracy: 0.0179627 33
Accuracy: 0.0180324 34
Accuracy: 0.0180996 35
Accuracy: 0.0181641 36
Accuracy: 0.0182259 37
Accuracy: 0.0182851 38
Accuracy: 0.0183411 39
Accuracy: 0.0183939 40
Accuracy: 0.0184435 41
Accuracy: 0.0184897 42
Accuracy: 0.0185323 43
Accuracy: 0.0185713 44
Accuracy: 0.0186067 45
Accuracy: 0.0186385 46
Accuracy: 0.0186667 47
Accuracy: 0.0186912 48
Accuracy: 0.0187118 49
Accuracy: 0.0187287 50
Accuracy: 0.0187412 51
Accuracy: 0.0187495 52
Accuracy: 0.0187539 53
Accuracy: 0.0187542 54
Accuracy: 0.0187502 55
Accuracy: 0.0187423 56
Accuracy: 0.0187302 57
Accuracy: 0.0187133 58
Accuracy: 0.0186919 59
Accuracy: 0.0186657 60
Accuracy: 0.0186351 61
Accuracy: 0.0185999 62
Accuracy: 0.0185599 63
Accuracy: 0.0185153 64
Accuracy: 0.0184662 65
Accuracy: 0.0184125 66
Accuracy: 0.0183542 67
Accuracy: 0.0182913 68
Accuracy: 0.0182239 69
Accuracy: 0.018152 70
Accuracy: 0.0180759 71
Accuracy: 0.0179949 72
Accuracy: 0.0179092 73
Accuracy: 0.017819 74
Accuracy: 0.017724 75
Accuracy: 0.0176247 76
Accuracy: 0.0175208 77
Accuracy: 0.0174128 78
Accuracy: 0.0173006 79
Accuracy: 0.0171845 80
Accuracy: 0.0170647 81
Accuracy: 0.0169412 82
Accuracy: 0.0168144 83
Accuracy: 0.0166845 84
Accuracy: 0.016552 85
Accuracy: 0.0164174 86
Accuracy: 0.0162811 87
Accuracy: 0.0161437 88
Accuracy: 0.0160054 89
Accuracy: 0.0158666 90
Accuracy: 0.0157279 91
Accuracy: 0.0155898 92
Accuracy: 0.0154533 93
Accuracy: 0.0153204 94
Accuracy: 0.0151921 95
Accuracy: 0.0150679 96
Accuracy: 0.0149512 97
Accuracy: 0.0148471 98
Accuracy: 0.0147585 99
[(0.013701362, 0), (0.014461015, 99), (0.020216927, 51), (0.018754244, 54)]
Interp bead indices:
[4, 5, 6, 7]
1
2
Thresh: 0.03
Comps: 1
***
15.718850784
1.82471144944
In [60]:
# Walk the first test's bead chain and evaluate the interpolation error
# between every consecutive pair of beads. Each bead is a (weights, biases)
# pair; InterpBeadError prints its per-step accuracies itself, so the
# returned value `e` is only kept for the final pair.
beads = tests[0].AllBeads
for left_bead, right_bead in zip(beads[:-1], beads[1:]):
    e = InterpBeadError(left_bead[0], left_bead[1], right_bead[0], right_bead[1])
Accuracy: 0.0328678 0 0.377708 0.318923
Accuracy: 0.0325319 1 0.37981 0.31657
Accuracy: 0.0322168 2 0.381912 0.314217
Accuracy: 0.031929 3 0.384015 0.311864
Accuracy: 0.0316662 4 0.386117 0.309511
Accuracy: 0.0314231 5 0.388219 0.307158
Accuracy: 0.0311933 6 0.390321 0.304805
Accuracy: 0.0309831 7 0.392424 0.302452
Accuracy: 0.0307966 8 0.394526 0.300099
Accuracy: 0.0306282 9 0.396628 0.297746
Accuracy: 0.0304846 10 0.39873 0.295393
Accuracy: 0.0303562 11 0.400833 0.29304
Accuracy: 0.0302359 12 0.402935 0.290687
Accuracy: 0.0301275 13 0.405037 0.288334
Accuracy: 0.0300331 14 0.40714 0.285981
Accuracy: 0.0299499 15 0.409242 0.283628
Accuracy: 0.0298774 16 0.411344 0.281275
Accuracy: 0.0298219 17 0.413446 0.278922
Accuracy: 0.029779 18 0.415549 0.276569
Accuracy: 0.0297442 19 0.417651 0.274216
Accuracy: 0.02972 20 0.419753 0.271863
Accuracy: 0.0297074 21 0.421855 0.26951
Accuracy: 0.0297017 22 0.423958 0.267157
Accuracy: 0.0297025 23 0.42606 0.264804
Accuracy: 0.0297108 24 0.428162 0.262451
Accuracy: 0.0297232 25 0.430265 0.260098
Accuracy: 0.0297418 26 0.432367 0.257745
Accuracy: 0.0297656 27 0.434469 0.255392
Accuracy: 0.029795 28 0.436571 0.253039
Accuracy: 0.0298324 29 0.438674 0.250687
Accuracy: 0.0298715 30 0.440776 0.248334
Accuracy: 0.029913 31 0.442878 0.245981
Accuracy: 0.0299554 32 0.44498 0.243628
Accuracy: 0.0300007 33 0.447083 0.241275
Accuracy: 0.0300486 34 0.449185 0.238922
Accuracy: 0.0300976 35 0.451287 0.236569
Accuracy: 0.0301494 36 0.453389 0.234216
Accuracy: 0.0302014 37 0.455492 0.231863
Accuracy: 0.0302536 38 0.457594 0.22951
Accuracy: 0.0303037 39 0.459696 0.227157
Accuracy: 0.0303517 40 0.461799 0.224804
Accuracy: 0.0303992 41 0.463901 0.222451
Accuracy: 0.0304442 42 0.466003 0.220098
Accuracy: 0.0304856 43 0.468105 0.217745
Accuracy: 0.030524 44 0.470208 0.215392
Accuracy: 0.0305579 45 0.47231 0.213039
Accuracy: 0.0305861 46 0.474412 0.210686
Accuracy: 0.0306082 47 0.476514 0.208333
Accuracy: 0.0306235 48 0.478617 0.20598
Accuracy: 0.0306315 49 0.480719 0.203627
Accuracy: 0.0306314 50 0.482821 0.201274
Accuracy: 0.0306228 51 0.484923 0.198921
Accuracy: 0.0306057 52 0.487026 0.196568
Accuracy: 0.0305783 53 0.489128 0.194215
Accuracy: 0.0305406 54 0.49123 0.191862
Accuracy: 0.0304929 55 0.493333 0.189509
Accuracy: 0.0304336 56 0.495435 0.187156
Accuracy: 0.0303611 57 0.497537 0.184803
Accuracy: 0.0302744 58 0.499639 0.18245
Accuracy: 0.0301725 59 0.501742 0.180097
Accuracy: 0.0300567 60 0.503844 0.177744
Accuracy: 0.0299231 61 0.505946 0.175391
Accuracy: 0.0297764 62 0.508048 0.173038
Accuracy: 0.0296125 63 0.510151 0.170685
Accuracy: 0.0294295 64 0.512253 0.168332
Accuracy: 0.0292267 65 0.514355 0.165979
Accuracy: 0.0290034 66 0.516457 0.163626
Accuracy: 0.0287642 67 0.51856 0.161273
Accuracy: 0.0285002 68 0.520662 0.158921
Accuracy: 0.0282177 69 0.522764 0.156568
Accuracy: 0.0279199 70 0.524867 0.154215
Accuracy: 0.0276005 71 0.526969 0.151862
Accuracy: 0.0272625 72 0.529071 0.149509
Accuracy: 0.0269066 73 0.531173 0.147156
Accuracy: 0.0265272 74 0.533276 0.144803
Accuracy: 0.0261155 75 0.535378 0.14245
Accuracy: 0.0256846 76 0.53748 0.140097
Accuracy: 0.0252309 77 0.539582 0.137744
Accuracy: 0.024755 78 0.541685 0.135391
Accuracy: 0.0242558 79 0.543787 0.133038
Accuracy: 0.023735 80 0.545889 0.130685
Accuracy: 0.0231935 81 0.547992 0.128332
Accuracy: 0.0226301 82 0.550094 0.125979
Accuracy: 0.0220699 83 0.552196 0.123626
Accuracy: 0.0215223 84 0.554298 0.121273
Accuracy: 0.0209869 85 0.556401 0.11892
Accuracy: 0.0204633 86 0.558503 0.116567
Accuracy: 0.0199511 87 0.560605 0.114214
Accuracy: 0.0194499 88 0.562707 0.111861
Accuracy: 0.0189593 89 0.56481 0.109508
Accuracy: 0.0184789 90 0.566912 0.107155
Accuracy: 0.0180084 91 0.569014 0.104802
Accuracy: 0.0175475 92 0.571116 0.102449
Accuracy: 0.017096 93 0.573219 0.100096
Accuracy: 0.0166534 94 0.575321 0.0977432
Accuracy: 0.0162196 95 0.577423 0.0953902
Accuracy: 0.0157942 96 0.579526 0.0930372
Accuracy: 0.0153772 97 0.581628 0.0906843
Accuracy: 0.0149682 98 0.58373 0.0883313
Accuracy: 0.0145671 99 0.585832 0.0859783
Accuracy: 0.0125931 0 0.587935 0.0836253
Accuracy: 0.0125918 1 0.588028 0.0846835
Accuracy: 0.0125906 2 0.58812 0.0857416
Accuracy: 0.0125895 3 0.588213 0.0867998
Accuracy: 0.0125884 4 0.588306 0.0878579
Accuracy: 0.0125873 5 0.588399 0.0889161
Accuracy: 0.0125863 6 0.588492 0.0899742
Accuracy: 0.0125852 7 0.588585 0.0910323
Accuracy: 0.0125841 8 0.588678 0.0920905
Accuracy: 0.0125829 9 0.588771 0.0931486
Accuracy: 0.0125816 10 0.588864 0.0942068
Accuracy: 0.0125802 11 0.588957 0.0952649
Accuracy: 0.0125787 12 0.589049 0.0963231
Accuracy: 0.0125769 13 0.589142 0.0973812
Accuracy: 0.012575 14 0.589235 0.0984393
Accuracy: 0.012573 15 0.589328 0.0994975
Accuracy: 0.0125706 16 0.589421 0.100556
Accuracy: 0.0125681 17 0.589514 0.101614
Accuracy: 0.0125652 18 0.589607 0.102672
Accuracy: 0.0125621 19 0.5897 0.10373
Accuracy: 0.0125587 20 0.589793 0.104788
Accuracy: 0.012555 21 0.589886 0.105846
Accuracy: 0.0125509 22 0.589978 0.106904
Accuracy: 0.0125465 23 0.590071 0.107963
Accuracy: 0.0125418 24 0.590164 0.109021
Accuracy: 0.0125366 25 0.590257 0.110079
Accuracy: 0.012531 26 0.59035 0.111137
Accuracy: 0.012525 27 0.590443 0.112195
Accuracy: 0.0125186 28 0.590536 0.113253
Accuracy: 0.0125118 29 0.590629 0.114311
Accuracy: 0.0125044 30 0.590722 0.11537
Accuracy: 0.0124966 31 0.590815 0.116428
Accuracy: 0.0124884 32 0.590907 0.117486
Accuracy: 0.0124796 33 0.591 0.118544
Accuracy: 0.0124703 34 0.591093 0.119602
Accuracy: 0.0124605 35 0.591186 0.12066
Accuracy: 0.0124501 36 0.591279 0.121718
Accuracy: 0.0124392 37 0.591372 0.122777
Accuracy: 0.0124277 38 0.591465 0.123835
Accuracy: 0.0124157 39 0.591558 0.124893
Accuracy: 0.0124031 40 0.591651 0.125951
Accuracy: 0.0123899 41 0.591744 0.127009
Accuracy: 0.0123761 42 0.591836 0.128067
Accuracy: 0.0123617 43 0.591929 0.129125
Accuracy: 0.0123467 44 0.592022 0.130184
Accuracy: 0.0123311 45 0.592115 0.131242
Accuracy: 0.0123148 46 0.592208 0.1323
Accuracy: 0.012298 47 0.592301 0.133358
Accuracy: 0.0122804 48 0.592394 0.134416
Accuracy: 0.0122622 49 0.592487 0.135474
Accuracy: 0.0122434 50 0.59258 0.136532
Accuracy: 0.0122239 51 0.592673 0.137591
Accuracy: 0.0122038 52 0.592765 0.138649
Accuracy: 0.0121829 53 0.592858 0.139707
Accuracy: 0.0121614 54 0.592951 0.140765
Accuracy: 0.0121393 55 0.593044 0.141823
Accuracy: 0.0121164 56 0.593137 0.142881
Accuracy: 0.0120929 57 0.59323 0.143939
Accuracy: 0.0120686 58 0.593323 0.144998
Accuracy: 0.0120437 59 0.593416 0.146056
Accuracy: 0.0120181 60 0.593509 0.147114
Accuracy: 0.0119919 61 0.593602 0.148172
Accuracy: 0.0119649 62 0.593694 0.14923
Accuracy: 0.0119372 63 0.593787 0.150288
Accuracy: 0.0119088 64 0.59388 0.151346
Accuracy: 0.0118798 65 0.593973 0.152405
Accuracy: 0.01185 66 0.594066 0.153463
Accuracy: 0.0118196 67 0.594159 0.154521
Accuracy: 0.0117889 68 0.594252 0.155579
Accuracy: 0.0117598 69 0.594345 0.156637
Accuracy: 0.0117323 70 0.594438 0.157695
Accuracy: 0.0117067 71 0.594531 0.158753
Accuracy: 0.0116836 72 0.594624 0.159812
Accuracy: 0.0116647 73 0.594716 0.16087
Accuracy: 0.0116502 74 0.594809 0.161928
Accuracy: 0.0116391 75 0.594902 0.162986
Accuracy: 0.0116309 76 0.594995 0.164044
Accuracy: 0.0116267 77 0.595088 0.165102
Accuracy: 0.0116266 78 0.595181 0.16616
Accuracy: 0.0116307 79 0.595274 0.167219
Accuracy: 0.0116407 80 0.595367 0.168277
Accuracy: 0.0116561 81 0.59546 0.169335
Accuracy: 0.0116771 82 0.595553 0.170393
Accuracy: 0.0117026 83 0.595645 0.171451
Accuracy: 0.0117326 84 0.595738 0.172509
Accuracy: 0.0117667 85 0.595831 0.173567
Accuracy: 0.0118048 86 0.595924 0.174626
Accuracy: 0.0118473 87 0.596017 0.175684
Accuracy: 0.0118944 88 0.59611 0.176742
Accuracy: 0.0119459 89 0.596203 0.1778
Accuracy: 0.0120039 90 0.596296 0.178858
Accuracy: 0.0120675 91 0.596389 0.179916
Accuracy: 0.0121368 92 0.596482 0.180974
Accuracy: 0.0122122 93 0.596574 0.182033
Accuracy: 0.0122938 94 0.596667 0.183091
Accuracy: 0.0123811 95 0.59676 0.184149
Accuracy: 0.0124728 96 0.596853 0.185207
Accuracy: 0.0125706 97 0.596946 0.186265
Accuracy: 0.0126752 98 0.597039 0.187323
Accuracy: 0.0127864 99 0.597132 0.188381
Accuracy: 0.0131643 0 0.597225 0.18944
Accuracy: 0.0135634 1 0.597059 0.191015
Accuracy: 0.0139923 2 0.596893 0.19259
Accuracy: 0.0144488 3 0.596727 0.194165
Accuracy: 0.0149289 4 0.596561 0.19574
Accuracy: 0.0154295 5 0.596395 0.197315
Accuracy: 0.0159476 6 0.596229 0.19889
Accuracy: 0.016479 7 0.596063 0.200465
Accuracy: 0.0170166 8 0.595897 0.20204
Accuracy: 0.017557 9 0.595731 0.203615
Accuracy: 0.0180921 10 0.595565 0.20519
Accuracy: 0.0186169 11 0.595399 0.206765
Accuracy: 0.0191321 12 0.595233 0.20834
Accuracy: 0.0196369 13 0.595067 0.209915
Accuracy: 0.0201283 14 0.594901 0.21149
Accuracy: 0.0205943 15 0.594735 0.213065
Accuracy: 0.0210234 16 0.594569 0.21464
Accuracy: 0.0214014 17 0.594403 0.216215
Accuracy: 0.0217258 18 0.594237 0.21779
Accuracy: 0.0219819 19 0.594071 0.219365
Accuracy: 0.0221474 20 0.593906 0.22094
Accuracy: 0.0222322 21 0.59374 0.222515
Accuracy: 0.0222573 22 0.593574 0.22409
Accuracy: 0.0222783 23 0.593408 0.225665
Accuracy: 0.0222959 24 0.593242 0.22724
Accuracy: 0.02231 25 0.593076 0.228815
Accuracy: 0.0223206 26 0.59291 0.230391
Accuracy: 0.0223276 27 0.592744 0.231966
Accuracy: 0.022331 28 0.592578 0.233541
Accuracy: 0.0223307 29 0.592412 0.235116
Accuracy: 0.0223267 30 0.592246 0.236691
Accuracy: 0.0223189 31 0.59208 0.238266
Accuracy: 0.0223073 32 0.591914 0.239841
Accuracy: 0.0222918 33 0.591748 0.241416
Accuracy: 0.0222725 34 0.591582 0.242991
Accuracy: 0.0222493 35 0.591416 0.244566
Accuracy: 0.0222221 36 0.59125 0.246141
Accuracy: 0.0221911 37 0.591084 0.247716
Accuracy: 0.022156 38 0.590918 0.249291
Accuracy: 0.0221171 39 0.590752 0.250866
Accuracy: 0.0220742 40 0.590586 0.252441
Accuracy: 0.0220273 41 0.59042 0.254016
Accuracy: 0.0219765 42 0.590254 0.255591
Accuracy: 0.0219218 43 0.590088 0.257166
Accuracy: 0.0218632 44 0.589922 0.258741
Accuracy: 0.0218007 45 0.589756 0.260316
Accuracy: 0.0217343 46 0.589591 0.261891
Accuracy: 0.0216641 47 0.589425 0.263466
Accuracy: 0.0215901 48 0.589259 0.265041
Accuracy: 0.0215123 49 0.589093 0.266616
Accuracy: 0.0214308 50 0.588927 0.268191
Accuracy: 0.0213457 51 0.588761 0.269766
Accuracy: 0.0212569 52 0.588595 0.271341
Accuracy: 0.0211646 53 0.588429 0.272916
Accuracy: 0.0210687 54 0.588263 0.274491
Accuracy: 0.0209695 55 0.588097 0.276066
Accuracy: 0.0208668 56 0.587931 0.277642
Accuracy: 0.0207608 57 0.587765 0.279217
Accuracy: 0.0206517 58 0.587599 0.280792
Accuracy: 0.0205394 59 0.587433 0.282367
Accuracy: 0.020424 60 0.587267 0.283942
Accuracy: 0.0203056 61 0.587101 0.285517
Accuracy: 0.0201844 62 0.586935 0.287092
Accuracy: 0.0200604 63 0.586769 0.288667
Accuracy: 0.0199337 64 0.586603 0.290242
Accuracy: 0.0198045 65 0.586437 0.291817
Accuracy: 0.0196728 66 0.586271 0.293392
Accuracy: 0.0195387 67 0.586105 0.294967
Accuracy: 0.0194024 68 0.585939 0.296542
Accuracy: 0.019264 69 0.585773 0.298117
Accuracy: 0.0191236 70 0.585608 0.299692
Accuracy: 0.0189813 71 0.585442 0.301267
Accuracy: 0.0188373 72 0.585276 0.302842
Accuracy: 0.0186917 73 0.58511 0.304417
Accuracy: 0.0185447 74 0.584944 0.305992
Accuracy: 0.0183964 75 0.584778 0.307567
Accuracy: 0.0182469 76 0.584612 0.309142
Accuracy: 0.0180964 77 0.584446 0.310717
Accuracy: 0.0179451 78 0.58428 0.312292
Accuracy: 0.017793 79 0.584114 0.313867
Accuracy: 0.0176405 80 0.583948 0.315442
Accuracy: 0.0174876 81 0.583782 0.317017
Accuracy: 0.0173345 82 0.583616 0.318592
Accuracy: 0.0171814 83 0.58345 0.320167
Accuracy: 0.0170284 84 0.583284 0.321742
Accuracy: 0.0168759 85 0.583118 0.323318
Accuracy: 0.0167238 86 0.582952 0.324893
Accuracy: 0.0165725 87 0.582786 0.326468
Accuracy: 0.0164221 88 0.58262 0.328043
Accuracy: 0.0162728 89 0.582454 0.329618
Accuracy: 0.0161248 90 0.582288 0.331193
Accuracy: 0.0159783 91 0.582122 0.332768
Accuracy: 0.0158336 92 0.581956 0.334343
Accuracy: 0.0156908 93 0.58179 0.335918
Accuracy: 0.0155501 94 0.581624 0.337493
Accuracy: 0.0154118 95 0.581459 0.339068
Accuracy: 0.0152762 96 0.581293 0.340643
Accuracy: 0.0151433 97 0.581127 0.342218
Accuracy: 0.0150134 98 0.580961 0.343793
Accuracy: 0.0148869 99 0.580795 0.345368
Accuracy: 0.0143028 0 0.580629 0.346943
Accuracy: 0.0144202 1 0.58107 0.34859
Accuracy: 0.0145793 2 0.58151 0.350238
Accuracy: 0.0147767 3 0.581951 0.351885
Accuracy: 0.015009 4 0.582392 0.353532
Accuracy: 0.0152729 5 0.582833 0.355179
Accuracy: 0.0155653 6 0.583274 0.356827
Accuracy: 0.0158831 7 0.583715 0.358474
Accuracy: 0.0162236 8 0.584156 0.360121
Accuracy: 0.0165838 9 0.584597 0.361768
Accuracy: 0.0169611 10 0.585038 0.363416
Accuracy: 0.0173528 11 0.585478 0.365063
Accuracy: 0.0177565 12 0.585919 0.36671
Accuracy: 0.0181698 13 0.58636 0.368357
Accuracy: 0.0185903 14 0.586801 0.370005
Accuracy: 0.0190159 15 0.587242 0.371652
Accuracy: 0.0194444 16 0.587683 0.373299
Accuracy: 0.0198738 17 0.588124 0.374947
Accuracy: 0.0203023 18 0.588565 0.376594
Accuracy: 0.020728 19 0.589006 0.378241
Accuracy: 0.0211492 20 0.589446 0.379888
Accuracy: 0.0215642 21 0.589887 0.381536
Accuracy: 0.0219714 22 0.590328 0.383183
Accuracy: 0.0223694 23 0.590769 0.38483
Accuracy: 0.0227568 24 0.59121 0.386477
Accuracy: 0.0231323 25 0.591651 0.388125
Accuracy: 0.0234947 26 0.592092 0.389772
Accuracy: 0.0238428 27 0.592533 0.391419
Accuracy: 0.0241755 28 0.592974 0.393066
Accuracy: 0.0244918 29 0.593414 0.394714
Accuracy: 0.0247909 30 0.593855 0.396361
Accuracy: 0.0250719 31 0.594296 0.398008
Accuracy: 0.025334 32 0.594737 0.399655
Accuracy: 0.0255769 33 0.595178 0.401303
Accuracy: 0.0258018 34 0.595619 0.40295
Accuracy: 0.0260075 35 0.59606 0.404597
Accuracy: 0.0261948 36 0.596501 0.406245
Accuracy: 0.0263639 37 0.596942 0.407892
Accuracy: 0.0265139 38 0.597383 0.409539
Accuracy: 0.0266459 39 0.597823 0.411186
Accuracy: 0.026758 40 0.598264 0.412834
Accuracy: 0.026849 41 0.598705 0.414481
Accuracy: 0.0269194 42 0.599146 0.416128
Accuracy: 0.0269681 43 0.599587 0.417775
Accuracy: 0.0269985 44 0.600028 0.419423
Accuracy: 0.0270077 45 0.600469 0.42107
Accuracy: 0.0269963 46 0.60091 0.422717
Accuracy: 0.0269645 47 0.601351 0.424364
Accuracy: 0.0269145 48 0.601791 0.426012
Accuracy: 0.0268463 49 0.602232 0.427659
Accuracy: 0.0267589 50 0.602673 0.429306
Accuracy: 0.0266532 51 0.603114 0.430953
Accuracy: 0.0265285 52 0.603555 0.432601
Accuracy: 0.0263857 53 0.603996 0.434248
Accuracy: 0.0262249 54 0.604437 0.435895
Accuracy: 0.0260458 55 0.604878 0.437543
Accuracy: 0.0258485 56 0.605319 0.43919
Accuracy: 0.0256353 57 0.605759 0.440837
Accuracy: 0.0254049 58 0.6062 0.442484
Accuracy: 0.0251585 59 0.606641 0.444132
Accuracy: 0.0248972 60 0.607082 0.445779
Accuracy: 0.0246224 61 0.607523 0.447426
Accuracy: 0.0243358 62 0.607964 0.449073
Accuracy: 0.0240364 63 0.608405 0.450721
Accuracy: 0.0237255 64 0.608846 0.452368
Accuracy: 0.0234035 65 0.609287 0.454015
Accuracy: 0.0230716 66 0.609727 0.455662
Accuracy: 0.0227299 67 0.610168 0.45731
Accuracy: 0.0223798 68 0.610609 0.458957
Accuracy: 0.0220227 69 0.61105 0.460604
Accuracy: 0.0216607 70 0.611491 0.462251
Accuracy: 0.0212948 71 0.611932 0.463899
Accuracy: 0.0209247 72 0.612373 0.465546
Accuracy: 0.0205509 73 0.612814 0.467193
Accuracy: 0.0201749 74 0.613255 0.468841
Accuracy: 0.0197973 75 0.613695 0.470488
Accuracy: 0.0194187 76 0.614136 0.472135
Accuracy: 0.0190408 77 0.614577 0.473782
Accuracy: 0.0186646 78 0.615018 0.47543
Accuracy: 0.0182915 79 0.615459 0.477077
Accuracy: 0.0179221 80 0.6159 0.478724
Accuracy: 0.0175575 81 0.616341 0.480371
Accuracy: 0.0171996 82 0.616782 0.482019
Accuracy: 0.0168494 83 0.617223 0.483666
Accuracy: 0.0165079 84 0.617663 0.485313
Accuracy: 0.0161764 85 0.618104 0.48696
Accuracy: 0.0158565 86 0.618545 0.488608
Accuracy: 0.0155487 87 0.618986 0.490255
Accuracy: 0.0152545 88 0.619427 0.491902
Accuracy: 0.0149753 89 0.619868 0.493549
Accuracy: 0.0147122 90 0.620309 0.495197
Accuracy: 0.0144668 91 0.62075 0.496844
Accuracy: 0.0142404 92 0.621191 0.498491
Accuracy: 0.0140343 93 0.621632 0.500139
Accuracy: 0.0138501 94 0.622072 0.501786
Accuracy: 0.0136888 95 0.622513 0.503433
Accuracy: 0.0135516 96 0.622954 0.50508
Accuracy: 0.0134399 97 0.623395 0.506728
Accuracy: 0.013355 98 0.623836 0.508375
Accuracy: 0.0132978 99 0.624277 0.510022
Accuracy: 0.0137504 0 0.624718 0.511669
Accuracy: 0.0137361 1 0.625716 0.511497
Accuracy: 0.0137389 2 0.626714 0.511325
Accuracy: 0.0137579 3 0.627712 0.511153
Accuracy: 0.013792 4 0.628711 0.510981
Accuracy: 0.0138404 5 0.629709 0.510808
Accuracy: 0.0139021 6 0.630707 0.510636
Accuracy: 0.0139761 7 0.631706 0.510464
Accuracy: 0.0140617 8 0.632704 0.510292
Accuracy: 0.0141577 9 0.633702 0.51012
Accuracy: 0.0142635 10 0.6347 0.509947
Accuracy: 0.014378 11 0.635699 0.509775
Accuracy: 0.0145005 12 0.636697 0.509603
Accuracy: 0.0146301 13 0.637695 0.509431
Accuracy: 0.0147659 14 0.638693 0.509259
Accuracy: 0.014907 15 0.639692 0.509086
Accuracy: 0.0150527 16 0.64069 0.508914
Accuracy: 0.0152022 17 0.641688 0.508742
Accuracy: 0.0153546 18 0.642686 0.50857
Accuracy: 0.0155076 19 0.643685 0.508398
Accuracy: 0.01566 20 0.644683 0.508225
Accuracy: 0.0158112 21 0.645681 0.508053
Accuracy: 0.0159586 22 0.64668 0.507881
Accuracy: 0.0161029 23 0.647678 0.507709
Accuracy: 0.0162428 24 0.648676 0.507537
Accuracy: 0.0163774 25 0.649674 0.507364
Accuracy: 0.0165076 26 0.650673 0.507192
Accuracy: 0.0166312 27 0.651671 0.50702
Accuracy: 0.0167485 28 0.652669 0.506848
Accuracy: 0.0168591 29 0.653667 0.506676
Accuracy: 0.0169625 30 0.654666 0.506503
Accuracy: 0.0170587 31 0.655664 0.506331
Accuracy: 0.0171474 32 0.656662 0.506159
Accuracy: 0.0172282 33 0.65766 0.505987
Accuracy: 0.0173009 34 0.658659 0.505815
Accuracy: 0.0173649 35 0.659657 0.505642
Accuracy: 0.01742 36 0.660655 0.50547
Accuracy: 0.0174658 37 0.661653 0.505298
Accuracy: 0.0175023 38 0.662652 0.505126
Accuracy: 0.0175291 39 0.66365 0.504954
Accuracy: 0.0175462 40 0.664648 0.504781
Accuracy: 0.0175532 41 0.665647 0.504609
Accuracy: 0.01755 42 0.666645 0.504437
Accuracy: 0.0175367 43 0.667643 0.504265
Accuracy: 0.0175129 44 0.668641 0.504093
Accuracy: 0.0174787 45 0.66964 0.50392
Accuracy: 0.0174339 46 0.670638 0.503748
Accuracy: 0.0173786 47 0.671636 0.503576
Accuracy: 0.0173125 48 0.672634 0.503404
Accuracy: 0.0172363 49 0.673633 0.503232
Accuracy: 0.0171497 50 0.674631 0.503059
Accuracy: 0.0170528 51 0.675629 0.502887
Accuracy: 0.0169465 52 0.676627 0.502715
Accuracy: 0.0168303 53 0.677626 0.502543
Accuracy: 0.0167044 54 0.678624 0.502371
Accuracy: 0.0165686 55 0.679622 0.502198
Accuracy: 0.0164237 56 0.68062 0.502026
Accuracy: 0.0162696 57 0.681619 0.501854
Accuracy: 0.0161068 58 0.682617 0.501682
Accuracy: 0.015935 59 0.683615 0.50151
Accuracy: 0.0157543 60 0.684614 0.501337
Accuracy: 0.0155654 61 0.685612 0.501165
Accuracy: 0.0153685 62 0.68661 0.500993
Accuracy: 0.0151649 63 0.687608 0.500821
Accuracy: 0.0149556 64 0.688607 0.500648
Accuracy: 0.0147409 65 0.689605 0.500476
Accuracy: 0.014521 66 0.690603 0.500304
Accuracy: 0.0142961 67 0.691601 0.500132
Accuracy: 0.0140662 68 0.6926 0.49996
Accuracy: 0.0138325 69 0.693598 0.499788
Accuracy: 0.013595 70 0.694596 0.499615
Accuracy: 0.0133557 71 0.695594 0.499443
Accuracy: 0.0131137 72 0.696593 0.499271
Accuracy: 0.0128706 73 0.697591 0.499099
Accuracy: 0.0126259 74 0.698589 0.498927
Accuracy: 0.012382 75 0.699588 0.498754
Accuracy: 0.0121392 76 0.700586 0.498582
Accuracy: 0.0118955 77 0.701584 0.49841
Accuracy: 0.0116511 78 0.702582 0.498238
Accuracy: 0.0114094 79 0.703581 0.498066
Accuracy: 0.0111711 80 0.704579 0.497893
Accuracy: 0.0109368 81 0.705577 0.497721
Accuracy: 0.0107067 82 0.706575 0.497549
Accuracy: 0.0104822 83 0.707574 0.497377
Accuracy: 0.0102625 84 0.708572 0.497205
Accuracy: 0.0100497 85 0.70957 0.497032
Accuracy: 0.00984453 86 0.710568 0.49686
Accuracy: 0.00964692 87 0.711567 0.496688
Accuracy: 0.00945682 88 0.712565 0.496516
Accuracy: 0.00927513 89 0.713563 0.496343
Accuracy: 0.00910265 90 0.714561 0.496171
Accuracy: 0.00894032 91 0.71556 0.495999
Accuracy: 0.00878887 92 0.716558 0.495827
Accuracy: 0.00864956 93 0.717556 0.495655
Accuracy: 0.00852262 94 0.718554 0.495483
Accuracy: 0.00840549 95 0.719553 0.49531
Accuracy: 0.00829895 96 0.720551 0.495138
Accuracy: 0.00820408 97 0.721549 0.494966
Accuracy: 0.00812242 98 0.722548 0.494794
Accuracy: 0.00805509 99 0.723546 0.494621
Accuracy: 0.00838106 0 0.724544 0.494449
Accuracy: 0.00842984 1 0.725344 0.494341
Accuracy: 0.0084843 2 0.726144 0.494233
Accuracy: 0.00854414 3 0.726943 0.494125
Accuracy: 0.0086091 4 0.727743 0.494017
Accuracy: 0.00867888 5 0.728543 0.493909
Accuracy: 0.00875324 6 0.729343 0.4938
Accuracy: 0.00883187 7 0.730142 0.493692
Accuracy: 0.00891453 8 0.730942 0.493584
Accuracy: 0.00900095 9 0.731742 0.493476
Accuracy: 0.00909086 10 0.732542 0.493368
Accuracy: 0.00918403 11 0.733342 0.49326
Accuracy: 0.00928015 12 0.734141 0.493152
Accuracy: 0.00937901 13 0.734941 0.493044
Accuracy: 0.00948035 14 0.735741 0.492935
Accuracy: 0.00958394 15 0.736541 0.492827
Accuracy: 0.00968953 16 0.73734 0.492719
Accuracy: 0.00979685 17 0.73814 0.492611
Accuracy: 0.0099057 18 0.73894 0.492503
Accuracy: 0.0100158 19 0.73974 0.492395
Accuracy: 0.010127 20 0.740539 0.492287
Accuracy: 0.010239 21 0.741339 0.492178
Accuracy: 0.0103516 22 0.742139 0.49207
Accuracy: 0.0104647 23 0.742939 0.491962
Accuracy: 0.0105779 24 0.743739 0.491854
Accuracy: 0.0106911 25 0.744538 0.491746
Accuracy: 0.0108042 26 0.745338 0.491638
Accuracy: 0.0109168 27 0.746138 0.49153
Accuracy: 0.0110288 28 0.746938 0.491422
Accuracy: 0.0111401 29 0.747737 0.491313
Accuracy: 0.0112504 30 0.748537 0.491205
Accuracy: 0.0113595 31 0.749337 0.491097
Accuracy: 0.0114674 32 0.750137 0.490989
Accuracy: 0.0115737 33 0.750937 0.490881
Accuracy: 0.0116784 34 0.751736 0.490773
Accuracy: 0.0117813 35 0.752536 0.490665
Accuracy: 0.0118822 36 0.753336 0.490556
Accuracy: 0.0119811 37 0.754136 0.490448
Accuracy: 0.0120777 38 0.754935 0.49034
Accuracy: 0.012172 39 0.755735 0.490232
Accuracy: 0.0122638 40 0.756535 0.490124
Accuracy: 0.012353 41 0.757335 0.490016
Accuracy: 0.0124395 42 0.758134 0.489908
Accuracy: 0.0125232 43 0.758934 0.4898
Accuracy: 0.012604 44 0.759734 0.489691
Accuracy: 0.0126818 45 0.760534 0.489583
Accuracy: 0.0127566 46 0.761333 0.489475
Accuracy: 0.0128282 47 0.762133 0.489367
Accuracy: 0.0128967 48 0.762933 0.489259
Accuracy: 0.0129619 49 0.763733 0.489151
Accuracy: 0.0130239 50 0.764533 0.489043
Accuracy: 0.0130825 51 0.765332 0.488934
Accuracy: 0.0131377 52 0.766132 0.488826
Accuracy: 0.0131895 53 0.766932 0.488718
Accuracy: 0.013238 54 0.767732 0.48861
Accuracy: 0.013283 55 0.768531 0.488502
Accuracy: 0.0133245 56 0.769331 0.488394
Accuracy: 0.0133626 57 0.770131 0.488286
Accuracy: 0.0133972 58 0.770931 0.488178
Accuracy: 0.0134284 59 0.77173 0.488069
Accuracy: 0.0134563 60 0.77253 0.487961
Accuracy: 0.0134808 61 0.77333 0.487853
Accuracy: 0.013502 62 0.77413 0.487745
Accuracy: 0.0135201 63 0.77493 0.487637
Accuracy: 0.013535 64 0.775729 0.487529
Accuracy: 0.0135468 65 0.776529 0.487421
Accuracy: 0.0135557 66 0.777329 0.487312
Accuracy: 0.0135618 67 0.778129 0.487204
Accuracy: 0.0135652 68 0.778928 0.487096
Accuracy: 0.0135661 69 0.779728 0.486988
Accuracy: 0.0135646 70 0.780528 0.48688
Accuracy: 0.0135606 71 0.781328 0.486772
Accuracy: 0.0135545 72 0.782127 0.486664
Accuracy: 0.0135464 73 0.782927 0.486556
Accuracy: 0.0135366 74 0.783727 0.486447
Accuracy: 0.0135251 75 0.784527 0.486339
Accuracy: 0.0135122 76 0.785327 0.486231
Accuracy: 0.013498 77 0.786126 0.486123
Accuracy: 0.0134831 78 0.786926 0.486015
Accuracy: 0.0134674 79 0.787726 0.485907
Accuracy: 0.0134512 80 0.788526 0.485799
Accuracy: 0.0134347 81 0.789325 0.485691
Accuracy: 0.0134183 82 0.790125 0.485582
Accuracy: 0.0134021 83 0.790925 0.485474
Accuracy: 0.0133864 84 0.791725 0.485366
Accuracy: 0.0133717 85 0.792524 0.485258
Accuracy: 0.0133582 86 0.793324 0.48515
Accuracy: 0.0133463 87 0.794124 0.485042
Accuracy: 0.0133367 88 0.794924 0.484934
Accuracy: 0.0133294 89 0.795724 0.484825
Accuracy: 0.0133248 90 0.796523 0.484717
Accuracy: 0.0133233 91 0.797323 0.484609
Accuracy: 0.0133255 92 0.798123 0.484501
Accuracy: 0.0133319 93 0.798923 0.484393
Accuracy: 0.0133427 94 0.799722 0.484285
Accuracy: 0.0133583 95 0.800522 0.484177
Accuracy: 0.0133794 96 0.801322 0.484068
Accuracy: 0.0134062 97 0.802122 0.48396
Accuracy: 0.0134393 98 0.802921 0.483852
Accuracy: 0.0134795 99 0.803721 0.483744
Accuracy: 0.0132323 0 0.804521 0.483636
Accuracy: 0.0132142 1 0.805349 0.483747
Accuracy: 0.0132006 2 0.806176 0.483858
Accuracy: 0.0131913 3 0.807004 0.483968
Accuracy: 0.0131858 4 0.807831 0.484079
Accuracy: 0.0131839 5 0.808659 0.48419
Accuracy: 0.0131854 6 0.809486 0.484301
Accuracy: 0.0131902 7 0.810314 0.484412
Accuracy: 0.013198 8 0.811141 0.484523
Accuracy: 0.0132085 9 0.811969 0.484633
Accuracy: 0.0132215 10 0.812796 0.484744
Accuracy: 0.0132372 11 0.813624 0.484855
Accuracy: 0.0132549 12 0.814451 0.484966
Accuracy: 0.0132746 13 0.815279 0.485077
Accuracy: 0.0132963 14 0.816106 0.485187
Accuracy: 0.0133195 15 0.816934 0.485298
Accuracy: 0.0133439 16 0.817761 0.485409
Accuracy: 0.0133693 17 0.818589 0.48552
Accuracy: 0.0133956 18 0.819416 0.485631
Accuracy: 0.0134227 19 0.820244 0.485742
Accuracy: 0.0134505 20 0.821071 0.485852
Accuracy: 0.0134787 21 0.821899 0.485963
Accuracy: 0.013507 22 0.822726 0.486074
Accuracy: 0.0135358 23 0.823554 0.486185
Accuracy: 0.0135644 24 0.824381 0.486296
Accuracy: 0.0135926 25 0.825209 0.486407
Accuracy: 0.0136204 26 0.826036 0.486517
Accuracy: 0.0136478 27 0.826864 0.486628
Accuracy: 0.0136746 28 0.827691 0.486739
Accuracy: 0.0137003 29 0.828519 0.48685
Accuracy: 0.0137248 30 0.829346 0.486961
Accuracy: 0.0137479 31 0.830174 0.487071
Accuracy: 0.0137696 32 0.831001 0.487182
Accuracy: 0.01379 33 0.831829 0.487293
Accuracy: 0.0138088 34 0.832656 0.487404
Accuracy: 0.0138259 35 0.833484 0.487515
Accuracy: 0.0138411 36 0.834311 0.487626
Accuracy: 0.0138542 37 0.835139 0.487736
Accuracy: 0.0138652 38 0.835966 0.487847
Accuracy: 0.0138741 39 0.836794 0.487958
Accuracy: 0.0138807 40 0.837621 0.488069
Accuracy: 0.013885 41 0.838449 0.48818
Accuracy: 0.0138867 42 0.839276 0.48829
Accuracy: 0.0138857 43 0.840104 0.488401
Accuracy: 0.013882 44 0.840932 0.488512
Accuracy: 0.0138757 45 0.841759 0.488623
Accuracy: 0.0138667 46 0.842587 0.488734
Accuracy: 0.013855 47 0.843414 0.488845
Accuracy: 0.0138403 48 0.844242 0.488955
Accuracy: 0.0138229 49 0.845069 0.489066
Accuracy: 0.0138026 50 0.845897 0.489177
Accuracy: 0.0137794 51 0.846724 0.489288
Accuracy: 0.0137531 52 0.847552 0.489399
Accuracy: 0.0137238 53 0.848379 0.489509
Accuracy: 0.0136919 54 0.849207 0.48962
Accuracy: 0.013657 55 0.850034 0.489731
Accuracy: 0.0136196 56 0.850862 0.489842
Accuracy: 0.0135794 57 0.851689 0.489953
Accuracy: 0.0135365 58 0.852517 0.490064
Accuracy: 0.0134907 59 0.853344 0.490174
Accuracy: 0.0134424 60 0.854172 0.490285
Accuracy: 0.0133915 61 0.854999 0.490396
Accuracy: 0.0133384 62 0.855827 0.490507
Accuracy: 0.0132829 63 0.856654 0.490618
Accuracy: 0.0132247 64 0.857482 0.490729
Accuracy: 0.0131639 65 0.858309 0.490839
Accuracy: 0.0131008 66 0.859137 0.49095
Accuracy: 0.0130352 67 0.859964 0.491061
Accuracy: 0.0129681 68 0.860792 0.491172
Accuracy: 0.0128991 69 0.861619 0.491283
Accuracy: 0.0128279 70 0.862447 0.491393
Accuracy: 0.0127552 71 0.863274 0.491504
Accuracy: 0.0126807 72 0.864102 0.491615
Accuracy: 0.0126046 73 0.864929 0.491726
Accuracy: 0.0125273 74 0.865757 0.491837
Accuracy: 0.012449 75 0.866584 0.491948
Accuracy: 0.0123697 76 0.867412 0.492058
Accuracy: 0.0122895 77 0.868239 0.492169
Accuracy: 0.012209 78 0.869067 0.49228
Accuracy: 0.0121281 79 0.869894 0.492391
Accuracy: 0.0120471 80 0.870722 0.492502
Accuracy: 0.0119661 81 0.871549 0.492612
Accuracy: 0.0118854 82 0.872377 0.492723
Accuracy: 0.0118056 83 0.873204 0.492834
Accuracy: 0.0117265 84 0.874032 0.492945
Accuracy: 0.0116487 85 0.874859 0.493056
Accuracy: 0.0115722 86 0.875687 0.493167
Accuracy: 0.0114978 87 0.876514 0.493277
Accuracy: 0.0114254 88 0.877342 0.493388
Accuracy: 0.0113555 89 0.878169 0.493499
Accuracy: 0.0112883 90 0.878997 0.49361
Accuracy: 0.011224 91 0.879825 0.493721
Accuracy: 0.011163 92 0.880652 0.493832
Accuracy: 0.0111067 93 0.88148 0.493942
Accuracy: 0.0110549 94 0.882307 0.494053
Accuracy: 0.011008 95 0.883135 0.494164
Accuracy: 0.0109665 96 0.883962 0.494275
Accuracy: 0.0109302 97 0.88479 0.494386
Accuracy: 0.0108994 98 0.885617 0.494496
Accuracy: 0.0108745 99 0.886445 0.494607
Accuracy: 0.00999762 0 0.887272 0.494718
Accuracy: 0.0101223 1 0.887774 0.494513
Accuracy: 0.0102491 2 0.888276 0.494308
Accuracy: 0.010378 3 0.888778 0.494103
Accuracy: 0.0105089 4 0.88928 0.493898
Accuracy: 0.0106418 5 0.889782 0.493693
Accuracy: 0.0107766 6 0.890283 0.493488
Accuracy: 0.0109133 7 0.890785 0.493283
Accuracy: 0.0110519 8 0.891287 0.493078
Accuracy: 0.0111922 9 0.891789 0.492873
Accuracy: 0.0113342 10 0.892291 0.492668
Accuracy: 0.011478 11 0.892793 0.492463
Accuracy: 0.0116234 12 0.893295 0.492258
Accuracy: 0.0117704 13 0.893797 0.492053
Accuracy: 0.0119189 14 0.894298 0.491848
Accuracy: 0.0120689 15 0.8948 0.491643
Accuracy: 0.0122204 16 0.895302 0.491438
Accuracy: 0.0123734 17 0.895804 0.491233
Accuracy: 0.0125277 18 0.896306 0.491028
Accuracy: 0.0126833 19 0.896808 0.490823
Accuracy: 0.0128403 20 0.89731 0.490618
Accuracy: 0.0129985 21 0.897812 0.490413
Accuracy: 0.0131581 22 0.898313 0.490208
Accuracy: 0.013319 23 0.898815 0.490003
Accuracy: 0.0134811 24 0.899317 0.489798
Accuracy: 0.0136444 25 0.899819 0.489593
Accuracy: 0.0138088 26 0.900321 0.489388
Accuracy: 0.0139743 27 0.900823 0.489183
Accuracy: 0.0141409 28 0.901325 0.488978
Accuracy: 0.0143085 29 0.901827 0.488773
Accuracy: 0.0144771 30 0.902328 0.488568
Accuracy: 0.0146466 31 0.90283 0.488363
Accuracy: 0.0148171 32 0.903332 0.488158
Accuracy: 0.0149884 33 0.903834 0.487953
Accuracy: 0.0151607 34 0.904336 0.487748
Accuracy: 0.0153338 35 0.904838 0.487543
Accuracy: 0.0155079 36 0.90534 0.487338
Accuracy: 0.0156827 37 0.905842 0.487133
Accuracy: 0.0158584 38 0.906344 0.486928
Accuracy: 0.0160348 39 0.906845 0.486723
Accuracy: 0.016212 40 0.907347 0.486518
Accuracy: 0.0163899 41 0.907849 0.486313
Accuracy: 0.0165686 42 0.908351 0.486108
Accuracy: 0.0167479 43 0.908853 0.485903
Accuracy: 0.016928 44 0.909355 0.485698
Accuracy: 0.0171088 45 0.909857 0.485493
Accuracy: 0.0172902 46 0.910359 0.485288
Accuracy: 0.0174723 47 0.91086 0.485083
Accuracy: 0.017655 48 0.911362 0.484878
Accuracy: 0.0178384 49 0.911864 0.484673
Accuracy: 0.0180224 50 0.912366 0.484468
Accuracy: 0.0182071 51 0.912868 0.484263
Accuracy: 0.0183924 52 0.91337 0.484058
Accuracy: 0.0185784 53 0.913872 0.483853
Accuracy: 0.0187651 54 0.914374 0.483648
Accuracy: 0.0189524 55 0.914875 0.483443
Accuracy: 0.0191403 56 0.915377 0.483238
Accuracy: 0.0193289 57 0.915879 0.483033
Accuracy: 0.0195182 58 0.916381 0.482828
Accuracy: 0.0197083 59 0.916883 0.482623
Accuracy: 0.019899 60 0.917385 0.482417
Accuracy: 0.0200903 61 0.917887 0.482212
Accuracy: 0.0202824 62 0.918389 0.482007
Accuracy: 0.0204751 63 0.91889 0.481802
Accuracy: 0.0206686 64 0.919392 0.481597
Accuracy: 0.0208628 65 0.919894 0.481392
Accuracy: 0.0210577 66 0.920396 0.481187
Accuracy: 0.0212535 67 0.920898 0.480982
Accuracy: 0.0214501 68 0.9214 0.480777
Accuracy: 0.0216475 69 0.921902 0.480572
Accuracy: 0.0218457 70 0.922404 0.480367
Accuracy: 0.0220448 71 0.922905 0.480162
Accuracy: 0.0222447 72 0.923407 0.479957
Accuracy: 0.0224456 73 0.923909 0.479752
Accuracy: 0.0226473 74 0.924411 0.479547
Accuracy: 0.0228499 75 0.924913 0.479342
Accuracy: 0.0230535 76 0.925415 0.479137
Accuracy: 0.0232582 77 0.925917 0.478932
Accuracy: 0.0234641 78 0.926419 0.478727
Accuracy: 0.0236711 79 0.926921 0.478522
Accuracy: 0.0238793 80 0.927422 0.478317
Accuracy: 0.0240888 81 0.927924 0.478112
Accuracy: 0.0242995 82 0.928426 0.477907
Accuracy: 0.0245114 83 0.928928 0.477702
Accuracy: 0.0247247 84 0.92943 0.477497
Accuracy: 0.0249396 85 0.929932 0.477292
Accuracy: 0.0251559 86 0.930434 0.477087
Accuracy: 0.0253737 87 0.930936 0.476882
Accuracy: 0.0255929 88 0.931437 0.476677
Accuracy: 0.0258137 89 0.931939 0.476472
Accuracy: 0.0260361 90 0.932441 0.476267
Accuracy: 0.0262602 91 0.932943 0.476062
Accuracy: 0.026486 92 0.933445 0.475857
Accuracy: 0.0267138 93 0.933947 0.475652
Accuracy: 0.0269433 94 0.934449 0.475447
Accuracy: 0.0271748 95 0.934951 0.475242
Accuracy: 0.0274081 96 0.935452 0.475037
Accuracy: 0.0276438 97 0.935954 0.474832
Accuracy: 0.0278816 98 0.936456 0.474627
Accuracy: 0.0281214 99 0.936958 0.474422
In [58]:
Out[58]:
0.31892276
In [ ]:
Content source: danielfreeman11/convex-nets
Similar notebooks: