In [1]:
import numpy as np
import os
import cv2
import matplotlib.cm as cm
import matplotlib.pyplot as plt
%matplotlib inline
os.chdir("/home/mckc/Image Processing/yalefaces")
face_cascade = cv2.CascadeClassifier('/home/mckc/Downloads/opencv-2.4.13/data/haarcascades_GPU/haarcascade_frontalface_default.xml')
In [2]:
def load_data(train):
    from PIL import Image
    print('The input of the train is', train.shape)
    X_tr = np.zeros((1, 243, 320), dtype=np.uint8)   # placeholder row, dropped below
    Y_tr = []
    X_tst = np.zeros((1, 243, 320), dtype=np.uint8)  # placeholder row, dropped below
    Y_tst = []
    for i in train.values[0:, 0]:
        # the 'happy' and 'sad' expressions are held out as the test set
        if ('happy' in i) or ('sad' in i):
            X_tst = np.vstack((X_tst, np.array(Image.open(i)).reshape(1, 243, 320)))
            Y_tst = np.append(Y_tst, i[7:8])   # subject label taken from the file name
        else:
            X_tr = np.vstack((X_tr, np.array(Image.open(i)).reshape(1, 243, 320)))
            Y_tr = np.append(Y_tr, i[7:8])
    print('The input of the train is', X_tr.shape, 'and target is', Y_tr.shape)
    print('The input of the test is', X_tst.shape, 'and target is', Y_tst.shape)
    # drop the all-zero placeholder rows
    X_tr = X_tr[1:, :, :]
    X_tst = X_tst[1:, :, :]
    print('The input of the train is', X_tr.shape, 'and target is', Y_tr.shape)
    print('The input of the test is', X_tst.shape, 'and target is', Y_tst.shape)
    return X_tr, X_tst, Y_tr, Y_tst
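Repeated np.vstack calls copy the growing array on every iteration; collecting the frames in a list and stacking once at the end does the same job in a single pass. A minimal sketch of that variant (load_data_fast is a hypothetical name, the CSV layout and the 'happy'/'sad' split rule are assumed to be the same as above):
def load_data_fast(train):
    from PIL import Image
    X_tr, Y_tr, X_tst, Y_tst = [], [], [], []
    for name in train.values[:, 0]:
        img = np.array(Image.open(name))           # 243 x 320 grayscale frame
        label = name[7:8]                          # subject label taken from the file name
        if ('happy' in name) or ('sad' in name):   # held-out expressions form the test set
            X_tst.append(img)
            Y_tst.append(label)
        else:
            X_tr.append(img)
            Y_tr.append(label)
    return (np.stack(X_tr), np.stack(X_tst),
            np.asarray(Y_tr), np.asarray(Y_tst))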
In [3]:
def simulate(X, Y):
    import scipy as sp
    from scipy import misc
    complete = np.zeros((1, 243, 320), dtype=np.uint8)   # placeholder row, dropped below
    Y_complete = []
    for i in range(len(X)):
        # the original image plus rotations of +/-5, +/-10 and +/-15 degrees
        complete = np.vstack((complete, X[i, :, :].reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(X[i, :, :], angle=5).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(X[i, :, :], angle=10).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(X[i, :, :], angle=15).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(X[i, :, :], angle=-5).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(X[i, :, :], angle=-15).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(X[i, :, :], angle=-10).reshape(1, 243, 320)))
        # the same seven variants for the horizontally flipped image
        rotated = np.fliplr(X[i, :, :])
        complete = np.vstack((complete, sp.misc.imrotate(rotated, angle=5).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(rotated, angle=10).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(rotated, angle=15).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(rotated, angle=-5).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(rotated, angle=-15).reshape(1, 243, 320)))
        complete = np.vstack((complete, sp.misc.imrotate(rotated, angle=-10).reshape(1, 243, 320)))
        complete = np.vstack((complete, rotated.reshape(1, 243, 320)))
        # 14 augmented samples per source image, all sharing the same label
        Y_complete = np.append(Y_complete, [Y[i]] * 14)
    complete = complete[1:, :, :]
    return complete, Y_complete
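The thirteen near-identical np.vstack calls above can be folded into a loop over the rotation angles, and scipy.misc.imrotate has since been removed from SciPy. A compact sketch of the same augmentation, under the assumption that scipy.ndimage.rotate with reshape=False is an acceptable stand-in for imrotate (it preserves the input dtype instead of rescaling):
from scipy.ndimage import rotate

def simulate_compact(X, Y, angles=(0, 5, 10, 15, -5, -10, -15)):
    # Same idea as simulate(): each image and its mirror, rotated by each angle
    images, labels = [], []
    for img, label in zip(X, Y):
        for base in (img, np.fliplr(img)):
            for angle in angles:
                images.append(rotate(base, angle, reshape=False, mode='nearest'))
                labels.append(label)
    return np.stack(images), np.asarray(labels)   # 14 samples per input image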
In [4]:
def extract_faces(X_tr, Y_tr):
    from skimage.transform import resize
    import time
    start_time = time.clock()
    all_faces = np.zeros((1, 96, 96), dtype=np.uint8)   # placeholder row, dropped below
    missing = []
    multiple = []
    Y = []
    for i in range(len(X_tr)):
        faces = face_cascade.detectMultiScale(X_tr[i, :, :], scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))
        n_faces = len(faces)
        if n_faces == 1:
            for (x, y, w, h) in faces:
                # crop the detected bounding box and resize it to 96x96
                fac = np.array(X_tr[i, :, :])[y:(y + h), x:(x + w)]
                out = resize(fac, (96, 96)).reshape((1, 96, 96))
                all_faces = np.vstack((all_faces, out))
                Y = np.append(Y, Y_tr[i])
        elif n_faces > 1:
            print ('There are multiple faces for index %d and with length %d' % (i, n_faces))
            multiple = np.append(multiple, i)
        else:
            print ('The face is missing for index %d' % i)
            missing = np.append(missing, i)
    all_faces = all_faces[1:, :, :]
    print all_faces.shape
    print time.clock() - start_time, "seconds"
    return all_faces, missing, multiple, Y
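Before training on the crops it is worth eyeballing a few of them, since the cascade can fire on a non-face region even when it reports exactly one detection. A small sanity-check helper (show_faces is a hypothetical name, not part of the notebook; it reuses the plt and cm imports from the first cell):
def show_faces(faces, labels, n=5):
    # Display the first n extracted 96x96 crops with their subject labels
    fig, axes = plt.subplots(1, n, figsize=(2 * n, 2))
    for ax, img, lab in zip(axes, faces[:n], labels[:n]):
        ax.imshow(img, cmap=cm.gray)
        ax.set_title(str(lab))
        ax.axis('off')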
In [9]:
import pandas as pd
train = pd.read_csv('/home/mckc/Image Processing/yalefaces/train.csv')
X_tr,X_tst,Y_tr,Y_tst = load_data(train)
The input of the train is (166, 2)
The input of the train is (137, 243, 320) and target is (136,)
The input of the test is (31, 243, 320) and target is (30,)
The input of the train is (136, 243, 320) and target is (136,)
The input of the test is (30, 243, 320) and target is (30,)
In [10]:
import time
start_time = time.clock()
X_train,Y_train = simulate(X_tr,Y_tr)
print X_train.shape,Y_train.shape
print time.clock() - start_time, "seconds"
(1904, 243, 320) (1904,)
23.039562 seconds
In [11]:
X,missing,multiple,Y = extract_faces(X_train[:,:,:],Y_train)
X_test,missing_test,multiple_test,Y_test = extract_faces(X_tst,Y_tst)
The face is missing for index 177
The face is missing for index 339
The face is missing for index 341
The face is missing for index 345
The face is missing for index 347
The face is missing for index 397
The face is missing for index 401
The face is missing for index 423
The face is missing for index 425
The face is missing for index 429
The face is missing for index 431
The face is missing for index 471
The face is missing for index 557
The face is missing for index 591
The face is missing for index 593
The face is missing for index 597
The face is missing for index 717
The face is missing for index 725
There are multiple faces for index 734 and with length 2
There are multiple faces for index 748 and with length 2
The face is missing for index 809
The face is missing for index 927
The face is missing for index 935
There are multiple faces for index 974 and with length 2
The face is missing for index 1285
The face is missing for index 1299
The face is missing for index 1319
The face is missing for index 1327
The face is missing for index 1333
The face is missing for index 1341
The face is missing for index 1346
The face is missing for index 1347
The face is missing for index 1355
The face is missing for index 1361
The face is missing for index 1369
The face is missing for index 1375
The face is missing for index 1383
The face is missing for index 1389
The face is missing for index 1397
There are multiple faces for index 1433 and with length 2
There are multiple faces for index 1434 and with length 2
There are multiple faces for index 1436 and with length 2
The face is missing for index 1501
The face is missing for index 1529
The face is missing for index 1531
The face is missing for index 1535
The face is missing for index 1537
The face is missing for index 1599
The face is missing for index 1600
The face is missing for index 1601
The face is missing for index 1602
The face is missing for index 1604
The face is missing for index 1605
The face is missing for index 1727
The face is missing for index 1728
The face is missing for index 1731
The face is missing for index 1817
The face is missing for index 1851
(1846, 96, 96)
337.989999 seconds
(30, 96, 96)
4.77485 seconds
In [23]:
# Normalising: the resized face crops are floats in [0, 1], so subtracting 0.5 centres them around zero
X = X - 0.5
X_test = X_test - 0.5
print X.mean(),X_test.mean()
-0.0975078792702 -0.0834656999999
In [12]:
X_tr.dtype
Out[12]:
dtype('uint8')
In [8]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.cross_validation import cross_val_score
scores = list()
scores_std = list()
n_trees = [10, 25, 50, 100, 250]
for n_tree in n_trees:
    print(n_tree)
    recognizer = RandomForestClassifier(n_tree, verbose=0, oob_score=True, n_jobs=5)
    # cross-validate each forest size on the flattened 96x96 faces
    score = cross_val_score(recognizer, X.reshape(-1, 9216).astype(np.uint8), Y)
    scores.append(np.mean(score))
    scores_std.append(np.std(score))
10
C:\Users\Omar Saleem Mohammed\AppData\Local\Continuum\Anaconda2\lib\site-packages\sklearn\ensemble\forest.py:403: UserWarning: Some inputs do not have OOB scores. This probably means too few trees were used to compute any reliable oob estimates.
warn("Some inputs do not have OOB scores. "
25
50
100
250
In [9]:
sc_array = np.array(scores)
std_array = np.array(scores_std)
print('Score: ', sc_array)
print('Std : ', std_array)
plt.figure(figsize=(4,3))
plt.plot(n_trees, scores)
plt.plot(n_trees, sc_array + std_array, 'b--')
plt.plot(n_trees, sc_array - std_array, 'b--')
plt.ylabel('CV score')
plt.xlabel('# of trees')
plt.savefig('cv_trees.png')
#plt.show()
('Score: ', array([ 0.48219103, 0.44655124, 0.47298583, 0.45519434, 0.45140036]))
('Std : ', array([ 0.01421329, 0.03113971, 0.04221252, 0.03717804, 0.03799646]))
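The manual loop over n_trees is a one-parameter grid search; GridSearchCV runs the same cross-validation and additionally refits the best setting on the full data. A sketch of that alternative, assuming the pre-0.18 sklearn.grid_search module that matches the sklearn.cross_validation import used above:
from sklearn.grid_search import GridSearchCV   # sklearn.model_selection.GridSearchCV in newer releases

param_grid = {'n_estimators': [10, 25, 50, 100, 250]}
grid = GridSearchCV(RandomForestClassifier(n_jobs=5), param_grid)   # 3-fold CV by default
grid.fit(X.reshape(-1, 9216).astype(np.uint8), Y)
print grid.best_params_, grid.best_score_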
In [50]:
import lasagne
from lasagne import layers
from lasagne.updates import nesterov_momentum, adam, sgd, adadelta
from nolearn.lasagne import NeuralNet
from nolearn.lasagne import visualize
net1 = NeuralNet(
    layers=[('input', layers.InputLayer),
            ('hidden', layers.DenseLayer),
            ('output', layers.DenseLayer),
            ],
    # layer parameters:
    input_shape=(None, 1, 96, 96),
    hidden_num_units=200,  # number of units in 'hidden' layer
    hidden_nonlinearity=lasagne.nonlinearities.rectify,  # ReLU hidden units (the Lasagne default)
    output_nonlinearity=lasagne.nonlinearities.softmax,
    output_num_units=15,  # 15 target values for the 15 subjects
    # optimization method:
    update=adam,
    update_learning_rate=0.1,
    #update_momentum=0.9,
    max_epochs=1500,
    verbose=1,
)
net1.fit(X.reshape(-1, 1, 96, 96).astype(np.uint8), Y.astype(np.uint8))
#net1.fit(X_train.reshape(-1,1,243,320).astype(np.uint8), Y_train.astype(np.uint8))
# Neural Network with 1846415 learnable parameters
## Layer information
# name size
--- ------ -------
0 input 1x96x96
1 hidden 200
2 output 15
epoch trn loss val loss trn/val valid acc dur
------- ---------- ---------- --------- ----------- -----
1 9.86761 0.78258 12.60914 0.60916 1.09s
2 1.60566 1.44890 1.10819 0.60916 1.22s
3 1.19083 0.90324 1.31839 0.60916 1.11s
4 0.86612 0.75773 1.14305 0.60916 1.20s
5 0.77344 0.71898 1.07575 0.60916 1.11s
6 0.73997 0.70515 1.04938 0.60916 1.15s
7 0.72709 0.69857 1.04082 0.60916 1.11s
8 0.72289 0.69453 1.04083 0.60916 1.12s
9 0.72111 0.69142 1.04294 0.60916 1.23s
10 0.71927 0.68888 1.04413 0.60916 1.21s
11 0.71720 0.68681 1.04424 0.60916 1.12s
12 0.71531 0.68515 1.04401 0.60916 1.30s
13 0.71380 0.68378 1.04390 0.60916 1.36s
14 0.71264 0.68263 1.04397 0.60916 1.29s
15 0.71170 0.68163 1.04411 0.60916 1.14s
16 0.71088 0.68077 1.04423 0.60916 1.09s
17 0.71015 0.68001 1.04433 0.60916 1.10s
18 0.70951 0.67934 1.04441 0.60916 1.08s
19 0.70894 0.67875 1.04448 0.60916 1.14s
20 0.70844 0.67823 1.04455 0.60916 1.05s
21 0.70800 0.67776 1.04462 0.60916 1.06s
22 0.70760 0.67734 1.04468 0.60916 1.08s
23 0.70724 0.67695 1.04474 0.60916 1.08s
24 0.70692 0.67661 1.04480 0.60916 1.06s
25 0.70663 0.67629 1.04485 0.60916 1.10s
26 0.70636 0.67600 1.04490 0.60916 1.15s
27 0.70611 0.67574 1.04495 0.60916 1.08s
28 0.70589 0.67550 1.04499 0.60916 1.12s
29 0.70568 0.67527 1.04503 0.60916 1.06s
30 0.70549 0.67507 1.04507 0.60916 1.08s
... [epochs 31-1214 omitted: trn loss edges down from 0.70532 to 0.70169, val loss from 0.67487 to 0.67076, trn/val stays near 1.046, valid acc stays at 0.60916 throughout, roughly 1.1-2.1s per epoch]
1215 0.70169 0.67076 1.04611 0.60916 1.21s
1216 0.70169 0.67076 1.04611 0.60916 1.23s
1217 0.70169 0.67076 1.04611 0.60916 1.17s
1218 0.70169 0.67076 1.04611 0.60916 1.13s
1219 0.70169 0.67076 1.04611 0.60916 1.17s
1220 0.70169 0.67076 1.04611 0.60916 1.13s
1221 0.70169 0.67076 1.04611 0.60916 1.13s
1222 0.70169 0.67076 1.04611 0.60916 1.13s
1223 0.70169 0.67076 1.04611 0.60916 1.15s
1224 0.70169 0.67076 1.04611 0.60916 1.10s
1225 0.70169 0.67076 1.04611 0.60916 1.10s
1226 0.70169 0.67076 1.04611 0.60916 1.11s
1227 0.70169 0.67076 1.04611 0.60916 1.16s
1228 0.70169 0.67076 1.04611 0.60916 1.10s
1229 0.70169 0.67076 1.04611 0.60916 1.11s
1230 0.70169 0.67076 1.04611 0.60916 1.07s
1231 0.70169 0.67076 1.04611 0.60916 1.16s
1232 0.70169 0.67076 1.04611 0.60916 1.10s
1233 0.70169 0.67076 1.04611 0.60916 1.30s
1234 0.70169 0.67076 1.04611 0.60916 1.13s
1235 0.70169 0.67076 1.04611 0.60916 1.09s
1236 0.70169 0.67076 1.04611 0.60916 1.21s
1237 0.70169 0.67076 1.04611 0.60916 1.17s
1238 0.70169 0.67076 1.04611 0.60916 1.12s
1239 0.70169 0.67076 1.04611 0.60916 1.12s
1240 0.70169 0.67076 1.04611 0.60916 1.13s
1241 0.70169 0.67076 1.04611 0.60916 1.32s
1242 0.70169 0.67076 1.04611 0.60916 1.32s
1243 0.70169 0.67076 1.04611 0.60916 1.13s
1244 0.70169 0.67076 1.04611 0.60916 1.07s
1245 0.70169 0.67076 1.04611 0.60916 1.16s
1246 0.70169 0.67076 1.04611 0.60916 1.19s
1247 0.70169 0.67076 1.04611 0.60916 1.18s
1248 0.70169 0.67076 1.04611 0.60916 1.18s
1249 0.70169 0.67076 1.04611 0.60916 1.23s
1250 0.70169 0.67076 1.04611 0.60916 1.15s
1251 0.70169 0.67076 1.04611 0.60916 1.15s
1252 0.70169 0.67076 1.04611 0.60916 1.07s
1253 0.70169 0.67076 1.04611 0.60916 1.06s
1254 0.70169 0.67076 1.04611 0.60916 1.15s
1255 0.70169 0.67076 1.04611 0.60916 1.17s
1256 0.70169 0.67076 1.04611 0.60916 1.18s
1257 0.70169 0.67076 1.04611 0.60916 1.15s
1258 0.70169 0.67076 1.04611 0.60916 1.13s
1259 0.70169 0.67076 1.04611 0.60916 1.08s
1260 0.70169 0.67076 1.04611 0.60916 1.12s
1261 0.70169 0.67076 1.04611 0.60916 1.13s
1262 0.70169 0.67076 1.04611 0.60916 1.08s
1263 0.70169 0.67076 1.04611 0.60916 1.07s
1264 0.70169 0.67076 1.04611 0.60916 1.07s
1265 0.70169 0.67076 1.04611 0.60916 1.10s
1266 0.70169 0.67076 1.04611 0.60916 1.06s
1267 0.70169 0.67076 1.04611 0.60916 1.10s
1268 0.70169 0.67076 1.04611 0.60916 1.07s
1269 0.70169 0.67076 1.04611 0.60916 1.09s
1270 0.70169 0.67076 1.04611 0.60916 1.06s
1271 0.70169 0.67076 1.04611 0.60916 1.07s
1272 0.70169 0.67076 1.04611 0.60916 1.08s
1273 0.70169 0.67076 1.04611 0.60916 1.08s
1274 0.70169 0.67076 1.04611 0.60916 1.12s
1275 0.70169 0.67076 1.04611 0.60916 1.08s
1276 0.70169 0.67076 1.04611 0.60916 1.09s
1277 0.70169 0.67076 1.04611 0.60916 1.15s
1278 0.70169 0.67076 1.04611 0.60916 1.09s
1279 0.70169 0.67076 1.04611 0.60916 1.14s
1280 0.70169 0.67076 1.04611 0.60916 1.10s
1281 0.70169 0.67076 1.04611 0.60916 1.47s
1282 0.70169 0.67076 1.04611 0.60916 1.27s
1283 0.70169 0.67076 1.04611 0.60916 1.23s
1284 0.70169 0.67076 1.04611 0.60916 1.11s
1285 0.70169 0.67076 1.04611 0.60916 1.17s
1286 0.70169 0.67076 1.04611 0.60916 1.23s
1287 0.70169 0.67076 1.04611 0.60916 1.37s
1288 0.70169 0.67076 1.04611 0.60916 1.13s
1289 0.70169 0.67076 1.04611 0.60916 1.28s
1290 0.70169 0.67076 1.04611 0.60916 1.14s
1291 0.70169 0.67076 1.04611 0.60916 1.34s
1292 0.70169 0.67076 1.04611 0.60916 1.18s
1293 0.70169 0.67076 1.04611 0.60916 1.14s
1294 0.70169 0.67076 1.04611 0.60916 1.27s
1295 0.70169 0.67076 1.04611 0.60916 1.17s
1296 0.70169 0.67076 1.04611 0.60916 1.14s
1297 0.70169 0.67076 1.04611 0.60916 1.20s
1298 0.70169 0.67076 1.04611 0.60916 1.22s
1299 0.70169 0.67076 1.04611 0.60916 1.17s
1300 0.70169 0.67076 1.04611 0.60916 1.16s
1301 0.70169 0.67076 1.04611 0.60916 1.19s
1302 0.70169 0.67076 1.04611 0.60916 1.50s
1303 0.70169 0.67076 1.04611 0.60916 1.47s
1304 0.70169 0.67076 1.04611 0.60916 1.41s
1305 0.70169 0.67076 1.04611 0.60916 1.44s
1306 0.70169 0.67076 1.04611 0.60916 1.39s
1307 0.70169 0.67076 1.04611 0.60916 1.38s
1308 0.70169 0.67076 1.04611 0.60916 1.68s
1309 0.70169 0.67076 1.04611 0.60916 1.36s
1310 0.70169 0.67076 1.04611 0.60916 1.14s
1311 0.70169 0.67076 1.04611 0.60916 1.14s
1312 0.70169 0.67076 1.04611 0.60916 1.13s
1313 0.70169 0.67076 1.04611 0.60916 1.08s
1314 0.70169 0.67076 1.04611 0.60916 1.13s
1315 0.70169 0.67076 1.04611 0.60916 1.07s
1316 0.70169 0.67076 1.04611 0.60916 1.12s
1317 0.70169 0.67076 1.04611 0.60916 1.12s
1318 0.70169 0.67076 1.04611 0.60916 1.17s
1319 0.70169 0.67076 1.04611 0.60916 1.11s
1320 0.70169 0.67076 1.04611 0.60916 1.16s
1321 0.70169 0.67076 1.04611 0.60916 1.14s
1322 0.70169 0.67076 1.04611 0.60916 1.11s
1323 0.70169 0.67076 1.04611 0.60916 1.12s
1324 0.70169 0.67076 1.04611 0.60916 1.07s
1325 0.70169 0.67076 1.04611 0.60916 1.12s
1326 0.70169 0.67076 1.04611 0.60916 1.19s
1327 0.70169 0.67076 1.04611 0.60916 1.20s
1328 0.70169 0.67076 1.04611 0.60916 1.23s
1329 0.70169 0.67076 1.04611 0.60916 1.15s
1330 0.70169 0.67076 1.04611 0.60916 1.13s
1331 0.70169 0.67076 1.04611 0.60916 1.19s
1332 0.70169 0.67076 1.04611 0.60916 1.20s
1333 0.70169 0.67076 1.04611 0.60916 1.08s
1334 0.70169 0.67076 1.04611 0.60916 1.09s
1335 0.70169 0.67076 1.04611 0.60916 1.09s
1336 0.70169 0.67076 1.04611 0.60916 1.09s
1337 0.70169 0.67076 1.04611 0.60916 1.16s
1338 0.70169 0.67076 1.04611 0.60916 1.13s
1339 0.70169 0.67076 1.04611 0.60916 1.11s
1340 0.70169 0.67076 1.04611 0.60916 1.14s
1341 0.70169 0.67076 1.04611 0.60916 1.08s
1342 0.70169 0.67076 1.04611 0.60916 1.18s
1343 0.70169 0.67076 1.04611 0.60916 1.21s
1344 0.70169 0.67076 1.04611 0.60916 1.16s
1345 0.70169 0.67076 1.04611 0.60916 1.29s
1346 0.70169 0.67076 1.04611 0.60916 1.14s
1347 0.70169 0.67076 1.04611 0.60916 1.13s
1348 0.70169 0.67076 1.04611 0.60916 1.13s
1349 0.70169 0.67076 1.04611 0.60916 1.12s
1350 0.70169 0.67076 1.04611 0.60916 1.19s
1351 0.70169 0.67076 1.04611 0.60916 1.18s
1352 0.70169 0.67076 1.04611 0.60916 1.09s
1353 0.70169 0.67076 1.04611 0.60916 1.11s
1354 0.70169 0.67076 1.04611 0.60916 1.19s
1355 0.70169 0.67076 1.04611 0.60916 1.25s
1356 0.70169 0.67076 1.04611 0.60916 1.11s
1357 0.70169 0.67076 1.04611 0.60916 1.21s
1358 0.70169 0.67076 1.04611 0.60916 1.37s
1359 0.70169 0.67076 1.04611 0.60916 1.19s
1360 0.70169 0.67076 1.04611 0.60916 1.20s
1361 0.70169 0.67076 1.04611 0.60916 1.17s
1362 0.70169 0.67076 1.04611 0.60916 1.22s
1363 0.70169 0.67076 1.04611 0.60916 1.18s
1364 0.70169 0.67076 1.04611 0.60916 1.16s
1365 0.70169 0.67076 1.04611 0.60916 1.13s
1366 0.70169 0.67076 1.04611 0.60916 1.15s
1367 0.70169 0.67076 1.04611 0.60916 1.10s
1368 0.70169 0.67076 1.04611 0.60916 1.14s
1369 0.70169 0.67076 1.04611 0.60916 1.09s
1370 0.70169 0.67076 1.04611 0.60916 1.11s
1371 0.70169 0.67076 1.04611 0.60916 1.11s
1372 0.70169 0.67076 1.04611 0.60916 1.08s
1373 0.70169 0.67076 1.04611 0.60916 1.11s
1374 0.70169 0.67076 1.04611 0.60916 1.10s
1375 0.70169 0.67076 1.04611 0.60916 1.08s
1376 0.70169 0.67076 1.04611 0.60916 1.11s
1377 0.70169 0.67076 1.04611 0.60916 1.16s
1378 0.70169 0.67076 1.04611 0.60916 1.09s
1379 0.70169 0.67076 1.04611 0.60916 1.11s
1380 0.70169 0.67076 1.04611 0.60916 1.10s
1381 0.70169 0.67076 1.04611 0.60916 1.15s
1382 0.70169 0.67076 1.04611 0.60916 1.26s
1383 0.70169 0.67076 1.04611 0.60916 1.34s
1384 0.70169 0.67076 1.04611 0.60916 1.69s
1385 0.70169 0.67076 1.04611 0.60916 1.36s
1386 0.70169 0.67076 1.04611 0.60916 1.28s
1387 0.70169 0.67076 1.04611 0.60916 1.24s
1388 0.70169 0.67076 1.04611 0.60916 1.23s
1389 0.70169 0.67076 1.04611 0.60916 1.24s
1390 0.70169 0.67076 1.04611 0.60916 1.23s
1391 0.70169 0.67076 1.04611 0.60916 1.23s
1392 0.70169 0.67076 1.04611 0.60916 1.25s
1393 0.70169 0.67076 1.04611 0.60916 1.29s
1394 0.70169 0.67076 1.04611 0.60916 1.25s
1395 0.70169 0.67076 1.04611 0.60916 1.27s
1396 0.70169 0.67076 1.04611 0.60916 1.40s
1397 0.70169 0.67076 1.04611 0.60916 1.13s
1398 0.70169 0.67076 1.04611 0.60916 1.07s
1399 0.70169 0.67076 1.04611 0.60916 1.09s
1400 0.70169 0.67076 1.04611 0.60916 1.10s
1401 0.70169 0.67076 1.04611 0.60916 1.10s
1402 0.70169 0.67076 1.04611 0.60916 1.09s
1403 0.70169 0.67076 1.04611 0.60916 1.11s
1404 0.70169 0.67076 1.04611 0.60916 1.08s
1405 0.70169 0.67076 1.04611 0.60916 1.10s
1406 0.70169 0.67076 1.04611 0.60916 1.08s
1407 0.70169 0.67076 1.04611 0.60916 1.14s
1408 0.70169 0.67076 1.04611 0.60916 1.13s
1409 0.70169 0.67076 1.04611 0.60916 1.11s
1410 0.70169 0.67076 1.04611 0.60916 1.10s
1411 0.70169 0.67076 1.04611 0.60916 1.10s
1412 0.70169 0.67076 1.04611 0.60916 1.15s
1413 0.70169 0.67076 1.04611 0.60916 1.09s
1414 0.70169 0.67076 1.04611 0.60916 1.11s
1415 0.70169 0.67076 1.04611 0.60916 1.12s
1416 0.70169 0.67076 1.04611 0.60916 1.17s
1417 0.70169 0.67076 1.04611 0.60916 1.11s
1418 0.70169 0.67076 1.04611 0.60916 1.09s
1419 0.70169 0.67076 1.04611 0.60916 1.09s
1420 0.70169 0.67076 1.04611 0.60916 1.11s
1421 0.70169 0.67076 1.04611 0.60916 1.17s
1422 0.70169 0.67076 1.04611 0.60916 1.10s
1423 0.70169 0.67076 1.04611 0.60916 1.09s
1424 0.70169 0.67076 1.04611 0.60916 1.10s
1425 0.70169 0.67076 1.04611 0.60916 1.11s
1426 0.70169 0.67076 1.04611 0.60916 1.08s
1427 0.70169 0.67076 1.04611 0.60916 1.11s
1428 0.70169 0.67076 1.04611 0.60916 1.09s
1429 0.70169 0.67076 1.04611 0.60916 1.11s
1430 0.70169 0.67076 1.04611 0.60916 1.09s
1431 0.70169 0.67076 1.04611 0.60916 1.08s
1432 0.70169 0.67076 1.04611 0.60916 1.10s
1433 0.70169 0.67076 1.04611 0.60916 1.11s
1434 0.70169 0.67076 1.04611 0.60916 1.09s
1435 0.70169 0.67076 1.04611 0.60916 1.10s
1436 0.70169 0.67076 1.04611 0.60916 1.07s
1437 0.70169 0.67076 1.04611 0.60916 1.09s
1438 0.70169 0.67076 1.04611 0.60916 1.09s
1439 0.70169 0.67076 1.04611 0.60916 1.09s
1440 0.70169 0.67076 1.04611 0.60916 1.07s
1441 0.70169 0.67076 1.04611 0.60916 1.07s
1442 0.70169 0.67076 1.04611 0.60916 1.08s
1443 0.70169 0.67076 1.04611 0.60916 1.10s
1444 0.70169 0.67076 1.04611 0.60916 1.08s
1445 0.70169 0.67076 1.04611 0.60916 1.13s
1446 0.70169 0.67076 1.04611 0.60916 1.14s
1447 0.70169 0.67076 1.04611 0.60916 1.11s
1448 0.70169 0.67076 1.04611 0.60916 1.11s
1449 0.70169 0.67076 1.04611 0.60916 1.10s
1450 0.70169 0.67076 1.04611 0.60916 1.08s
1451 0.70169 0.67076 1.04611 0.60916 1.15s
1452 0.70169 0.67076 1.04611 0.60916 1.13s
1453 0.70169 0.67076 1.04611 0.60916 1.08s
1454 0.70169 0.67076 1.04611 0.60916 1.19s
1455 0.70169 0.67076 1.04611 0.60916 1.10s
1456 0.70169 0.67076 1.04611 0.60916 1.09s
1457 0.70169 0.67076 1.04611 0.60916 1.28s
1458 0.70169 0.67076 1.04611 0.60916 1.27s
1459 0.70169 0.67076 1.04611 0.60916 1.13s
1460 0.70169 0.67076 1.04611 0.60916 1.19s
1461 0.70169 0.67076 1.04611 0.60916 1.22s
1462 0.70169 0.67076 1.04611 0.60916 1.09s
1463 0.70169 0.67076 1.04611 0.60916 1.08s
1464 0.70169 0.67076 1.04611 0.60916 1.09s
1465 0.70169 0.67076 1.04611 0.60916 1.06s
1466 0.70169 0.67076 1.04611 0.60916 1.24s
1467 0.70169 0.67076 1.04611 0.60916 1.10s
1468 0.70169 0.67076 1.04611 0.60916 1.16s
1469 0.70169 0.67076 1.04611 0.60916 1.11s
1470 0.70169 0.67076 1.04611 0.60916 1.09s
1471 0.70169 0.67076 1.04611 0.60916 1.10s
1472 0.70169 0.67076 1.04611 0.60916 1.11s
1473 0.70169 0.67076 1.04611 0.60916 1.11s
1474 0.70169 0.67076 1.04611 0.60916 1.08s
1475 0.70169 0.67076 1.04611 0.60916 1.08s
1476 0.70169 0.67076 1.04611 0.60916 1.11s
1477 0.70169 0.67076 1.04611 0.60916 1.11s
1478 0.70169 0.67076 1.04611 0.60916 1.11s
1479 0.70169 0.67076 1.04611 0.60916 1.09s
1480 0.70169 0.67076 1.04611 0.60916 1.09s
1481 0.70169 0.67076 1.04611 0.60916 1.08s
1482 0.70169 0.67076 1.04611 0.60916 1.10s
1483 0.70169 0.67076 1.04611 0.60916 1.08s
1484 0.70169 0.67076 1.04611 0.60916 1.08s
1485 0.70169 0.67076 1.04611 0.60916 1.08s
1486 0.70169 0.67076 1.04611 0.60916 1.07s
1487 0.70169 0.67076 1.04611 0.60916 1.08s
1488 0.70169 0.67076 1.04611 0.60916 1.11s
1489 0.70169 0.67076 1.04611 0.60916 1.10s
1490 0.70169 0.67076 1.04611 0.60916 1.09s
1491 0.70169 0.67076 1.04611 0.60916 1.08s
1492 0.70169 0.67076 1.04611 0.60916 1.08s
1493 0.70169 0.67076 1.04611 0.60916 1.09s
1494 0.70169 0.67076 1.04611 0.60916 1.09s
1495 0.70169 0.67076 1.04611 0.60916 1.08s
1496 0.70169 0.67076 1.04611 0.60916 1.09s
1497 0.70169 0.67076 1.04611 0.60916 1.08s
1498 0.70169 0.67076 1.04611 0.60916 1.10s
1499 0.70169 0.67076 1.04611 0.60916 1.14s
1500 0.70169 0.67076 1.04611 0.60916 1.10s
Out[50]:
NeuralNet(X_tensor_type=None,
batch_iterator_test=<nolearn.lasagne.base.BatchIterator object at 0x000000001AE3EBA8>,
batch_iterator_train=<nolearn.lasagne.base.BatchIterator object at 0x000000001AE3EA90>,
check_input=True, custom_scores=None,
hidden_nonlinearity=<function rectify at 0x000000001A6EDB38>,
hidden_num_units=200, input_shape=(None, 1, 96, 96),
layers=[('input', <class 'lasagne.layers.input.InputLayer'>), ('hidden', <class 'lasagne.layers.dense.DenseLayer'>), ('output', <class 'lasagne.layers.dense.DenseLayer'>)],
loss=None, max_epochs=1500, more_params={},
objective=<function objective at 0x000000001AE3A828>,
objective_loss_function=<function categorical_crossentropy at 0x000000001AD6DAC8>,
on_batch_finished=[],
on_epoch_finished=[<nolearn.lasagne.handlers.PrintLog instance at 0x000000007F733488>],
on_training_finished=[],
on_training_started=[<nolearn.lasagne.handlers.PrintLayerInfo instance at 0x00000000838D2C08>],
output_nonlinearity=<function softmax at 0x000000001A6EDA58>,
output_num_units=15, regression=False, scores_train=[],
scores_valid=[],
train_split=<nolearn.lasagne.base.TrainSplit object at 0x000000001AE3EBE0>,
update=<function adam at 0x000000001AD78908>,
update_learning_rate=0.1, use_label_encoder=False, verbose=1,
y_tensor_type=TensorType(int32, vector))
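The fitted network can now be scored on the held-out faces. A minimal sketch, not part of the original run, assuming the fitted NeuralNet above is bound to a name such as net1 and that X_test / Y_test hold the 96x96 test faces and their numeric labels:
In [ ]:
# Hypothetical check of the fitted nolearn net (`net1` is an assumed name):
preds = net1.predict(X_test.reshape(-1, 1, 96, 96).astype(np.uint8))
print('Hold-out accuracy: %.3f' % np.mean(preds == Y_test.astype(np.uint8)))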
In [22]:
# Single-hidden-layer network in Keras on the flattened 96x96 face crops
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.utils import np_utils

# one-hot encode the 15 subject labels
Y_Keras = np_utils.to_categorical(Y, 15)

model = Sequential()
model.add(Dense(512, input_dim=9216, activation='relu'))  # 96*96 = 9216 pixels per face
model.add(Dense(15, activation='softmax'))                 # one output per subject

# Compile and train the model
model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])
model.fit(X.reshape(-1, 9216).astype(np.uint8), Y_Keras, nb_epoch=10, batch_size=50, verbose=1,
          validation_data=(X_test.reshape(-1, 9216).astype(np.uint8), np_utils.to_categorical(Y_test, 15)))
Train on 2062 samples, validate on 15 samples
Epoch 1/10
2062/2062 [==============================] - 6s - loss: 1.0105 - acc: 0.6358 - val_loss: 0.7210 - val_acc: 0.8000
Epoch 2/10
2062/2062 [==============================] - 3s - loss: 0.6854 - acc: 0.7396 - val_loss: 0.6561 - val_acc: 0.7333
Epoch 3/10
2062/2062 [==============================] - 3s - loss: 0.5600 - acc: 0.8050 - val_loss: 0.5918 - val_acc: 0.7333
Epoch 4/10
2062/2062 [==============================] - 4s - loss: 0.4899 - acc: 0.8274 - val_loss: 0.7485 - val_acc: 0.6667
Epoch 5/10
2062/2062 [==============================] - 4s - loss: 0.4483 - acc: 0.8448 - val_loss: 0.4979 - val_acc: 0.7333
Epoch 6/10
2062/2062 [==============================] - 3s - loss: 0.3565 - acc: 0.8851 - val_loss: 0.4109 - val_acc: 0.8000
Epoch 7/10
2062/2062 [==============================] - 3s - loss: 0.3055 - acc: 0.8957 - val_loss: 0.4790 - val_acc: 0.8000
Epoch 8/10
2062/2062 [==============================] - 5s - loss: 0.2581 - acc: 0.9243 - val_loss: 0.4445 - val_acc: 0.7333
Epoch 9/10
2062/2062 [==============================] - 4s - loss: 0.2377 - acc: 0.9239 - val_loss: 0.4291 - val_acc: 0.8000
Epoch 10/10
2062/2062 [==============================] - 5s - loss: 0.1904 - acc: 0.9471 - val_loss: 0.4283 - val_acc: 0.7333
Out[22]:
<keras.callbacks.History at 0x166e7128>
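For a direct hold-out score of this dense model, model.evaluate can be used; a minimal sketch, not part of the original run:
In [ ]:
# Sketch: score the single-hidden-layer Keras model on the held-out faces.
score = model.evaluate(X_test.reshape(-1, 9216).astype(np.uint8),
                       np_utils.to_categorical(Y_test, 15), verbose=0)
print('Hold-out loss: %.4f  accuracy: %.4f' % (score[0], score[1]))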
In [23]:
# Deeper MLP: two hidden layers of 512 units with dropout for regularisation
model = Sequential()
model.add(Dense(512, input_dim=9216))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(15))
model.add(Activation('softmax'))
model.summary()
model.compile(loss='categorical_crossentropy',
              optimizer='adadelta',
              metrics=['accuracy'])
model.fit(X.reshape(-1, 9216).astype(np.uint8), Y_Keras, nb_epoch=10, batch_size=10, verbose=1,
          validation_data=(X_test.reshape(-1, 9216).astype(np.uint8), np_utils.to_categorical(Y_test, 15)))
____________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
====================================================================================================
dense_8 (Dense) (None, 512) 4719104 dense_input_4[0][0]
____________________________________________________________________________________________________
activation_4 (Activation) (None, 512) 0 dense_8[0][0]
____________________________________________________________________________________________________
dropout_3 (Dropout) (None, 512) 0 activation_4[0][0]
____________________________________________________________________________________________________
dense_9 (Dense) (None, 512) 262656 dropout_3[0][0]
____________________________________________________________________________________________________
activation_5 (Activation) (None, 512) 0 dense_9[0][0]
____________________________________________________________________________________________________
dropout_4 (Dropout) (None, 512) 0 activation_5[0][0]
____________________________________________________________________________________________________
dense_10 (Dense) (None, 15) 7695 dropout_4[0][0]
____________________________________________________________________________________________________
activation_6 (Activation) (None, 15) 0 dense_10[0][0]
====================================================================================================
Total params: 4989455
____________________________________________________________________________________________________
Train on 2062 samples, validate on 15 samples
Epoch 1/10
2062/2062 [==============================] - 23s - loss: 0.8280 - acc: 0.6290 - val_loss: 0.6365 - val_acc: 0.6000
Epoch 2/10
2062/2062 [==============================] - 19s - loss: 0.5170 - acc: 0.7459 - val_loss: 0.6507 - val_acc: 0.8000
Epoch 3/10
2062/2062 [==============================] - 26s - loss: 0.4181 - acc: 0.8012 - val_loss: 0.4983 - val_acc: 0.8000
Epoch 4/10
2062/2062 [==============================] - 19s - loss: 0.3299 - acc: 0.8540 - val_loss: 0.5947 - val_acc: 0.8000
Epoch 5/10
2062/2062 [==============================] - 24s - loss: 0.2729 - acc: 0.8846 - val_loss: 0.5745 - val_acc: 0.7333
Epoch 6/10
2062/2062 [==============================] - 23s - loss: 0.2245 - acc: 0.9093 - val_loss: 0.5834 - val_acc: 0.7333
Epoch 7/10
2062/2062 [==============================] - 19s - loss: 0.2019 - acc: 0.9171 - val_loss: 0.6370 - val_acc: 0.7333
Epoch 8/10
2062/2062 [==============================] - 28s - loss: 0.1646 - acc: 0.9340 - val_loss: 0.9770 - val_acc: 0.7333
Epoch 9/10
2062/2062 [==============================] - 34s - loss: 0.1387 - acc: 0.9413 - val_loss: 0.7005 - val_acc: 0.8667
Epoch 10/10
2062/2062 [==============================] - 36s - loss: 0.1124 - acc: 0.9559 - val_loss: 1.0708 - val_acc: 0.6667
Out[23]:
<keras.callbacks.History at 0x1bdcfa20>
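The History object returned by fit() keeps the per-epoch metrics, so the curves above can be plotted rather than read off the log; a minimal sketch that re-runs the fit with its return value captured (the variable name history is illustrative):
In [ ]:
# Sketch: capture and plot the training curves of the dropout MLP above.
history = model.fit(X.reshape(-1, 9216).astype(np.uint8), Y_Keras, nb_epoch=10, batch_size=10, verbose=0,
                    validation_data=(X_test.reshape(-1, 9216).astype(np.uint8), np_utils.to_categorical(Y_test, 15)))
plt.plot(history.history['acc'], label='train accuracy')
plt.plot(history.history['val_acc'], label='validation accuracy')
plt.xlabel('epoch')
plt.legend();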
In [ ]:
def CNN(n_epochs):
    net1 = NeuralNet(
        layers=[
            ('input', layers.InputLayer),
            ('conv1', layers.Conv2DLayer),     # convolutional layer, params defined below
            ('pool1', layers.MaxPool2DLayer),  # downsampling, mainly for execution speed
            ('conv2', layers.Conv2DLayer),
            ('hidden3', layers.DenseLayer),
            ('output', layers.DenseLayer),
        ],
        input_shape=(None, 1, 96, 96),
        conv1_num_filters=7,
        conv1_filter_size=(3, 3),
        conv1_nonlinearity=lasagne.nonlinearities.rectify,
        pool1_pool_size=(2, 2),
        conv2_num_filters=12,
        conv2_filter_size=(2, 2),
        conv2_nonlinearity=lasagne.nonlinearities.rectify,
        hidden3_num_units=1000,
        output_num_units=15,
        output_nonlinearity=lasagne.nonlinearities.softmax,
        update_learning_rate=0.0001,
        update_momentum=0.9,
        max_epochs=n_epochs,
        verbose=1,
    )
    return net1
cnn = CNN(1000).fit(X.reshape(-1, 1, 96, 96).astype(np.uint8), Y.astype(np.uint8))  # train the CNN for 1000 epochs
# symbolic expression for the output of the first convolutional layer of the fitted net
prediction = lasagne.layers.get_output(cnn.layers_['conv1'])
# Neural Network with 25408433 learnable parameters
## Layer information
# name size
--- ------- --------
0 input 1x96x96
1 conv1 7x94x94
2 pool1 7x47x47
3 conv2 12x46x46
4 hidden3 1000
5 output 15
epoch trn loss val loss trn/val valid acc dur
------- ---------- ---------- --------- ----------- ------
1 2.70684 2.70195 1.00181 0.60916 44.63s
2 2.69764 2.68985 1.00289 0.60916 50.07s
3 2.68376 2.67329 1.00392 0.60916 47.69s
4 2.66456 2.64920 1.00580 0.60916 42.25s
5 2.63497 2.60806 1.01032 0.60916 44.36s
6 2.58138 2.52950 1.02051 0.60916 42.83s
7 2.47649 2.37018 1.04485 0.60916 49.47s
8 2.26611 2.05720 1.10155 0.60916 47.87s
9 1.88085 1.54077 1.22072 0.60916 36.62s
10 1.36008 1.03546 1.31350 0.60916 51.18s
11 0.96240 0.80594 1.19414 0.60916 48.94s
12 0.80102 0.74827 1.07051 0.60916 45.71s
13 0.80975 0.72965 1.10978 0.60916 30.30s
14 0.81664 0.71370 1.14424 0.60916 52.95s
15 0.80060 0.70973 1.12802 0.60916 50.57s
16 0.79849 0.70806 1.12772 0.60916 52.69s
17 0.79897 0.70586 1.13191 0.60916 52.29s
18 0.79751 0.70418 1.13253 0.60916 45.29s
19 0.79564 0.70311 1.13161 0.60916 43.87s
20 0.79456 0.70212 1.13165 0.60916 47.46s
21 0.79339 0.70107 1.13168 0.60916 50.97s
22 0.79139 0.70042 1.12987 0.60916 31.31s
23 0.79031 0.69971 1.12948 0.60916 47.05s
24 0.78905 0.69894 1.12893 0.60916 51.05s
25 0.78722 0.69844 1.12711 0.60916 52.62s
26 0.78612 0.69789 1.12642 0.60916 49.26s
27 0.78485 0.69737 1.12544 0.60916 42.16s
28 0.78356 0.69690 1.12435 0.60916 51.01s
29 0.78234 0.69646 1.12331 0.60916 45.06s
30 0.78113 0.69604 1.12226 0.60916 51.61s
31 0.77995 0.69564 1.12121 0.60916 51.82s
32 0.77879 0.69525 1.12016 0.60916 45.23s
33 0.77766 0.69489 1.11911 0.60916 47.70s
34 0.77655 0.69454 1.11808 0.60916 45.74s
35 0.77547 0.69421 1.11706 0.60916 41.93s
36 0.77449 0.69375 1.11639 0.60916 48.83s
37 0.77289 0.69353 1.11444 0.60916 51.44s
38 0.77207 0.69322 1.11374 0.60916 52.53s
39 0.77120 0.69275 1.11325 0.60916 43.98s
40 0.76943 0.69259 1.11095 0.60916 44.79s
41 0.76876 0.69232 1.11042 0.60916 46.67s
42 0.76790 0.69203 1.10963 0.60916 57.47s
43 0.76698 0.69179 1.10870 0.60916 50.36s
44 0.76614 0.69155 1.10786 0.60916 49.07s
45 0.76533 0.69131 1.10707 0.60916 51.05s
46 0.76452 0.69108 1.10626 0.60916 46.71s
47 0.76373 0.69086 1.10547 0.60916 47.08s
48 0.76300 0.69054 1.10493 0.60916 43.78s
49 0.76179 0.69040 1.10341 0.60916 40.55s
50 0.76119 0.69019 1.10287 0.60916 55.09s
51 0.76050 0.68998 1.10220 0.60916 53.85s
52 0.75958 0.68974 1.10125 0.60916 49.31s
53 0.75883 0.68953 1.10050 0.60916 47.78s
54 0.75809 0.68936 1.09970 0.60916 54.37s
55 0.75746 0.68919 1.09906 0.60916 51.55s
56 0.75684 0.68901 1.09845 0.60916 48.29s
57 0.75621 0.68884 1.09781 0.60916 46.42s
58 0.75561 0.68865 1.09723 0.60916 43.82s
59 0.75491 0.68851 1.09645 0.60916 40.06s
60 0.75436 0.68835 1.09589 0.60916 42.15s
61 0.75379 0.68819 1.09532 0.60916 42.58s
62 0.75322 0.68804 1.09473 0.60916 42.78s
63 0.75266 0.68789 1.09415 0.60916 41.23s
64 0.75212 0.68775 1.09359 0.60916 41.95s
65 0.75159 0.68761 1.09304 0.60916 40.63s
66 0.75106 0.68747 1.09250 0.60916 40.82s
67 0.75055 0.68734 1.09196 0.60916 41.09s
68 0.75004 0.68721 1.09143 0.60916 41.82s
69 0.74954 0.68708 1.09091 0.60916 40.61s
70 0.74905 0.68695 1.09040 0.60916 41.99s
71 0.74857 0.68683 1.08990 0.60916 45.03s
72 0.74819 0.68652 1.08983 0.60916 40.95s
73 0.74688 0.68644 1.08806 0.60916 42.92s
74 0.74638 0.68631 1.08752 0.60916 41.20s
75 0.74584 0.68621 1.08689 0.60916 46.42s
76 0.74546 0.68609 1.08654 0.60916 41.34s
77 0.74509 0.68588 1.08632 0.60916 41.10s
78 0.74427 0.68582 1.08523 0.60916 50.11s
79 0.74397 0.68571 1.08495 0.60916 44.05s
80 0.74361 0.68560 1.08462 0.60916 48.30s
81 0.74321 0.68549 1.08420 0.60916 49.25s
82 0.74282 0.68539 1.08379 0.60916 48.47s
83 0.74245 0.68529 1.08341 0.60916 57.00s
84 0.74208 0.68519 1.08303 0.60916 52.19s
85 0.74172 0.68510 1.08264 0.60916 42.11s
86 0.74145 0.68487 1.08261 0.60916 52.60s
87 0.74042 0.68485 1.08114 0.60916 49.43s
88 0.74022 0.68477 1.08098 0.60916 42.00s
89 0.73985 0.68465 1.08062 0.60916 44.17s
90 0.73947 0.68454 1.08024 0.60916 47.00s
91 0.73910 0.68445 1.07983 0.60916 42.80s
92 0.73875 0.68438 1.07946 0.60916 45.59s
93 0.73845 0.68429 1.07915 0.60916 47.38s
94 0.73814 0.68421 1.07883 0.60916 42.74s
95 0.73783 0.68413 1.07850 0.60916 48.27s
96 0.73752 0.68405 1.07817 0.60916 47.12s
97 0.73722 0.68397 1.07785 0.60916 43.54s
98 0.73692 0.68389 1.07754 0.60916 49.13s
99 0.73661 0.68382 1.07721 0.60916 47.21s
100 0.73632 0.68374 1.07690 0.60916 43.87s
101 0.73603 0.68367 1.07659 0.60916 46.42s
102 0.73575 0.68360 1.07629 0.60916 47.80s
103 0.73546 0.68353 1.07598 0.60916 49.01s
104 0.73518 0.68346 1.07568 0.60916 44.60s
105 0.73491 0.68339 1.07539 0.60916 46.23s
106 0.73463 0.68332 1.07509 0.60916 44.07s
107 0.73436 0.68325 1.07480 0.60916 50.27s
108 0.73410 0.68319 1.07452 0.60916 40.69s
109 0.73383 0.68312 1.07423 0.60916 48.91s
110 0.73330 0.68303 1.07360 0.60916 46.04s
111 0.73303 0.68293 1.07336 0.60916 47.71s
112 0.73269 0.68287 1.07297 0.60916 49.15s
113 0.73244 0.68281 1.07268 0.60916 48.53s
114 0.73221 0.68275 1.07244 0.60916 40.67s
115 0.73198 0.68269 1.07220 0.60916 40.92s
116 0.73174 0.68263 1.07194 0.60916 42.71s
117 0.73150 0.68257 1.07169 0.60916 42.35s
118 0.73127 0.68251 1.07144 0.60916 44.32s
119 0.73104 0.68246 1.07119 0.60916 45.24s
120 0.73081 0.68240 1.07094 0.60916 42.30s
121 0.73059 0.68234 1.07070 0.60916 45.47s
122 0.73036 0.68229 1.07046 0.60916 44.74s
123 0.73014 0.68224 1.07022 0.60916 47.43s
124 0.72992 0.68218 1.06998 0.60916 45.02s
125 0.72970 0.68213 1.06974 0.60916 50.11s
126 0.72949 0.68208 1.06951 0.60916 42.22s
127 0.72928 0.68203 1.06928 0.60916 47.86s
128 0.72907 0.68198 1.06905 0.60916 46.09s
129 0.72886 0.68193 1.06882 0.60916 10861.08s
130 0.72865 0.68188 1.06860 0.60916 37935.18s
In [27]:
def CNN(n_epochs):
    net1 = NeuralNet(
        layers=[
            ('input', layers.InputLayer),
            ('conv1', layers.Conv2DLayer),     # convolutional layer, params defined below
            ('pool1', layers.MaxPool2DLayer),  # downsampling, mainly for execution speed
            ('conv2', layers.Conv2DLayer),
            ('hidden3', layers.DenseLayer),
            ('output', layers.DenseLayer),
        ],
        input_shape=(None, 1, 96, 96),
        conv1_num_filters=7,
        conv1_filter_size=(3, 3),
        conv1_nonlinearity=lasagne.nonlinearities.rectify,
        pool1_pool_size=(2, 2),
        conv2_num_filters=12,
        conv2_filter_size=(2, 2),
        conv2_nonlinearity=lasagne.nonlinearities.rectify,
        hidden3_num_units=1000,
        output_num_units=15,
        output_nonlinearity=lasagne.nonlinearities.softmax,
        update_learning_rate=0.0001,
        update_momentum=0.9,
        max_epochs=n_epochs,
        verbose=1,
    )
    return net1
cnn = CNN(500).fit(X.reshape(-1, 1, 96, 96).astype(np.uint8), Y.astype(np.uint8))  # train the CNN for 500 epochs
# Neural Network with 25408433 learnable parameters
## Layer information
# name size
--- ------- --------
0 input 1x96x96
1 conv1 7x94x94
2 pool1 7x47x47
3 conv2 12x46x46
4 hidden3 1000
5 output 15
epoch trn loss val loss trn/val valid acc dur
------- ---------- ---------- --------- ----------- ------
1 2.71286 2.68199 1.01151 0.09201 38.10s
2 2.67701 2.62813 1.01860 0.34867 34.35s
3 2.62043 2.54286 1.03051 0.56174 36.38s
4 2.51866 2.37385 1.06100 0.58596 35.82s
5 2.29321 1.98945 1.15269 0.60775 36.45s
6 1.80290 1.29479 1.39243 0.60775 37.28s
7 1.17065 0.84128 1.39150 0.60775 34.07s
8 0.80062 0.74194 1.07909 0.58354 23.43s
9 0.82500 0.72519 1.13763 0.52300 32.00s
10 0.83458 0.70614 1.18189 0.59564 27.89s
11 0.80339 0.70625 1.13754 0.58838 29.57s
12 0.80219 0.70463 1.13845 0.57869 31.71s
13 0.79730 0.70245 1.13502 0.56901 36.65s
14 0.78917 0.70178 1.12452 0.56416 29.39s
15 0.78424 0.70122 1.11840 0.56174 36.53s
16 0.77947 0.70078 1.11229 0.56659 37.43s
17 0.77545 0.70004 1.10772 0.56901 36.36s
18 0.77156 0.69957 1.10291 0.56659 34.80s
19 0.76859 0.69914 1.09934 0.56901 34.23s
20 0.76517 0.69882 1.09495 0.56659 33.63s
21 0.76198 0.69854 1.09082 0.56659 37.43s
22 0.75929 0.69824 1.08744 0.56174 37.43s
23 0.75663 0.69807 1.08389 0.55690 34.89s
24 0.75448 0.69780 1.08122 0.55690 38.37s
25 0.75195 0.69761 1.07789 0.55932 37.56s
26 0.74952 0.69753 1.07454 0.55932 38.80s
27 0.74691 0.69759 1.07071 0.55690 33.33s
28 0.74430 0.69753 1.06705 0.54722 36.02s
29 0.74213 0.69746 1.06405 0.54722 34.41s
30 0.73970 0.69754 1.06045 0.54237 37.37s
31 0.73773 0.69758 1.05756 0.54237 40.80s
32 0.73589 0.69756 1.05494 0.54479 36.64s
33 0.73396 0.69750 1.05228 0.54237 35.71s
34 0.73213 0.69746 1.04970 0.53753 37.15s
35 0.73040 0.69737 1.04736 0.53995 32.97s
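Each of these runs takes hours, so it may be worth persisting the fitted network; a nolearn NeuralNet is an ordinary Python object and can be pickled. A minimal sketch (the file name is illustrative):
In [ ]:
# Sketch: persist and reload the trained nolearn CNN (file name is illustrative).
import pickle
with open('cnn_yale.pkl', 'wb') as f:
    pickle.dump(cnn, f, -1)
with open('cnn_yale.pkl', 'rb') as f:
    cnn = pickle.load(f)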
In [ ]:
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
# Convolutional network in Keras on the 96x96 face crops
model = Sequential()
model.add(Convolution2D(7, 10, 10,
                        border_mode='valid',
                        input_shape=(1, 96, 96)))   # 7 filters of size 10x10
model.add(Activation('relu'))
model.add(Convolution2D(7, 10, 10))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(15))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='adadelta',
              metrics=['accuracy'])
model.fit(X.reshape(-1, 1, 96, 96).astype(np.uint8), Y_Keras, batch_size=20, nb_epoch=30,
          verbose=1, validation_data=(X_test.reshape(-1, 1, 96, 96).astype(np.uint8), np_utils.to_categorical(Y_test, 15)))
score = model.evaluate(X_test.reshape(-1, 1, 96, 96).astype(np.uint8), np_utils.to_categorical(Y_test, 15), verbose=1)
print('Test score:', score[0])
print('Test accuracy:', score[1])
Train on 2062 samples, validate on 15 samples
Epoch 1/30
240/2062 [==>...........................] - ETA: 390s - loss: 1.6402 - acc: 0.5500
In [9]:
print ('Attempt to identify the faces using the eigen vectors from the image')
from scipy.spatial.distance import pdist
from scipy.cluster.hierarchy import linkage, dendrogram

# eigenvalue spectrum of each 96x96 face's covariance matrix as a feature vector
all_eigens = np.zeros((1, 96))
for i in range(len(faces)):
    cov_mat = np.cov(faces[i, :, :])
    eigen_vals, eigen_vecs = np.linalg.eig(cov_mat)
    all_eigens = np.vstack((all_eigens, eigen_vals.reshape(1, 96)))
all_eigens = all_eigens[1:, :]

data_dist = pdist(all_eigens[:30, :])   # pairwise distances between the first 30 samples
data_link = linkage(data_dist)          # hierarchical (agglomerative) linkage
dendrogram(data_link)                   # default leaf labels are the sample indices
plt.xlabel('Samples')
plt.ylabel('Distance')
plt.suptitle('Samples clustering', fontweight='bold', fontsize=15);
Attempt to identify the faces using the eigen vectors from the image
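To turn the dendrogram into a concrete grouping, the linkage can be cut into flat clusters and compared with the known subject labels; a minimal sketch, assuming Y holds the labels aligned with faces:
In [ ]:
# Sketch: cut the eigenvalue-based linkage into at most 15 flat clusters and
# list them next to the known subject labels (Y assumed aligned with `faces`).
from scipy.cluster.hierarchy import fcluster
flat_clusters = fcluster(data_link, t=15, criterion='maxclust')
for cluster_id, label in zip(flat_clusters, Y[:30]):
    print(cluster_id, label)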
In [10]:
print ('Attempt to identify the faces using the PCA from the image')
from sklearn.decomposition import PCA

# explained-variance profile of each face image as a feature vector
pca = PCA(n_components=95)
pca_values = np.zeros((1, 95))
for i in range(len(faces)):
    pca.fit(faces[i, :, :])
    pca_values = np.vstack((pca_values, pca.explained_variance_ratio_.reshape(1, 95)))
pca_values = pca_values[1:, :] * 100    # express as percentages

data_dist = pdist(pca_values[:30, :])   # pairwise distances between the first 30 samples
data_link = linkage(data_dist)          # hierarchical (agglomerative) linkage
dendrogram(data_link)                   # default leaf labels are the sample indices
plt.xlabel('Samples')
plt.ylabel('Distance')
plt.suptitle('Samples clustering', fontweight='bold', fontsize=15);
Attempt to identify the faces using the PCA from the image
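Because PCA is fitted per image here, a quick sanity check on how many components actually carry the variance is the cumulative explained-variance curve of a single face; a minimal sketch:
In [ ]:
# Sketch: cumulative explained variance of the PCA fit on the first face crop.
pca.fit(faces[0, :, :])
plt.plot(np.cumsum(pca.explained_variance_ratio_))
plt.xlabel('number of components')
plt.ylabel('cumulative explained variance');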
In [11]:
print ('Attempt to identify the faces using the SVD from the image')
from sklearn.decomposition import TruncatedSVD

# explained-variance profile of each face's truncated SVD as a feature vector
svd = TruncatedSVD(n_components=95, random_state=42)
svd_values = np.zeros((1, 95))
for i in range(len(faces)):
    svd.fit(faces[i, :, :])
    svd_values = np.vstack((svd_values, svd.explained_variance_ratio_.reshape(1, 95)))
svd_values = svd_values[1:, :]

data_dist = pdist(svd_values[:30, :])   # pairwise distances between the first 30 samples
data_link = linkage(data_dist)          # hierarchical (agglomerative) linkage
dendrogram(data_link)                   # default leaf labels are the sample indices
plt.xlabel('Samples')
plt.ylabel('Distance')
plt.suptitle('Samples clustering', fontweight='bold', fontsize=15);
Attempt to identify the faces using the SVD from the image
In [21]:
i = 1
n_faces = 10
print ('There are multiple faces for index %d and with length %d ' % (i , n_faces))
There are multiple faces for index 1 and with length 10
In [ ]:
Content source: mdomarsaleem/Facial_Plan