In [1]:
import numpy as np
import nolearn
import sklearn.linear_model as lm
import scipy.stats as sps
import math
import pandas as pd
from decimal import Decimal
from lasagne import layers, nonlinearities
from lasagne.updates import nesterov_momentum
from nolearn.lasagne import NeuralNet
from sklearn.ensemble import RandomForestRegressor, AdaBoostRegressor, GradientBoostingRegressor, ExtraTreesRegressor, BaggingRegressor
from sklearn.cross_validation import train_test_split
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.svm import SVR
from sklearn.externals import joblib
%matplotlib inline
%load_ext autoreload
%autoreload 2
Using gpu device 0: Quadro 2000
In [2]:
import custom_funcs as cf
cf.init_seaborn('white', 'poster')
from isoelectric_point import isoelectric_points
from molecular_weight import molecular_weights
In [3]:
# Read in the protease inhibitor data
data, drug_cols, feat_cols = cf.read_data('hiv-protease-data.csv', n_data_cols=8)
print(len(data))
# Read in the consensus data
consensus_map = cf.read_consensus('hiv-protease-consensus.fasta')
# Clean the data
data = cf.clean_data(data, feat_cols, consensus_map)
# Drop sequences with ambiguous amino acid calls
data = cf.drop_ambiguous_sequences(data, feat_cols)
data.dropna(inplace=True, subset=feat_cols)
data
1808
Out[3]:
         FPV    ATV    IDV    LPV    NFV     SQV   TPV    DRV P1 P2 ...  P90 P91 P92 P93 P94 P95 P96 P97 P98 P99
SeqID
4432     1.5    NaN    1.0    NaN    2.2     1.1   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
4664     3.1    NaN    8.7    NaN   32.0    16.9   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
5221     NaN    NaN    0.8    0.8    1.2     0.7   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
5279     8.3   79.0   16.0   12.0  600.0  1000.0   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
5444     2.7   21.0   24.0    6.1   42.0   132.0   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
5462     2.1   16.0   12.0   22.0   15.0    82.0   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
5464     2.1    NaN   22.2    7.8   24.7   104.8   NaN    NaN  P  Q ...    M   T   Q   L   G   C   T   L   N   F
5681     NaN    NaN   26.0   25.0   37.0     7.4   NaN    NaN  P  Q ...    M   T   Q   L   G   C   T   L   N   F
6024     NaN    NaN    8.3    3.0   22.0     3.4   NaN    NaN  P  Q ...    M   T   Q   L   G   C   T   L   N   F
6028     NaN    NaN   16.0   20.0   37.0     7.9   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
7042    11.0   18.0   28.0   17.0   53.0    62.0   NaN    NaN  P  Q ...    M   T   Q   L   G   C   T   L   N   F
7085     0.4    2.0    1.9    0.9    3.7     2.5   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
7103     NaN    NaN    0.7    0.7   11.0     0.4   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
7119     1.4    0.9    1.0    0.8    1.6     0.8   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
7412     6.2    NaN   12.0    NaN   10.2   591.5   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
7430     2.8    NaN   48.9    NaN   80.7    42.1   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
7443     2.3    NaN   12.0    NaN   11.0   574.2   NaN    NaN  P  Q ...    L   T   Q   L   G   C   T   L   N   F
8188     4.7   29.0   25.0   34.0   28.0   147.0   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
8468     1.4   11.0   17.0    4.4   26.0    20.0   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
8506     5.4   15.0   19.0    7.2   34.0    70.0   NaN    NaN  P  Q ...    M   T   Q   I   G   C   T   L   N   F
8626    11.0   15.0   33.0   34.0   56.0     1.5   NaN    NaN  P  Q ...    M   T   Q   L   G   C   T   L   N   F
8654     NaN    NaN    NaN    NaN    7.0     1.0   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
8658     NaN    NaN    NaN    NaN    4.0     1.0   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
8660     NaN    NaN    NaN    NaN   37.0     5.0   NaN    NaN  P  Q ...    M   T   Q   L   G   C   T   L   N   F
8666     NaN    NaN    NaN    NaN    2.0     1.0   NaN    NaN  P  Q ...    L   T   Q   L   G   C   T   L   N   F
8674     NaN    NaN    NaN    NaN    1.0     1.0   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
9431     NaN    NaN    2.8    0.8   12.0     0.9   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
9556     NaN    NaN    1.2    1.2    3.0     1.0   NaN    NaN  P  Q ...    L   T   Q   L   G   C   T   L   N   F
9564     0.4    2.2    0.8    0.5   24.0     0.8   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
9706     NaN    NaN    0.3    0.3    0.4     0.4   NaN    NaN  P  Q ...    L   T   Q   I   G   C   T   L   N   F
...      ...    ...    ...    ...    ...     ...   ...    ... .. .. ...  ... ... ... ... ... ... ... ... ... ...
235719   0.6    0.6    0.4    0.6    0.7     0.5   0.8    0.5  P  Q ...    L   T   Q   I   G   C   T   L   N   F
235721   4.7    2.8    3.4    4.4    5.1     5.2   3.2    1.7  P  Q ...    L   T   K   I   G   C   T   L   N   F
235725   0.5    0.8    0.6    0.6    0.7     0.6   0.9    0.5  P  Q ...    L   T   Q   I   G   C   T   L   N   F
235729   1.1    1.1    1.0    1.2    1.2     1.1   0.8    1.0  P  Q ...    L   T   Q   I   G   C   T   L   N   F
235733  23.0  115.0   35.0  102.0   68.0   184.0   NaN    NaN  P  Q ...    M   K   Q   L   G   C   T   L   N   F
235739   0.5    0.7    0.8    0.7    0.8     0.8   0.8    0.7  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257923   1.1    0.8    1.0    1.0    0.9     1.0   1.1    1.6  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257927   0.4    0.6    0.4    0.4    0.8     0.4   0.5    0.3  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257929   0.1    0.4    0.3    0.3    0.3     0.4   0.4    0.4  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257933   0.6    0.9    1.0    0.7    1.0     0.6   0.6    0.6  P  Q ...    L   T   Q   L   G   C   T   L   N   F
257935   0.8    0.9    0.8    0.8    0.8     0.8   0.8    0.7  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257939 200.0  204.0   54.0   98.0   32.0    20.0   4.2  117.0  P  Q ...    M   T   Q   I   G   C   T   L   N   F
257941   0.6    1.0    0.8    0.7    0.9     0.6   0.7    0.5  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257947   0.6    1.0    1.1    0.9    1.3     1.3   1.7    1.6  P  Q ...    L   T   Q   I   G   C   T   L   N   F
257957  46.0  200.0  200.0  200.0   89.0   200.0   1.3   33.0  P  Q ...    L   T   Q   L   G   C   T   L   N   F
257963   0.6    0.7    0.7    0.9    1.1     0.9   1.0    1.0  P  Q ...    L   T   Q   I   G   C   T   L   N   F
258503   0.2    8.3    3.3    1.4    7.1     1.9   0.6    0.3  P  Q ...    L   T   R   L   G   C   T   L   N   F
258505   0.7    0.8    0.8    0.8    0.9     0.9   0.9    1.0  P  Q ...    L   T   Q   I   G   C   T   L   N   F
258507   0.5    0.8    0.8    0.8    1.2     0.7   0.9    0.6  P  Q ...    L   T   Q   I   G   C   T   L   N   F
258509   2.5    5.0    4.5    2.0    9.1     3.5   2.4    1.3  P  Q ...    M   T   Q   L   G   C   T   L   N   F
259173   0.7    1.0    1.2    1.1    2.0     1.0   1.0    0.8  P  Q ...    L   T   Q   L   G   C   T   L   N   F
259175   0.9    0.8    1.0    1.0    0.8     0.8   0.7    0.8  P  Q ...    L   T   Q   I   G   C   T   L   N   F
259181   2.6    9.3   21.0    6.8   13.0    21.0   1.4    1.5  P  Q ...    M   T   Q   I   G   C   T   L   N   F
259191   1.1   27.0   30.0   36.0   36.0   200.0   0.6    0.6  P  Q ...    M   T   Q   L   G   C   T   L   N   F
259195   1.1    1.5    1.6    1.1    1.4     1.3   1.5    0.9  P  Q ...    L   T   Q   L   G   C   T   L   N   F
259199   0.5    0.8    0.7    0.6    1.1     0.6   0.6    0.6  P  Q ...    L   T   Q   L   G   C   T   L   N   F
259215   0.6    0.8    1.0    0.7    1.3     0.6   0.7    0.6  P  Q ...    L   T   Q   L   G   C   T   L   N   F
259227   6.3    6.2    6.3    3.4   20.5     5.3   6.7    2.9  P  Q ...    M   T   Q   I   G   C   T   L   N   F
259253   0.9    0.8    0.9    0.9    1.5     0.7   0.7    0.6  P  Q ...    L   T   Q   I   G   C   T   L   N   F
259257   0.8    0.8    0.8    0.6    1.7     0.7   1.1    0.5  P  Q ...    L   T   Q   L   G   C   T   L   N   F

802 rows × 107 columns
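(Aside: the sequence-handling helpers live in custom_funcs.py. A minimal, hypothetical sketch of what read_consensus presumably returns — a map from 1-indexed position to the consensus residue parsed out of the FASTA file; the names and logic here are assumptions, not the actual implementation:)

def read_consensus(path):
    # Hypothetical sketch, not the custom_funcs.py implementation:
    # join the sequence lines and map position -> consensus residue.
    with open(path) as f:
        seq = ''.join(line.strip() for line in f if not line.startswith('>'))
    return {i + 1: residue for i, residue in enumerate(seq)}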
In [4]:
# consensus_map
In [5]:
print(drug_cols)
DRUG = 'FPV'
Index(['FPV', 'ATV', 'IDV', 'LPV', 'NFV', 'SQV', 'TPV', 'DRV'], dtype='object')
In [6]:
feat_cols
Out[6]:
Index(['P1', 'P2', 'P3', 'P4', 'P5', 'P6', 'P7', 'P8', 'P9', 'P10', 'P11',
'P12', 'P13', 'P14', 'P15', 'P16', 'P17', 'P18', 'P19', 'P20', 'P21',
'P22', 'P23', 'P24', 'P25', 'P26', 'P27', 'P28', 'P29', 'P30', 'P31',
'P32', 'P33', 'P34', 'P35', 'P36', 'P37', 'P38', 'P39', 'P40', 'P41',
'P42', 'P43', 'P44', 'P45', 'P46', 'P47', 'P48', 'P49', 'P50', 'P51',
'P52', 'P53', 'P54', 'P55', 'P56', 'P57', 'P58', 'P59', 'P60', 'P61',
'P62', 'P63', 'P64', 'P65', 'P66', 'P67', 'P68', 'P69', 'P70', 'P71',
'P72', 'P73', 'P74', 'P75', 'P76', 'P77', 'P78', 'P79', 'P80', 'P81',
'P82', 'P83', 'P84', 'P85', 'P86', 'P87', 'P88', 'P89', 'P90', 'P91',
'P92', 'P93', 'P94', 'P95', 'P96', 'P97', 'P98', 'P99'],
dtype='object')
In [7]:
# Split data into predictor variables and dependent variables.
# Predictors are the sequence features
# Dependent are the drug resistance values
X, Y = cf.split_data_xy(data, feat_cols, DRUG)
# Binarize the sequence features such that there are 99 x 20 columns in total.
from sklearn.preprocessing import LabelBinarizer
lb = LabelBinarizer()
lb.fit(list('CHIMSVAGLPTRFYWDNEQK'))
X_binarized = pd.DataFrame()
for col in X.columns:
    binarized_cols = lb.transform(X[col])
    for i, c in enumerate(lb.classes_):
        X_binarized[col + '_' + c] = binarized_cols[:, i]
X_binarized
/home/ericmjl/hiv-resistance-prediction/custom_funcs.py:189: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame
See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
subset.dropna(inplace=True)
Out[7]:
      P1_A  P1_C  P1_D  P1_E  P1_F  P1_G  P1_H  P1_I  P1_K  P1_L ...  P99_M  P99_N  P99_P  P99_Q  P99_R  P99_S  P99_T  P99_V  P99_W  P99_Y
0        0     0     0     0     0     0     0     0     0     0 ...      0      0      0      0      0      0      0      0      0      0
1        0     0     0     0     0     0     0     0     0     0 ...      0      0      0      0      0      0      0      0      0      0
...    ...   ...   ...   ...   ...   ...   ...   ...   ...   ... ...    ...    ...    ...    ...    ...    ...    ...    ...    ...    ...
724      0     0     0     0     0     0     0     0     0     0 ...      0      0      0      0      0      0      0      0      0      0
725      0     0     0     0     0     0     0     0     0     0 ...      0      0      0      0      0      0      0      0      0      0

726 rows × 1980 columns
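As a quick illustration of the binarization (not part of the original run): after fitting, LabelBinarizer sorts the 20 amino acids alphabetically into lb.classes_, and each residue transforms to a 20-element indicator vector, which is why 99 positions yield 99 × 20 = 1980 columns.

lb = LabelBinarizer()
lb.fit(list('CHIMSVAGLPTRFYWDNEQK'))
print(lb.classes_)          # ['A' 'C' 'D' ... 'W' 'Y'], alphabetical
print(lb.transform(['M']))  # one row of 20 values with a single 1 in the 'M' column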
In [8]:
# View distribution of drug resistance values
import matplotlib.pyplot as plt
std = (3, 3)  # standard figure size used throughout
fig = cf.plot_Y_histogram(Y, DRUG, figsize=std)
In [9]:
# Split the data into training and testing sets.
tts_data = X_train, X_test, Y_train, Y_test = train_test_split(X_binarized, Y, test_size=0.33)
# Train a bunch of ensemble regressors:
## Random Forest
kwargs = {'n_jobs':-1, 'n_estimators':1000}
rfr, rfr_preds, rfr_mse, rfr_r2 = cf.train_model(*tts_data, model=RandomForestRegressor, modelargs=kwargs)
## Gradient Boosting
kwargs = {'n_estimators':1000}
gbr, gbr_preds, gbr_mse, gbr_r2 = cf.train_model(*tts_data, model=GradientBoostingRegressor, modelargs=kwargs)
## AdaBoost
kwargs = {'n_estimators':1000}
abr, abr_preds, abr_mse, abr_r2 = cf.train_model(*tts_data, model=AdaBoostRegressor, modelargs=kwargs)
## ExtraTrees
kwargs = {'n_estimators':1000, 'n_jobs':-1}
etr, etr_preds, etr_mse, etr_r2 = cf.train_model(*tts_data, model=ExtraTreesRegressor, modelargs=kwargs)
## Bagging
bgr, bgr_preds, bgr_mse, bgr_r2 = cf.train_model(*tts_data, model=BaggingRegressor)
# Plot the results of regression
cf.scatterplot_results(rfr_preds, Y_test, rfr_mse, rfr_r2, DRUG, 'Rand. Forest', figsize=std)
plt.savefig('figures/{0} random_forest_poster.pdf'.format(DRUG), bbox_inches='tight')
cf.scatterplot_results(gbr_preds, Y_test, gbr_mse, gbr_r2, DRUG, 'Grad. Boost', figsize=std)
plt.savefig('figures/{0} gradient_boost_poster.pdf'.format(DRUG), bbox_inches='tight')
cf.scatterplot_results(abr_preds, Y_test, abr_mse, abr_r2, DRUG, 'AdaBoost', figsize=std)
plt.savefig('figures/{0} adaboost_poster.pdf'.format(DRUG), bbox_inches='tight')
cf.scatterplot_results(etr_preds, Y_test, etr_mse, etr_r2, DRUG, 'ExtraTrees', figsize=std)
plt.savefig('figures/{0} extratrees_poster.pdf'.format(DRUG), bbox_inches='tight')
cf.scatterplot_results(bgr_preds, Y_test, bgr_mse, bgr_r2, DRUG, 'Bagging', figsize=std)
plt.savefig('figures/{0} bagging_poster.pdf'.format(DRUG), bbox_inches='tight')
/home/ericmjl/anaconda3/lib/python3.4/site-packages/matplotlib/collections.py:590: FutureWarning: elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
if self._edgecolors == str('face'):
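cf.train_model is defined in custom_funcs.py. Judging from how it is called here, it fits the given model class on the training split, predicts on the test split, and scores with the metrics imported above; a hypothetical sketch of that contract (an assumption, not the actual implementation):

def train_model(X_train, X_test, Y_train, Y_test, model=None, modelargs=None):
    # Hypothetical sketch of the presumed contract of cf.train_model.
    m = model(**(modelargs or {}))  # instantiate the regressor
    m.fit(X_train, Y_train)         # fit on the training split
    preds = m.predict(X_test)       # predict on the held-out split
    return m, preds, mean_squared_error(Y_test, preds), r2_score(Y_test, preds)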
In [10]:
# Plot the feature importances - that is, how much each binarized sequence feature contributes to predicting drug resistance
cf.barplot_feature_importances(rfr, DRUG, 'Rand. Forest', figsize=(5,3))
plt.savefig('figures/{0} random_forest_feat_impt.pdf'.format(DRUG), bbox_inches='tight')
cf.barplot_feature_importances(gbr, DRUG, 'Grad. Boost', figsize=(5,3))
plt.savefig('figures/{0} gradient_boost_feat_impt.pdf'.format(DRUG), bbox_inches='tight')
cf.barplot_feature_importances(abr, DRUG, 'AdaBoost', figsize=(5,3))
plt.savefig('figures/{0} adaboost_feat_impt.pdf'.format(DRUG), bbox_inches='tight')
cf.barplot_feature_importances(etr, DRUG, 'ExtraTrees', figsize=(5,3))
plt.savefig('figures/{0} extratrees_feat_impt.pdf'.format(DRUG), bbox_inches='tight')
# cf.barplot_feature_importances(bgr, DRUG, 'Bagging')  # BaggingRegressor does not expose feature_importances_
In [11]:
# Extract a table version of feature importance
rfr_fi = cf.extract_mutational_importance(rfr, X_test)
gbr_fi = cf.extract_mutational_importance(gbr, X_test)
abr_fi = cf.extract_mutational_importance(abr, X_test)
# Join data to compare random forest and gradient boosting.
# joined = rfr_fi.set_index(0).join(gbr_fi.set_index(0), lsuffix='r', rsuffix='g')
# sps.spearmanr(joined['1r'], joined['1g'])
rfr_fi.to_csv('figures/{0} random_forest feature_importance.csv'.format(DRUG))
gbr_fi.to_csv('figures/{0} gradient_boost feature_importance.csv'.format(DRUG))
abr_fi.to_csv('figures/{0} adaboost feature_importance.csv'.format(DRUG))
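cf.extract_mutational_importance is likewise custom. The idea, sketched hypothetically, is to pair each binarized column name (e.g. 'P82_A': position 82, residue A) with its learned importance and rank the pairs; the actual implementation is in custom_funcs.py:

def extract_mutational_importance(model, X_test):
    # Hypothetical sketch: rank binarized features by feature_importances_.
    pairs = sorted(zip(X_test.columns, model.feature_importances_),
                   key=lambda pair: pair[1], reverse=True)
    return pd.DataFrame(pairs)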
In [12]:
# Train a bunch of linear model learners for comparison.
brr, brr_preds, brr_mse, brr_r2 = cf.train_model(*tts_data, model=lm.BayesianRidge)
ard, ard_preds, ard_mse, ard_r2 = cf.train_model(*tts_data, model=lm.ARDRegression)
# Note: LogisticRegression is a classifier; here it treats each distinct resistance value as a discrete class label.
logr, logr_preds, logr_mse, logr_r2 = cf.train_model(*tts_data, model=lm.LogisticRegression)
enr, enr_preds, enr_mse, enr_r2 = cf.train_model(*tts_data, model=lm.ElasticNet)
svr, svr_preds, svr_mse, svr_r2 = cf.train_model(*tts_data, model=SVR)
# Likewise, plot the results
cf.scatterplot_results(brr_preds, Y_test, brr_mse, brr_r2, DRUG, 'Bayesian Ridge', figsize=std)
cf.scatterplot_results(ard_preds, Y_test, ard_mse, ard_r2, DRUG, 'ARD Regression', figsize=std)
cf.scatterplot_results(logr_preds, Y_test, logr_mse, logr_r2, DRUG, 'Logistic Regression', figsize=std)
cf.scatterplot_results(enr_preds, Y_test, enr_mse, enr_r2, DRUG, 'ElasticNet', figsize=std)
cf.scatterplot_results(svr_preds, Y_test, svr_mse, svr_r2, DRUG, 'SVMs', figsize=std)
/home/ericmjl/anaconda3/lib/python3.4/site-packages/matplotlib/collections.py:590: FutureWarning: elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
if self._edgecolors == str('face'):
Out[12]:
In [13]:
# Let's now try a neural network.
# Neural Network 1 Specification: Feed Forward ANN with 1 hidden layer.
x_train = X_train.astype(np.float32)
y_train = Y_train.astype(np.float32)
x_test = X_test.astype(np.float32)
y_test = Y_test.astype(np.float32)
net1 = NeuralNet(
    layers=[  # feed-forward net: input, one dense hidden layer, dropout, nonlinearity, output
        ('input', layers.InputLayer),
        ('hidden1', layers.DenseLayer),
        ('dropout1', layers.DropoutLayer),
        #('hidden2', layers.DenseLayer),
        #('dropout2', layers.DropoutLayer),
        ('nonlinear', layers.NonlinearityLayer),  # NonlinearityLayer defaults to rectify in lasagne
        ('output', layers.DenseLayer),
    ],
    # layer parameters:
    input_shape=(None, x_train.shape[1]),  # 1980 binarized sequence features
    hidden1_num_units=math.ceil(x_train.shape[1]),  # number of units in the hidden layer
    hidden1_nonlinearity=nonlinearities.tanh,
    dropout1_p=0.4,
    #hidden2_num_units=math.ceil(x_train.shape[1] / 2),
    #dropout2_p=0.5,
    output_nonlinearity=None,  # output layer uses the identity function
    output_num_units=1,  # 1 target value
    # optimization method:
    update=nesterov_momentum,
    update_learning_rate=0.01,
    update_momentum=0.95,
    regression=True,  # flag to indicate we're dealing with a regression problem
    max_epochs=1000,  # we want to train this many epochs
    verbose=1,
)
net1.fit(x_train.values, y_train.values)
# Neural Network with 3924361 learnable parameters

## Layer information

  #  name       size
---  ---------  ------
  0  input      1980
  1  hidden1    1980
  2  dropout1   1980
  3  nonlinear  1980
  4  output     1
epoch train loss valid loss train/val dur
------- ------------ ------------ ----------- -----
1 12.59619 2.44780 5.14593 0.07s
2 2.61336 2.20166 1.18699 0.07s
3 2.20374 1.99350 1.10546 0.07s
4 1.97106 1.80216 1.09372 0.07s
5 1.63082 1.63121 0.99977 0.07s
6 1.34881 1.04006 1.29686 0.07s
7 1.10921 1.74530 0.63554 0.07s
8 1.32155 1.81279 0.72901 0.07s
9 1.28494 1.08133 1.18829 0.07s
10 0.96176 0.78407 1.22663 0.07s
11 0.82693 1.07843 0.76679 0.07s
12 0.84364 0.75525 1.11704 0.07s
13 0.77994 0.72542 1.07517 0.07s
14 0.78055 1.01990 0.76532 0.07s
15 0.85883 0.73972 1.16102 0.07s
16 0.68441 0.72345 0.94604 0.07s
17 0.60534 0.89260 0.67817 0.07s
18 0.69925 0.68405 1.02221 0.07s
19 0.63123 1.12371 0.56174 0.07s
20 0.70969 1.20189 0.59048 0.07s
21 0.77372 0.92757 0.83414 0.07s
22 0.62810 0.78056 0.80468 0.07s
23 0.62831 0.67113 0.93621 0.07s
24 0.59683 0.74995 0.79583 0.07s
25 0.53863 0.76603 0.70314 0.07s
26 0.58233 0.65421 0.89013 0.07s
27 0.51782 0.80816 0.64074 0.07s
28 0.50229 0.83151 0.60407 0.07s
29 0.47911 0.62637 0.76489 0.07s
30 0.55700 0.85998 0.64769 0.07s
31 0.53940 0.81746 0.65986 0.07s
32 0.51662 0.68156 0.75800 0.07s
33 0.52149 0.85754 0.60812 0.07s
34 0.56740 0.59056 0.96078 0.07s
35 0.43813 0.66349 0.66034 0.07s
36 0.43555 0.79542 0.54757 0.07s
37 0.56313 0.63159 0.89161 0.07s
38 0.40987 1.32925 0.30835 0.07s
39 0.64758 0.58973 1.09811 0.07s
40 0.44714 0.99615 0.44887 0.07s
41 0.58663 0.66070 0.88789 0.07s
42 0.45422 1.06172 0.42782 0.07s
43 0.56654 0.59225 0.95659 0.07s
44 0.41088 0.54602 0.75251 0.07s
45 0.37486 0.60996 0.61457 0.07s
46 0.43776 0.55882 0.78336 0.07s
47 0.33118 0.57914 0.57185 0.07s
48 0.41880 0.62336 0.67185 0.07s
49 0.37904 0.60020 0.63152 0.07s
50 0.38822 0.72366 0.53647 0.07s
51 0.35622 0.59345 0.60026 0.07s
52 0.38635 0.78135 0.49446 0.07s
53 0.48609 0.64814 0.74998 0.07s
54 0.37296 0.53673 0.69487 0.07s
55 0.38835 0.58466 0.66423 0.07s
56 0.34947 0.52374 0.66726 0.07s
57 0.39255 0.70188 0.55929 0.07s
58 0.37721 0.55923 0.67451 0.07s
59 0.35831 0.53565 0.66894 0.07s
60 0.37375 0.51780 0.72180 0.07s
61 0.35872 0.51549 0.69589 0.07s
62 0.42699 0.63653 0.67081 0.07s
63 0.38235 0.52483 0.72853 0.07s
64 0.43413 0.95917 0.45261 0.07s
65 0.53459 0.53160 1.00563 0.07s
66 0.37151 0.54315 0.68399 0.07s
67 0.37556 0.56055 0.66998 0.07s
68 0.35315 0.51595 0.68447 0.07s
69 0.37059 0.73016 0.50755 0.07s
70 0.42203 0.52685 0.80104 0.07s
71 0.36744 0.68160 0.53908 0.07s
72 0.34438 0.53351 0.64549 0.07s
73 0.44812 0.62461 0.71745 0.07s
74 0.37179 0.53256 0.69812 0.07s
75 0.42547 0.49620 0.85746 0.07s
76 0.33751 0.49065 0.68788 0.07s
77 0.36155 0.67190 0.53810 0.07s
78 0.36810 0.51709 0.71187 0.07s
79 0.34594 0.68518 0.50489 0.07s
80 0.35537 0.55789 0.63699 0.07s
81 0.38007 0.54330 0.69955 0.07s
82 0.36145 0.49920 0.72406 0.07s
83 0.42204 0.57645 0.73213 0.07s
84 0.32304 0.51658 0.62535 0.07s
85 0.33949 0.49496 0.68588 0.07s
86 0.29508 0.53414 0.55243 0.07s
87 0.33479 0.54453 0.61483 0.07s
88 0.35145 0.49693 0.70724 0.07s
89 0.31850 0.50838 0.62651 0.07s
90 0.31377 0.52162 0.60152 0.07s
91 0.33111 0.51545 0.64237 0.07s
92 0.32534 0.62596 0.51976 0.07s
93 0.35229 0.50149 0.70249 0.07s
94 0.30529 0.56845 0.53706 0.07s
95 0.30885 0.48059 0.64264 0.07s
96 0.30145 0.51187 0.58891 0.07s
97 0.32058 0.59768 0.53638 0.07s
98 0.32557 0.46862 0.69474 0.07s
99 0.29166 0.45967 0.63449 0.07s
100 0.29986 0.45676 0.65649 0.07s
101 0.33991 0.68016 0.49976 0.07s
102 0.39350 0.46596 0.84448 0.07s
103 0.26712 0.52593 0.50790 0.07s
104 0.29664 0.48116 0.61650 0.07s
105 0.30734 0.46170 0.66566 0.07s
106 0.32803 0.50327 0.65179 0.07s
107 0.30381 0.47441 0.64040 0.07s
108 0.33768 0.48795 0.69203 0.07s
109 0.26173 0.48383 0.54095 0.07s
110 0.30258 0.52863 0.57239 0.07s
111 0.31880 0.48946 0.65133 0.07s
112 0.34213 0.51887 0.65937 0.07s
113 0.32232 0.47234 0.68238 0.07s
114 0.31409 0.46047 0.68211 0.07s
115 0.35906 0.54228 0.66214 0.07s
116 0.26614 0.48122 0.55305 0.07s
117 0.25452 0.52550 0.48434 0.07s
118 0.29882 0.59615 0.50125 0.07s
119 0.33730 0.48307 0.69824 0.07s
120 0.29037 0.49153 0.59074 0.07s
121 0.29732 0.46691 0.63679 0.07s
122 0.32525 0.47820 0.68016 0.07s
123 0.28199 0.49108 0.57422 0.07s
124 0.29244 0.48115 0.60779 0.07s
125 0.29065 0.48069 0.60465 0.07s
126 0.32853 0.49480 0.66397 0.07s
127 0.31810 0.51558 0.61698 0.07s
128 0.29434 0.49517 0.59442 0.07s
129 0.29087 0.49291 0.59012 0.07s
130 0.25721 0.48442 0.53096 0.07s
131 0.28316 0.48931 0.57870 0.07s
132 0.25189 0.52286 0.48175 0.07s
133 0.29358 0.48265 0.60828 0.07s
134 0.26225 0.46339 0.56594 0.07s
135 0.31997 0.50682 0.63133 0.07s
136 0.39413 0.53336 0.73895 0.07s
137 0.29383 0.49886 0.58900 0.07s
138 0.31138 0.49581 0.62803 0.07s
139 0.33844 0.47778 0.70836 0.07s
140 0.36338 0.52407 0.69337 0.07s
141 0.29554 0.47662 0.62007 0.07s
142 0.27940 0.53775 0.51957 0.07s
143 0.36348 0.50432 0.72074 0.07s
144 0.28901 0.48612 0.59453 0.07s
145 0.27960 0.47362 0.59034 0.07s
146 0.29278 0.45288 0.64649 0.07s
147 0.37592 0.59486 0.63195 0.07s
148 0.35139 0.49322 0.71244 0.07s
149 0.31349 0.47427 0.66099 0.07s
150 0.30365 0.49870 0.60889 0.07s
151 0.32854 0.46190 0.71127 0.07s
152 0.25977 0.45959 0.56522 0.07s
153 0.32146 0.47571 0.67574 0.07s
154 0.27254 0.45905 0.59369 0.07s
155 0.26707 0.49513 0.53939 0.07s
156 0.27515 0.46121 0.59659 0.07s
157 0.28296 0.48625 0.58193 0.07s
158 0.30230 0.52176 0.57939 0.07s
159 0.24874 0.47795 0.52043 0.07s
160 0.24423 0.48796 0.50051 0.07s
161 0.25614 0.50301 0.50922 0.07s
162 0.29070 0.50220 0.57886 0.07s
163 0.22858 0.48053 0.47568 0.07s
164 0.24979 0.47686 0.52383 0.07s
165 0.36511 0.53661 0.68039 0.07s
166 0.32926 0.52670 0.62514 0.07s
167 0.29171 0.52215 0.55867 0.07s
168 0.27596 0.52302 0.52763 0.07s
169 0.24730 0.51435 0.48080 0.07s
170 0.27097 0.48959 0.55347 0.07s
171 0.25289 0.52938 0.47772 0.07s
172 0.26009 0.46083 0.56440 0.07s
173 0.22799 0.48633 0.46881 0.07s
174 0.24615 0.48504 0.50749 0.07s
175 0.27997 0.52898 0.52926 0.07s
176 0.32375 0.49756 0.65067 0.07s
177 0.30006 0.51380 0.58400 0.07s
178 0.29888 0.48548 0.61563 0.07s
179 0.38663 0.48759 0.79294 0.07s
180 0.25072 0.52533 0.47726 0.07s
181 0.27532 0.48335 0.56961 0.07s
182 0.28573 0.50802 0.56243 0.07s
183 0.31048 0.50526 0.61450 0.07s
184 0.30612 0.47168 0.64899 0.07s
185 0.29209 0.48177 0.60628 0.07s
186 0.24887 0.47473 0.52423 0.07s
187 0.26107 0.46913 0.55649 0.07s
188 0.33288 0.46657 0.71346 0.07s
189 0.29698 0.48138 0.61693 0.07s
190 0.28584 0.47570 0.60088 0.07s
191 0.29091 0.47008 0.61885 0.07s
192 0.24053 0.48271 0.49830 0.07s
193 0.21729 0.51892 0.41874 0.07s
194 0.24751 0.49008 0.50503 0.07s
195 0.27505 0.50004 0.55005 0.07s
196 0.24941 0.48602 0.51316 0.07s
197 0.26901 0.53286 0.50483 0.07s
198 0.30742 0.50129 0.61325 0.07s
199 0.32295 0.46362 0.69658 0.07s
200 0.26173 0.53044 0.49343 0.07s
201 0.32095 0.44999 0.71324 0.07s
202 0.35156 0.46409 0.75752 0.07s
203 0.26148 0.50556 0.51720 0.07s
204 0.30855 0.52652 0.58602 0.07s
205 0.24949 0.45659 0.54643 0.07s
206 0.27250 0.49308 0.55265 0.07s
207 0.29511 0.44701 0.66018 0.07s
208 0.27865 0.48690 0.57229 0.07s
209 0.26093 0.46693 0.55882 0.07s
210 0.25810 0.46067 0.56026 0.07s
211 0.28781 0.50254 0.57270 0.07s
212 0.29430 0.47349 0.62156 0.07s
213 0.25910 0.47843 0.54157 0.07s
214 0.28201 0.49659 0.56790 0.07s
215 0.28092 0.50977 0.55106 0.07s
216 0.33160 0.47624 0.69629 0.07s
217 0.31121 0.46605 0.66776 0.07s
218 0.30275 0.43474 0.69638 0.07s
219 0.28251 0.45676 0.61851 0.07s
220 0.31075 0.46811 0.66383 0.07s
221 0.26768 0.48671 0.54999 0.07s
222 0.31973 0.49929 0.64038 0.07s
223 0.36027 0.50041 0.71995 0.07s
224 0.27523 0.50105 0.54930 0.07s
225 0.28807 0.50022 0.57589 0.07s
226 0.28330 0.57115 0.49602 0.07s
227 0.30008 0.49823 0.60229 0.07s
228 0.27906 0.48714 0.57284 0.07s
229 0.28126 0.47674 0.58996 0.07s
230 0.30869 0.47263 0.65314 0.07s
231 0.25575 0.46424 0.55089 0.07s
232 0.25911 0.50167 0.51649 0.07s
233 0.28065 0.49366 0.56851 0.07s
234 0.28614 0.50599 0.56551 0.07s
235 0.31691 0.52034 0.60904 0.07s
236 0.29490 0.48539 0.60755 0.07s
237 0.26235 0.45335 0.57870 0.07s
238 0.31420 0.48488 0.64800 0.07s
239 0.26486 0.49171 0.53864 0.07s
240 0.27799 0.48202 0.57670 0.07s
241 0.30912 0.48986 0.63104 0.07s
242 0.28158 0.49787 0.56557 0.07s
243 0.26855 0.48558 0.55305 0.07s
244 0.32544 0.53980 0.60289 0.07s
245 0.30296 0.45735 0.66244 0.07s
246 0.25591 0.46364 0.55196 0.07s
247 0.31454 0.47191 0.66652 0.07s
248 0.26459 0.46784 0.56557 0.07s
249 0.27102 0.45999 0.58919 0.07s
250 0.31353 0.50476 0.62115 0.07s
251 0.29766 0.49508 0.60123 0.07s
252 0.28482 0.46529 0.61213 0.07s
253 0.27656 0.45961 0.60173 0.07s
254 0.26939 0.45670 0.58986 0.07s
255 0.42162 0.51195 0.82354 0.07s
256 0.32664 0.56674 0.57635 0.07s
257 0.35308 0.49890 0.70772 0.07s
258 0.32431 0.43816 0.74018 0.07s
259 0.29014 0.44554 0.65121 0.07s
260 0.25906 0.50113 0.51696 0.07s
261 0.25551 0.48450 0.52737 0.07s
262 0.30987 0.48715 0.63609 0.07s
263 0.34051 0.46892 0.72615 0.07s
264 0.29971 0.49590 0.60438 0.07s
265 0.27982 0.49802 0.56187 0.07s
266 0.26814 0.49475 0.54196 0.07s
267 0.31703 0.50904 0.62279 0.07s
268 0.30825 0.48684 0.63316 0.07s
269 0.26096 0.49659 0.52549 0.07s
270 0.32513 0.45374 0.71656 0.07s
271 0.32912 0.51471 0.63943 0.07s
272 0.36334 0.53394 0.68049 0.07s
273 0.32503 0.46818 0.69425 0.07s
274 0.26101 0.47056 0.55467 0.07s
275 0.31227 0.48655 0.64180 0.07s
276 0.30033 0.48107 0.62430 0.07s
277 0.28025 0.47091 0.59513 0.07s
278 0.32878 0.49655 0.66211 0.07s
279 0.30834 0.45683 0.67494 0.07s
280 0.27031 0.44233 0.61111 0.07s
281 0.34663 0.48829 0.70989 0.07s
282 0.27949 0.49120 0.56900 0.07s
283 0.29338 0.47398 0.61897 0.07s
284 0.32850 0.45650 0.71960 0.07s
285 0.25415 0.47658 0.53327 0.07s
286 0.24997 0.45696 0.54703 0.07s
287 0.29386 0.44452 0.66109 0.07s
288 0.26300 0.54203 0.48522 0.07s
289 0.24964 0.47719 0.52314 0.07s
290 0.27947 0.45719 0.61127 0.07s
291 0.31369 0.46189 0.67915 0.07s
292 0.27625 0.54889 0.50329 0.07s
293 0.29744 0.49147 0.60519 0.07s
294 0.25352 0.46173 0.54907 0.07s
295 0.28435 0.50452 0.56361 0.07s
296 0.31110 0.46152 0.67407 0.07s
297 0.25745 0.45543 0.56529 0.07s
298 0.29062 0.45499 0.63875 0.07s
299 0.23948 0.47976 0.49917 0.07s
300 0.28611 0.46860 0.61055 0.07s
301 0.26037 0.49368 0.52741 0.07s
302 0.29128 0.48330 0.60269 0.07s
303 0.22244 0.43619 0.50996 0.07s
304 0.22784 0.44000 0.51782 0.07s
305 0.29701 0.44488 0.66763 0.07s
306 0.27662 0.44555 0.62085 0.07s
307 0.25627 0.47452 0.54006 0.07s
308 0.27701 0.43420 0.63797 0.07s
309 0.30592 0.53272 0.57425 0.07s
310 0.30716 0.45149 0.68033 0.07s
311 0.26777 0.44810 0.59756 0.07s
312 0.30129 0.44220 0.68134 0.07s
313 0.26195 0.49293 0.53142 0.07s
314 0.26386 0.47817 0.55181 0.07s
315 0.30788 0.45932 0.67031 0.07s
316 0.26620 0.43858 0.60696 0.07s
317 0.30641 0.43940 0.69734 0.07s
318 0.27183 0.43488 0.62507 0.07s
319 0.27475 0.42975 0.63932 0.07s
320 0.30260 0.45972 0.65822 0.07s
321 0.29873 0.45421 0.65769 0.07s
322 0.26397 0.45649 0.57825 0.07s
323 0.25328 0.43754 0.57886 0.07s
324 0.26456 0.44699 0.59186 0.07s
325 0.30847 0.49229 0.62659 0.07s
326 0.26455 0.44809 0.59039 0.07s
327 0.24830 0.44899 0.55301 0.07s
328 0.29448 0.44315 0.66451 0.07s
329 0.24827 0.43896 0.56558 0.07s
330 0.26016 0.43723 0.59502 0.07s
331 0.26885 0.46992 0.57212 0.07s
332 0.28539 0.45595 0.62593 0.07s
333 0.24993 0.44743 0.55859 0.07s
334 0.21954 0.46145 0.47575 0.07s
335 0.28148 0.44470 0.63296 0.07s
336 0.32947 0.44859 0.73445 0.07s
337 0.28268 0.44754 0.63162 0.07s
338 0.23443 0.43584 0.53789 0.07s
339 0.26501 0.44162 0.60009 0.07s
340 0.23763 0.46087 0.51561 0.07s
341 0.24444 0.44847 0.54506 0.07s
342 0.23135 0.43781 0.52842 0.07s
343 0.24613 0.45620 0.53953 0.07s
344 0.30075 0.47869 0.62828 0.07s
345 0.27557 0.49034 0.56200 0.07s
346 0.28829 0.52712 0.54691 0.07s
347 0.28488 0.42610 0.66857 0.07s
348 0.25199 0.44798 0.56250 0.07s
349 0.20130 0.45423 0.44316 0.07s
350 0.25161 0.46387 0.54242 0.07s
351 0.25549 0.45836 0.55741 0.07s
352 0.30060 0.45332 0.66311 0.07s
353 0.26122 0.45698 0.57162 0.07s
354 0.23510 0.44766 0.52518 0.07s
355 0.24006 0.43751 0.54868 0.07s
356 0.21838 0.45830 0.47651 0.07s
357 0.24080 0.44695 0.53877 0.07s
358 0.26691 0.45545 0.58604 0.07s
359 0.22774 0.43836 0.51952 0.07s
360 0.26642 0.42290 0.62998 0.07s
361 0.27108 0.41867 0.64749 0.07s
362 0.25572 0.45659 0.56007 0.07s
363 0.23250 0.44899 0.51782 0.07s
364 0.20982 0.45498 0.46116 0.07s
365 0.28607 0.45826 0.62425 0.07s
366 0.21603 0.46342 0.46618 0.07s
367 0.22735 0.44746 0.50809 0.07s
368 0.22697 0.50191 0.45220 0.07s
369 0.31930 0.54410 0.58683 0.07s
370 0.28611 0.46155 0.61988 0.07s
371 0.20705 0.42446 0.48780 0.07s
372 0.23361 0.49750 0.46957 0.07s
373 0.25835 0.43381 0.59553 0.07s
374 0.31603 0.47549 0.66463 0.07s
375 0.27315 0.46105 0.59246 0.07s
376 0.23411 0.46532 0.50311 0.07s
377 0.29105 0.44492 0.65417 0.07s
378 0.27212 0.45279 0.60100 0.07s
379 0.20893 0.46276 0.45149 0.07s
380 0.23624 0.45843 0.51532 0.07s
381 0.24372 0.45768 0.53251 0.07s
382 0.26806 0.43243 0.61989 0.07s
383 0.29265 0.45946 0.63696 0.07s
384 0.24348 0.45815 0.53143 0.07s
385 0.27702 0.44753 0.61901 0.07s
386 0.26637 0.46593 0.57170 0.07s
387 0.25772 0.44572 0.57820 0.07s
388 0.21051 0.44492 0.47313 0.07s
389 0.23000 0.43496 0.52878 0.07s
390 0.25448 0.43225 0.58874 0.07s
391 0.25431 0.42365 0.60028 0.07s
392 0.23518 0.41867 0.56173 0.07s
393 0.21552 0.45495 0.47372 0.07s
394 0.26648 0.43085 0.61849 0.07s
395 0.23848 0.42460 0.56165 0.07s
396 0.21635 0.41001 0.52767 0.07s
397 0.31491 0.43815 0.71874 0.07s
398 0.28029 0.42889 0.65353 0.07s
399 0.22319 0.46080 0.48436 0.07s
400 0.29837 0.43685 0.68302 0.07s
401 0.27040 0.43631 0.61973 0.07s
402 0.24805 0.43780 0.56660 0.07s
403 0.23491 0.43553 0.53936 0.07s
404 0.30484 0.46999 0.64861 0.07s
405 0.27528 0.42634 0.64569 0.07s
406 0.32856 0.44752 0.73418 0.07s
407 0.28308 0.43655 0.64844 0.07s
408 0.28188 0.44858 0.62838 0.07s
409 0.27042 0.44414 0.60886 0.07s
410 0.24370 0.46645 0.52247 0.07s
411 0.23683 0.51269 0.46195 0.07s
412 0.33951 0.60574 0.56048 0.07s
413 0.34017 0.54151 0.62820 0.07s
414 0.31311 0.45418 0.68939 0.07s
415 0.28273 0.46777 0.60442 0.07s
416 0.26757 0.45177 0.59226 0.07s
417 0.24874 0.45112 0.55139 0.07s
418 0.23814 0.45544 0.52287 0.07s
419 0.26691 0.44750 0.59644 0.07s
420 0.28572 0.48047 0.59468 0.07s
421 0.26495 0.44102 0.60076 0.07s
422 0.22392 0.44568 0.50241 0.07s
423 0.24449 0.48554 0.50355 0.07s
424 0.29273 0.47288 0.61903 0.07s
425 0.28013 0.46423 0.60343 0.07s
426 0.27412 0.45483 0.60268 0.07s
427 0.29224 0.47421 0.61627 0.07s
428 0.27175 0.46039 0.59027 0.07s
429 0.30595 0.45150 0.67763 0.07s
430 0.31193 0.46102 0.67661 0.07s
431 0.24822 0.46877 0.52952 0.07s
432 0.34269 0.46957 0.72980 0.07s
433 0.24840 0.44646 0.55639 0.07s
434 0.24576 0.44358 0.55403 0.07s
435 0.31080 0.45086 0.68933 0.07s
436 0.25938 0.42198 0.61468 0.07s
437 0.25322 0.42926 0.58989 0.07s
438 0.26241 0.42709 0.61442 0.07s
439 0.24867 0.44043 0.56460 0.07s
440 0.28484 0.44813 0.63560 0.07s
441 0.25486 0.46083 0.55306 0.07s
442 0.21297 0.46471 0.45829 0.07s
443 0.22737 0.45175 0.50331 0.07s
444 0.24330 0.44281 0.54945 0.07s
445 0.23760 0.45851 0.51819 0.07s
446 0.22834 0.44689 0.51095 0.07s
447 0.21482 0.45759 0.46946 0.07s
448 0.23983 0.45312 0.52928 0.07s
449 0.25846 0.45133 0.57267 0.07s
450 0.23951 0.46230 0.51809 0.07s
451 0.22870 0.46096 0.49614 0.07s
452 0.20459 0.46817 0.43699 0.07s
453 0.23489 0.48075 0.48858 0.07s
454 0.27666 0.46835 0.59072 0.07s
455 0.24017 0.46659 0.51474 0.07s
456 0.21907 0.46259 0.47357 0.07s
457 0.24582 0.45279 0.54290 0.07s
458 0.24761 0.44762 0.55317 0.07s
459 0.18875 0.45340 0.41629 0.07s
460 0.24538 0.47123 0.52072 0.07s
461 0.23545 0.49487 0.47577 0.07s
462 0.23300 0.48535 0.48007 0.07s
463 0.24772 0.44758 0.55346 0.07s
464 0.23243 0.43784 0.53086 0.07s
465 0.25702 0.43221 0.59468 0.07s
466 0.19099 0.44160 0.43249 0.07s
467 0.23484 0.43298 0.54239 0.07s
468 0.18308 0.44824 0.40845 0.07s
469 0.27251 0.46893 0.58112 0.07s
470 0.24438 0.47752 0.51176 0.07s
471 0.23500 0.45976 0.51115 0.07s
472 0.27085 0.42785 0.63305 0.07s
473 0.21651 0.43996 0.49212 0.07s
474 0.24944 0.44126 0.56530 0.07s
475 0.20664 0.45797 0.45120 0.07s
476 0.25204 0.45157 0.55815 0.07s
477 0.21569 0.47227 0.45670 0.07s
478 0.24468 0.47455 0.51560 0.07s
479 0.23948 0.46275 0.51751 0.07s
480 0.24532 0.45017 0.54494 0.07s
481 0.24462 0.46520 0.52584 0.07s
482 0.16492 0.46467 0.35491 0.07s
483 0.21965 0.46164 0.47580 0.07s
484 0.24143 0.46169 0.52293 0.07s
485 0.20922 0.46408 0.45082 0.07s
486 0.17755 0.45991 0.38605 0.07s
487 0.21033 0.44448 0.47319 0.07s
488 0.22397 0.44190 0.50684 0.07s
489 0.25043 0.44381 0.56427 0.07s
490 0.22963 0.43021 0.53376 0.07s
491 0.26223 0.45585 0.57526 0.07s
492 0.29118 0.44817 0.64971 0.07s
493 0.25956 0.43620 0.59505 0.07s
494 0.21318 0.43985 0.48466 0.07s
495 0.21388 0.45242 0.47275 0.07s
496 0.28768 0.45571 0.63129 0.07s
497 0.24938 0.44760 0.55714 0.07s
498 0.21445 0.43744 0.49024 0.07s
499 0.24479 0.43229 0.56626 0.07s
500 0.20827 0.45034 0.46248 0.07s
501 0.25749 0.45247 0.56908 0.07s
502 0.24979 0.45602 0.54775 0.07s
503 0.22320 0.43535 0.51270 0.07s
504 0.20482 0.43726 0.46842 0.07s
505 0.23770 0.42847 0.55476 0.07s
506 0.27851 0.44184 0.63036 0.07s
507 0.24268 0.42547 0.57038 0.07s
508 0.23441 0.43474 0.53920 0.07s
509 0.23927 0.44503 0.53765 0.07s
510 0.20998 0.44080 0.47636 0.07s
511 0.21784 0.43834 0.49696 0.07s
512 0.20176 0.44015 0.45839 0.07s
513 0.20662 0.43669 0.47316 0.07s
514 0.21999 0.43927 0.50081 0.07s
515 0.24010 0.41942 0.57246 0.07s
516 0.24278 0.44291 0.54814 0.07s
517 0.18424 0.47399 0.38869 0.07s
518 0.26589 0.47273 0.56245 0.07s
519 0.21820 0.46271 0.47157 0.07s
520 0.22907 0.44839 0.51088 0.07s
521 0.23157 0.43261 0.53528 0.07s
522 0.28391 0.44421 0.63915 0.07s
523 0.21887 0.45050 0.48585 0.07s
524 0.22148 0.45483 0.48694 0.07s
525 0.23828 0.46107 0.51679 0.07s
526 0.23663 0.43527 0.54363 0.07s
527 0.23841 0.42957 0.55501 0.07s
528 0.23653 0.45900 0.51531 0.07s
529 0.27577 0.42702 0.64579 0.07s
530 0.24289 0.45065 0.53897 0.07s
531 0.23049 0.45779 0.50348 0.07s
532 0.25596 0.47719 0.53639 0.07s
533 0.26055 0.46918 0.55534 0.07s
534 0.26208 0.43017 0.60924 0.07s
535 0.26542 0.41364 0.64167 0.07s
536 0.18030 0.44686 0.40349 0.07s
537 0.18955 0.43403 0.43672 0.07s
538 0.23088 0.43167 0.53486 0.07s
539 0.21868 0.43023 0.50827 0.07s
540 0.21251 0.43269 0.49113 0.07s
541 0.21065 0.44182 0.47678 0.07s
542 0.21392 0.42816 0.49963 0.07s
543 0.18529 0.41556 0.44588 0.07s
544 0.21214 0.43173 0.49137 0.07s
545 0.25871 0.45791 0.56498 0.07s
546 0.23142 0.43881 0.52738 0.07s
547 0.19117 0.43313 0.44136 0.07s
548 0.21113 0.43204 0.48867 0.07s
549 0.21556 0.42470 0.50754 0.07s
550 0.24815 0.44838 0.55344 0.07s
551 0.24773 0.43663 0.56737 0.07s
552 0.21898 0.45593 0.48030 0.07s
553 0.29728 0.46862 0.63438 0.07s
554 0.29514 0.47950 0.61552 0.07s
555 0.22709 0.44663 0.50844 0.07s
556 0.20107 0.44634 0.45049 0.07s
557 0.22049 0.43425 0.50775 0.07s
558 0.23905 0.43409 0.55069 0.07s
559 0.24581 0.43370 0.56678 0.07s
560 0.21293 0.46280 0.46009 0.07s
561 0.21811 0.45958 0.47459 0.07s
562 0.22609 0.43416 0.52074 0.07s
563 0.22308 0.42225 0.52833 0.07s
564 0.27005 0.43430 0.62180 0.07s
565 0.29869 0.43072 0.69346 0.07s
566 0.22459 0.42568 0.52760 0.07s
567 0.18578 0.43736 0.42478 0.07s
568 0.22702 0.44297 0.51249 0.07s
569 0.19801 0.46861 0.42256 0.07s
570 0.22026 0.45252 0.48674 0.07s
571 0.19263 0.45084 0.42727 0.07s
572 0.20158 0.45748 0.44062 0.07s
573 0.21823 0.42700 0.51106 0.07s
574 0.20764 0.41336 0.50232 0.07s
575 0.23712 0.42993 0.55152 0.07s
576 0.23263 0.42912 0.54211 0.07s
577 0.21464 0.43908 0.48883 0.07s
578 0.19913 0.43930 0.45329 0.07s
579 0.28504 0.43994 0.64791 0.07s
580 0.26157 0.43844 0.59658 0.07s
581 0.22064 0.43900 0.50260 0.07s
582 0.20347 0.44471 0.45754 0.07s
583 0.25880 0.44327 0.58386 0.07s
584 0.22861 0.43316 0.52777 0.07s
585 0.16939 0.42294 0.40050 0.07s
586 0.24237 0.44651 0.54281 0.07s
587 0.24220 0.45819 0.52860 0.07s
588 0.20742 0.43567 0.47610 0.07s
589 0.27741 0.43170 0.64258 0.07s
590 0.23911 0.42921 0.55708 0.07s
591 0.21666 0.43040 0.50339 0.07s
592 0.23259 0.45737 0.50854 0.07s
593 0.24445 0.44970 0.54359 0.07s
594 0.18812 0.45249 0.41573 0.07s
595 0.22617 0.46937 0.48185 0.07s
596 0.20828 0.46100 0.45181 0.07s
597 0.21980 0.45138 0.48696 0.07s
598 0.17499 0.44168 0.39620 0.07s
599 0.22756 0.43967 0.51756 0.07s
600 0.27039 0.44473 0.60797 0.07s
601 0.31173 0.45333 0.68765 0.07s
602 0.20045 0.45075 0.44470 0.07s
603 0.20441 0.45027 0.45397 0.07s
604 0.23597 0.44745 0.52737 0.07s
605 0.20160 0.46549 0.43308 0.07s
606 0.27416 0.44565 0.61519 0.07s
607 0.23086 0.44470 0.51913 0.07s
608 0.25097 0.42239 0.59418 0.07s
609 0.22315 0.42996 0.51901 0.07s
610 0.22879 0.44222 0.51736 0.07s
611 0.29453 0.44709 0.65876 0.07s
612 0.29873 0.43968 0.67943 0.07s
613 0.30241 0.46971 0.64382 0.07s
614 0.24003 0.46307 0.51834 0.07s
615 0.21684 0.44924 0.48268 0.07s
616 0.22846 0.44308 0.51561 0.07s
617 0.19169 0.43746 0.43820 0.07s
618 0.23868 0.43730 0.54581 0.07s
619 0.18659 0.42876 0.43518 0.07s
620 0.23861 0.42752 0.55812 0.07s
621 0.18462 0.44842 0.41172 0.07s
622 0.24345 0.44269 0.54992 0.07s
623 0.22988 0.42946 0.53528 0.07s
624 0.22179 0.45058 0.49224 0.07s
625 0.18881 0.46421 0.40673 0.07s
626 0.22483 0.42815 0.52511 0.07s
627 0.26532 0.42310 0.62709 0.07s
628 0.21141 0.42468 0.49781 0.07s
629 0.22400 0.45300 0.49449 0.07s
630 0.23447 0.44442 0.52758 0.07s
631 0.26402 0.45169 0.58451 0.07s
632 0.21863 0.43816 0.49897 0.07s
633 0.19179 0.43320 0.44272 0.07s
634 0.19583 0.44048 0.44459 0.07s
635 0.25748 0.42705 0.60292 0.07s
636 0.18728 0.42853 0.43702 0.07s
637 0.17946 0.43365 0.41383 0.07s
638 0.26953 0.43048 0.62613 0.07s
639 0.21890 0.44171 0.49557 0.07s
640 0.26963 0.43635 0.61792 0.07s
641 0.26082 0.42832 0.60893 0.07s
642 0.21125 0.42109 0.50168 0.07s
643 0.20699 0.42074 0.49196 0.07s
644 0.20986 0.41552 0.50505 0.07s
645 0.22345 0.42583 0.52475 0.07s
646 0.21122 0.43152 0.48949 0.07s
647 0.21433 0.44533 0.48129 0.07s
648 0.21245 0.42685 0.49772 0.07s
649 0.22348 0.42412 0.52692 0.07s
650 0.24030 0.44576 0.53907 0.07s
651 0.24067 0.43711 0.55058 0.07s
652 0.24170 0.42993 0.56217 0.07s
653 0.19423 0.43177 0.44985 0.07s
654 0.20498 0.41887 0.48937 0.07s
655 0.22304 0.44878 0.49698 0.07s
656 0.30119 0.44368 0.67883 0.07s
657 0.26291 0.44226 0.59447 0.07s
658 0.23425 0.42294 0.55386 0.07s
659 0.21815 0.42972 0.50766 0.07s
660 0.19842 0.43750 0.45353 0.07s
661 0.26761 0.44198 0.60548 0.07s
662 0.20943 0.41902 0.49981 0.07s
663 0.20805 0.41438 0.50208 0.07s
664 0.20051 0.40612 0.49371 0.07s
665 0.17147 0.42558 0.40292 0.07s
666 0.23564 0.43556 0.54102 0.07s
667 0.20853 0.42664 0.48876 0.07s
668 0.19376 0.42233 0.45880 0.07s
669 0.27957 0.43730 0.63931 0.07s
670 0.20470 0.42879 0.47739 0.07s
671 0.25382 0.45061 0.56330 0.07s
672 0.25914 0.44412 0.58348 0.07s
673 0.28147 0.45976 0.61222 0.07s
674 0.25246 0.43550 0.57970 0.07s
675 0.16668 0.46238 0.36048 0.07s
676 0.26722 0.43940 0.60816 0.07s
677 0.34711 0.42641 0.81403 0.07s
678 0.24523 0.41864 0.58577 0.07s
679 0.27670 0.43007 0.64339 0.07s
680 0.24927 0.44678 0.55792 0.07s
681 0.22301 0.44035 0.50643 0.07s
682 0.25157 0.40246 0.62508 0.07s
683 0.26052 0.40693 0.64021 0.07s
684 0.19566 0.41965 0.46624 0.07s
685 0.28388 0.43992 0.64529 0.07s
686 0.23976 0.44163 0.54290 0.07s
687 0.20402 0.41588 0.49057 0.07s
688 0.22963 0.41554 0.55260 0.07s
689 0.21240 0.42022 0.50545 0.07s
690 0.26267 0.41901 0.62688 0.07s
691 0.20318 0.42841 0.47426 0.07s
692 0.21714 0.44666 0.48613 0.07s
693 0.22826 0.44481 0.51317 0.07s
694 0.22258 0.44266 0.50284 0.07s
695 0.21964 0.43083 0.50981 0.07s
696 0.23772 0.43574 0.54555 0.07s
697 0.23714 0.43272 0.54802 0.07s
698 0.26039 0.44783 0.58144 0.07s
699 0.23444 0.43982 0.53304 0.07s
700 0.24256 0.44299 0.54756 0.07s
701 0.20559 0.46063 0.44631 0.07s
702 0.27515 0.43415 0.63376 0.07s
703 0.19014 0.42470 0.44770 0.07s
704 0.22255 0.41165 0.54064 0.07s
705 0.23931 0.40479 0.59119 0.07s
706 0.21614 0.39734 0.54397 0.07s
707 0.22440 0.43024 0.52157 0.07s
708 0.20089 0.43940 0.45720 0.07s
709 0.23318 0.43985 0.53014 0.07s
710 0.22402 0.45278 0.49476 0.07s
711 0.21977 0.44889 0.48959 0.07s
712 0.19993 0.43389 0.46078 0.07s
713 0.21979 0.44350 0.49557 0.07s
714 0.21117 0.44897 0.47033 0.07s
715 0.24138 0.48032 0.50255 0.07s
716 0.27467 0.45391 0.60511 0.07s
717 0.26082 0.43075 0.60551 0.07s
718 0.24678 0.42740 0.57739 0.07s
719 0.23230 0.44061 0.52723 0.07s
720 0.22156 0.46019 0.48145 0.07s
721 0.22795 0.44324 0.51429 0.07s
722 0.25967 0.42638 0.60902 0.07s
723 0.23090 0.42654 0.54135 0.07s
724 0.22917 0.41999 0.54566 0.07s
725 0.25154 0.42368 0.59369 0.07s
726 0.19001 0.41472 0.45816 0.07s
727 0.21932 0.40486 0.54170 0.07s
728 0.24431 0.42038 0.58117 0.07s
729 0.20935 0.47704 0.43885 0.07s
730 0.17824 0.44159 0.40363 0.07s
731 0.20418 0.42672 0.47848 0.07s
732 0.21484 0.46191 0.46510 0.07s
733 0.21779 0.44479 0.48965 0.07s
734 0.19558 0.43905 0.44545 0.07s
735 0.18982 0.44243 0.42904 0.07s
736 0.25803 0.42664 0.60479 0.07s
737 0.25537 0.44528 0.57350 0.07s
738 0.20422 0.46192 0.44212 0.07s
739 0.20555 0.43356 0.47410 0.07s
740 0.23670 0.47355 0.49985 0.07s
741 0.17633 0.46017 0.38319 0.07s
742 0.25382 0.43715 0.58061 0.07s
743 0.20433 0.44091 0.46344 0.07s
744 0.23004 0.43711 0.52627 0.07s
745 0.22096 0.43544 0.50745 0.07s
746 0.20635 0.43242 0.47720 0.07s
747 0.18377 0.42471 0.43270 0.07s
748 0.20172 0.41825 0.48231 0.07s
749 0.19813 0.42811 0.46281 0.07s
750 0.21774 0.44137 0.49333 0.07s
751 0.20328 0.44077 0.46120 0.07s
752 0.29008 0.43734 0.66327 0.07s
753 0.23332 0.41535 0.56175 0.07s
754 0.21490 0.41654 0.51592 0.07s
755 0.21130 0.42120 0.50166 0.07s
756 0.24442 0.41822 0.58442 0.07s
757 0.20270 0.41924 0.48350 0.07s
758 0.25551 0.43359 0.58929 0.07s
759 0.21029 0.43236 0.48637 0.07s
760 0.21732 0.42683 0.50916 0.07s
761 0.23883 0.41080 0.58137 0.07s
762 0.26432 0.42175 0.62672 0.07s
763 0.19578 0.44495 0.44000 0.07s
764 0.23593 0.41759 0.56499 0.07s
765 0.20662 0.41156 0.50203 0.07s
766 0.26315 0.40629 0.64769 0.07s
767 0.22409 0.42287 0.52991 0.07s
768 0.20701 0.43443 0.47651 0.07s
769 0.30914 0.44037 0.70200 0.07s
770 0.20508 0.43819 0.46800 0.07s
771 0.20605 0.41256 0.49943 0.07s
772 0.21868 0.41493 0.52702 0.07s
773 0.21349 0.41984 0.50849 0.07s
774 0.20149 0.42698 0.47190 0.07s
775 0.24926 0.43004 0.57961 0.07s
776 0.26595 0.45492 0.58461 0.07s
777 0.22627 0.44687 0.50633 0.07s
778 0.21571 0.45411 0.47503 0.07s
779 0.22396 0.44351 0.50497 0.07s
780 0.25525 0.44300 0.57619 0.07s
781 0.21442 0.43331 0.49483 0.07s
782 0.19859 0.41981 0.47306 0.07s
783 0.20878 0.42768 0.48817 0.07s
784 0.19991 0.43459 0.46000 0.07s
785 0.15625 0.40467 0.38612 0.07s
786 0.23745 0.40709 0.58327 0.07s
787 0.19458 0.40245 0.48349 0.07s
788 0.20836 0.44701 0.46613 0.07s
789 0.21330 0.41614 0.51256 0.07s
790 0.18997 0.42792 0.44393 0.07s
791 0.26484 0.42396 0.62466 0.07s
792 0.23165 0.40914 0.56619 0.07s
793 0.20745 0.41873 0.49542 0.07s
794 0.22044 0.42177 0.52266 0.07s
795 0.21342 0.41146 0.51869 0.07s
796 0.25210 0.44385 0.56800 0.07s
797 0.28360 0.45555 0.62253 0.07s
798 0.21576 0.43810 0.49248 0.07s
799 0.27915 0.43144 0.64702 0.07s
800 0.25284 0.42080 0.60086 0.07s
801 0.22291 0.45405 0.49095 0.07s
802 0.23786 0.50280 0.47307 0.07s
803 0.23237 0.43454 0.53476 0.07s
804 0.19984 0.42790 0.46701 0.07s
805 0.23433 0.42064 0.55707 0.07s
806 0.27557 0.43430 0.63451 0.07s
807 0.19160 0.43666 0.43878 0.07s
808 0.20892 0.44405 0.47048 0.07s
809 0.20164 0.44346 0.45468 0.07s
810 0.17076 0.42539 0.40142 0.07s
811 0.18652 0.42108 0.44295 0.07s
812 0.22667 0.42156 0.53768 0.07s
813 0.19022 0.41265 0.46097 0.07s
814 0.22239 0.41478 0.53617 0.07s
815 0.23285 0.42850 0.54341 0.07s
816 0.25040 0.43944 0.56982 0.07s
817 0.18970 0.42595 0.44537 0.07s
818 0.21494 0.41352 0.51977 0.07s
819 0.28834 0.40612 0.70999 0.07s
820 0.21877 0.44153 0.49547 0.07s
821 0.24200 0.44020 0.54975 0.07s
822 0.20524 0.43434 0.47254 0.07s
823 0.19871 0.42421 0.46842 0.07s
824 0.18513 0.40789 0.45388 0.07s
825 0.20147 0.41231 0.48863 0.07s
826 0.16611 0.42901 0.38720 0.07s
827 0.22465 0.42422 0.52956 0.07s
828 0.21270 0.42648 0.49873 0.07s
829 0.21482 0.43020 0.49936 0.07s
830 0.20974 0.41857 0.50108 0.07s
831 0.20964 0.41564 0.50437 0.07s
832 0.24358 0.40750 0.59773 0.07s
833 0.24498 0.40732 0.60144 0.07s
834 0.28161 0.41438 0.67960 0.07s
835 0.23244 0.41639 0.55822 0.07s
836 0.20242 0.42378 0.47767 0.07s
837 0.21067 0.41855 0.50334 0.07s
838 0.25045 0.42210 0.59333 0.07s
839 0.24850 0.40154 0.61887 0.07s
840 0.30553 0.39464 0.77420 0.07s
841 0.27301 0.43236 0.63143 0.07s
842 0.27241 0.47328 0.57558 0.07s
843 0.22265 0.43858 0.50765 0.07s
844 0.27330 0.42994 0.63567 0.07s
845 0.21589 0.43570 0.49550 0.07s
846 0.28200 0.43534 0.64776 0.07s
847 0.20628 0.45321 0.45515 0.07s
848 0.23778 0.42470 0.55989 0.07s
849 0.28940 0.42618 0.67906 0.07s
850 0.24154 0.44415 0.54383 0.07s
851 0.21206 0.45024 0.47099 0.07s
852 0.24634 0.43161 0.57075 0.07s
853 0.19946 0.42489 0.46945 0.07s
854 0.25602 0.41657 0.61460 0.07s
855 0.22074 0.43644 0.50578 0.07s
856 0.24786 0.44629 0.55539 0.07s
857 0.22446 0.43286 0.51854 0.07s
858 0.23771 0.44568 0.53336 0.07s
859 0.28601 0.43113 0.66339 0.07s
860 0.27818 0.42883 0.64870 0.07s
861 0.20956 0.43457 0.48222 0.07s
862 0.21401 0.42141 0.50785 0.07s
863 0.22582 0.43853 0.51493 0.07s
864 0.24930 0.43685 0.57068 0.07s
865 0.22640 0.41352 0.54749 0.07s
866 0.29203 0.46217 0.63188 0.07s
867 0.25690 0.42236 0.60824 0.07s
868 0.23565 0.41091 0.57347 0.07s
869 0.24921 0.42158 0.59112 0.07s
870 0.21044 0.41256 0.51008 0.07s
871 0.22206 0.46395 0.47863 0.07s
872 0.22260 0.45345 0.49092 0.07s
873 0.27492 0.42999 0.63936 0.07s
874 0.30409 0.43854 0.69342 0.07s
875 0.21402 0.40910 0.52316 0.07s
876 0.24706 0.41082 0.60139 0.07s
877 0.25128 0.40164 0.62563 0.07s
878 0.39526 0.48841 0.80928 0.07s
879 0.33254 0.48919 0.67979 0.07s
880 0.24714 0.41348 0.59770 0.07s
881 0.31971 0.42122 0.75903 0.07s
882 0.24029 0.42788 0.56159 0.07s
883 0.21190 0.41783 0.50715 0.07s
884 0.24645 0.41220 0.59790 0.07s
885 0.28555 0.40731 0.70107 0.07s
886 0.24087 0.42092 0.57225 0.07s
887 0.23765 0.39640 0.59951 0.07s
888 0.31768 0.42345 0.75021 0.07s
889 0.23866 0.42189 0.56569 0.07s
890 0.24288 0.42358 0.57339 0.07s
891 0.27866 0.44541 0.62564 0.07s
892 0.28957 0.43188 0.67049 0.07s
893 0.25909 0.43954 0.58945 0.07s
894 0.26633 0.43796 0.60812 0.07s
895 0.30069 0.42190 0.71270 0.07s
896 0.25784 0.43238 0.59633 0.07s
897 0.23803 0.40550 0.58700 0.07s
898 0.22895 0.39855 0.57447 0.07s
899 0.30530 0.40128 0.76082 0.07s
900 0.23539 0.43602 0.53985 0.07s
901 0.21245 0.42880 0.49544 0.07s
902 0.24065 0.41897 0.57437 0.07s
903 0.25600 0.43677 0.58612 0.07s
904 0.28678 0.42198 0.67961 0.07s
905 0.33774 0.42626 0.79233 0.07s
906 0.21727 0.42249 0.51424 0.07s
907 0.32562 0.40891 0.79631 0.07s
908 0.22855 0.40161 0.56909 0.07s
909 0.21988 0.39810 0.55233 0.07s
910 0.30064 0.39718 0.75692 0.07s
911 0.23910 0.42843 0.55807 0.07s
912 0.23784 0.42223 0.56329 0.07s
913 0.30270 0.46103 0.65657 0.07s
914 0.26241 0.42238 0.62126 0.07s
915 0.31278 0.43166 0.72460 0.07s
916 0.25197 0.43218 0.58303 0.07s
917 0.23779 0.43228 0.55008 0.07s
918 0.25139 0.42590 0.59027 0.07s
919 0.31869 0.42922 0.74247 0.07s
920 0.30072 0.43867 0.68552 0.07s
921 0.23378 0.44018 0.53110 0.07s
922 0.23520 0.43823 0.53670 0.07s
923 0.34008 0.42165 0.80654 0.07s
924 0.24282 0.41002 0.59223 0.07s
925 0.27036 0.41547 0.65073 0.07s
926 0.33316 0.45881 0.72614 0.07s
927 0.23982 0.44478 0.53918 0.07s
928 0.26511 0.41130 0.64456 0.07s
929 0.22791 0.44909 0.50749 0.07s
930 0.21767 0.44222 0.49223 0.07s
931 0.26695 0.44260 0.60314 0.07s
932 0.24726 0.43384 0.56992 0.07s
933 0.29326 0.42709 0.68665 0.07s
934 0.23513 0.42035 0.55937 0.07s
935 0.24490 0.43014 0.56935 0.07s
936 0.25245 0.42003 0.60103 0.07s
937 0.17441 0.41959 0.41567 0.07s
938 0.26269 0.40063 0.65570 0.07s
939 0.26174 0.40341 0.64881 0.07s
940 0.26920 0.42401 0.63489 0.07s
941 0.23298 0.42410 0.54936 0.07s
942 0.18038 0.41777 0.43178 0.07s
943 0.25774 0.40724 0.63289 0.07s
944 0.23770 0.40814 0.58241 0.07s
945 0.25186 0.42651 0.59050 0.07s
946 0.22618 0.42848 0.52787 0.07s
947 0.26443 0.43803 0.60368 0.07s
948 0.27460 0.41995 0.65388 0.07s
949 0.29764 0.46933 0.63418 0.07s
950 0.25405 0.42560 0.59692 0.07s
951 0.23133 0.46761 0.49470 0.07s
952 0.24529 0.47069 0.52112 0.07s
953 0.31172 0.41477 0.75155 0.07s
954 0.26669 0.41688 0.63973 0.07s
955 0.28703 0.42916 0.66881 0.07s
956 0.27013 0.43636 0.61905 0.07s
957 0.22571 0.42610 0.52970 0.07s
958 0.29437 0.41542 0.70861 0.07s
959 0.20655 0.41006 0.50371 0.07s
960 0.31027 0.41469 0.74818 0.07s
961 0.25805 0.42560 0.60632 0.07s
962 0.19878 0.43512 0.45683 0.07s
963 0.26587 0.44651 0.59543 0.07s
964 0.24604 0.44828 0.54886 0.07s
965 0.35070 0.45990 0.76256 0.07s
966 0.26098 0.42964 0.60745 0.07s
967 0.23525 0.41393 0.56834 0.07s
968 0.23913 0.42422 0.56369 0.07s
969 0.34878 0.43903 0.79444 0.07s
970 0.24191 0.42863 0.56439 0.07s
971 0.22690 0.44083 0.51472 0.07s
972 0.26918 0.42489 0.63352 0.07s
973 0.23091 0.42296 0.54593 0.07s
974 0.30677 0.45031 0.68125 0.07s
975 0.21073 0.45398 0.46418 0.07s
976 0.21571 0.42343 0.50943 0.07s
977 0.22910 0.41601 0.55071 0.07s
978 0.28315 0.40972 0.69108 0.07s
979 0.30119 0.42295 0.71212 0.07s
980 0.21274 0.42725 0.49792 0.07s
981 0.34022 0.43183 0.78786 0.07s
982 0.21856 0.42180 0.51817 0.07s
983 0.28581 0.41808 0.68362 0.07s
984 0.21923 0.41365 0.53000 0.07s
985 0.25499 0.42969 0.59342 0.07s
986 0.24146 0.42374 0.56982 0.07s
987 0.23951 0.42559 0.56277 0.07s
988 0.19750 0.44588 0.44295 0.07s
989 0.28875 0.45697 0.63187 0.07s
990 0.27422 0.45558 0.60191 0.07s
991 0.29414 0.44186 0.66567 0.07s
992 0.32893 0.47505 0.69242 0.07s
993 0.29469 0.44956 0.65551 0.07s
994 0.26665 0.45267 0.58907 0.07s
995 0.32114 0.43043 0.74608 0.07s
996 0.23907 0.42860 0.55780 0.07s
997 0.28725 0.43183 0.66518 0.07s
998 0.27260 0.42898 0.63546 0.07s
999 0.23333 0.42239 0.55240 0.07s
1000 0.21655 0.41123 0.52660 0.07s
Out[13]:
NeuralNet(X_tensor_type=None,
batch_iterator_test=<nolearn.lasagne.base.BatchIterator object at 0x7f0704a124a8>,
batch_iterator_train=<nolearn.lasagne.base.BatchIterator object at 0x7f0704a12ac8>,
custom_score=None, dropout1_p=0.4,
hidden1_nonlinearity=<function tanh at 0x7f06bbcc3510>,
hidden1_num_units=1980, input_shape=(None, 1980),
layers=[('input', <class 'lasagne.layers.input.InputLayer'>), ('hidden1', <class 'lasagne.layers.dense.DenseLayer'>), ('dropout1', <class 'lasagne.layers.noise.DropoutLayer'>), ('nonlinear', <class 'lasagne.layers.dense.NonlinearityLayer'>), ('output', <class 'lasagne.layers.dense.DenseLayer'>)],
loss=None, max_epochs=1000, more_params={},
objective=<function objective at 0x7f06bb7509d8>,
objective_loss_function=<function squared_error at 0x7f06bbce6d08>,
on_epoch_finished=[<nolearn.lasagne.handlers.PrintLog object at 0x7f06b818bfd0>],
on_training_finished=[],
on_training_started=[<nolearn.lasagne.handlers.PrintLayerInfo object at 0x7f06b818bb00>],
output_nonlinearity=None, output_num_units=1, regression=True,
train_split=<nolearn.lasagne.base.TrainSplit object at 0x7f0704a12240>,
update=<function nesterov_momentum at 0x7f06bbcf0598>,
update_learning_rate=0.01, update_momentum=0.95,
use_label_encoder=False, verbose=1,
y_tensor_type=TensorType(float32, matrix))
In [14]:
# Evaluate the neural network's predictions on the test set.
nn1_preds = net1.predict(x_test)
nn1_mse = float(mean_squared_error(nn1_preds, y_test))
# Note: this "r2" is the Pearson correlation coefficient, not the coefficient of determination.
nn1_r2 = float(sps.pearsonr(nn1_preds, y_test.reshape(y_test.shape[0], 1))[0][0])
cf.scatterplot_results(nn1_preds, y_test, nn1_mse, nn1_r2, DRUG, 'Neural Net', figsize=std)
plt.savefig('figures/{0} neural_net_poster.pdf'.format(DRUG), bbox_inches='tight')
/home/ericmjl/anaconda3/lib/python3.4/site-packages/matplotlib/collections.py:590: FutureWarning: elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
if self._edgecolors == str('face'):
In [15]:
# Save the models to disk
joblib.dump(rfr, 'models/{0} random_forest.pkl'.format(DRUG))
joblib.dump(abr, 'models/{0} adaboost.pkl'.format(DRUG))
joblib.dump(etr, 'models/{0} extratrees.pkl'.format(DRUG))
joblib.dump(gbr, 'models/{0} gradient_boost.pkl'.format(DRUG))
joblib.dump(bgr, 'models/{0} bagging.pkl'.format(DRUG))
joblib.dump(net1, 'models/{0} neural_network.pkl'.format(DRUG))
Out[15]:
['models/FPV neural_network.pkl',
'models/FPV neural_network.pkl_01.npy',
'models/FPV neural_network.pkl_02.npy',
'models/FPV neural_network.pkl_03.npy',
'models/FPV neural_network.pkl_04.npy',
'models/FPV neural_network.pkl_05.npy',
'models/FPV neural_network.pkl_06.npy',
'models/FPV neural_network.pkl_07.npy']
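The saved models can be restored later with joblib.load, e.g.:

rfr = joblib.load('models/{0} random_forest.pkl'.format(DRUG))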