As an alternative to Spearmint and Gaussian-process models, try building a tree-based regression model from the evaluated models and then optimizing parameters against that model
In [1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import makemodel
import MySQLdb
from MySQLdb.cursors import DictCursor
%matplotlib inline
In [2]:
def getcursor(host, passwd, db):
    '''Open a fresh MySQL connection and return a DictCursor on it.

    A new connection is made on every call; this guards against
    dropped connections. Autocommit is enabled so writes are
    immediately visible to other workers.
    '''
    connection = MySQLdb.connect(host=host, user="opter", passwd=passwd, db=db)
    connection.autocommit(True)
    return connection.cursor(DictCursor)
In [3]:
# Pull every evaluated (non-pending) model from the optimization database.
# NOTE(review): host and password are hardcoded in the notebook — move to
# environment variables or getpass before sharing.
cursor = getcursor('35.196.158.205','optimize','opt2')
cursor.execute('SELECT * FROM params WHERE id != "REQUESTED"')
rows = cursor.fetchall()
data = pd.DataFrame(list(rows))
#make errors zero - appropriate if error is due to parameters
data.loc[data.id == 'ERROR','R'] = 0
data.loc[data.id == 'ERROR','rmse'] = 0
data.loc[data.id == 'ERROR','top'] = 0
data.loc[data.id == 'ERROR','auc'] = 0
# Combined objective used as the regression target below: R * top
# (presumably correlation times top-pose accuracy — confirm).
data['Rtop'] = data.R*data.top
# Drop rows with missing values, then coerce columns to numeric where possible.
# NOTE(review): the positional axis in dropna('index') and errors='ignore' in
# to_numeric are deprecated in newer pandas — prefer dropna(axis='index') and
# explicit per-column conversion when upgrading.
data = data.dropna('index').apply(pd.to_numeric, errors='ignore')
In [4]:
data.shape
Out[4]:
(3665, 77)
In [5]:
data.columns
Out[5]:
Index([u'R', u'auc', u'balanced', u'base_lr_exp', u'conv1_func', u'conv1_init',
u'conv1_norm', u'conv1_size', u'conv1_stride', u'conv1_width',
u'conv2_func', u'conv2_init', u'conv2_norm', u'conv2_size',
u'conv2_stride', u'conv2_width', u'conv3_func', u'conv3_init',
u'conv3_norm', u'conv3_size', u'conv3_stride', u'conv3_width',
u'conv4_func', u'conv4_init', u'conv4_norm', u'conv4_size',
u'conv4_stride', u'conv4_width', u'conv5_func', u'conv5_init',
u'conv5_norm', u'conv5_size', u'conv5_stride', u'conv5_width',
u'fc_affinity_func', u'fc_affinity_func2', u'fc_affinity_hidden',
u'fc_affinity_hidden2', u'fc_affinity_init', u'fc_pose_func',
u'fc_pose_func2', u'fc_pose_hidden', u'fc_pose_hidden2',
u'fc_pose_init', u'id', u'jitter', u'loss_delta', u'loss_gap',
u'loss_penalty', u'loss_pseudohuber', u'momentum', u'msg',
u'pool1_size', u'pool1_type', u'pool2_size', u'pool2_type',
u'pool3_size', u'pool3_type', u'pool4_size', u'pool4_type',
u'pool5_size', u'pool5_type', u'ranklossmult', u'ranklossneg',
u'resolution', u'rmse', u'seed', u'serial', u'solver', u'split',
u'stratify_affinity', u'stratify_affinity_step', u'stratify_receptor',
u'time', u'top', u'weight_decay_exp', u'Rtop'],
dtype='object')
In [6]:
import sklearn
from sklearn.ensemble import *
from sklearn.preprocessing import *
from sklearn.feature_extraction import *
In [7]:
# Columns that are results/metadata of a run rather than tunable hyperparameters.
notparams = ['R','auc','Rtop','id','msg','rmse','seed','serial','time','top']
X = data.drop(notparams,axis=1)  # feature frame: hyperparameters only
y = data.Rtop  # regression target: R * top (defined above)
In [8]:
dictvec = DictVectorizer()
In [9]:
Xv = dictvec.fit_transform(X.to_dict(orient='records'))
In [10]:
dictvec.feature_names_
Out[10]:
['balanced',
'base_lr_exp',
'conv1_func=ELU',
'conv1_func=ReLU',
'conv1_func=Sigmoid',
'conv1_func=TanH',
'conv1_func=leaky',
'conv1_init=gaussian',
'conv1_init=msra',
'conv1_init=positive_unitball',
'conv1_init=radial',
'conv1_init=radial.5',
'conv1_init=uniform',
'conv1_init=xavier',
'conv1_norm=BatchNorm',
'conv1_norm=LRN',
'conv1_norm=none',
'conv1_size',
'conv1_stride',
'conv1_width',
'conv2_func=ELU',
'conv2_func=ReLU',
'conv2_func=Sigmoid',
'conv2_func=TanH',
'conv2_func=leaky',
'conv2_init=gaussian',
'conv2_init=msra',
'conv2_init=positive_unitball',
'conv2_init=radial',
'conv2_init=radial.5',
'conv2_init=uniform',
'conv2_init=xavier',
'conv2_norm=BatchNorm',
'conv2_norm=LRN',
'conv2_norm=none',
'conv2_size',
'conv2_stride',
'conv2_width',
'conv3_func=ELU',
'conv3_func=ReLU',
'conv3_func=Sigmoid',
'conv3_func=TanH',
'conv3_func=leaky',
'conv3_init=gaussian',
'conv3_init=msra',
'conv3_init=positive_unitball',
'conv3_init=radial',
'conv3_init=radial.5',
'conv3_init=uniform',
'conv3_init=xavier',
'conv3_norm=BatchNorm',
'conv3_norm=LRN',
'conv3_norm=none',
'conv3_size',
'conv3_stride',
'conv3_width',
'conv4_func=ELU',
'conv4_func=ReLU',
'conv4_func=Sigmoid',
'conv4_func=TanH',
'conv4_func=leaky',
'conv4_init=gaussian',
'conv4_init=msra',
'conv4_init=positive_unitball',
'conv4_init=radial',
'conv4_init=radial.5',
'conv4_init=uniform',
'conv4_init=xavier',
'conv4_norm=BatchNorm',
'conv4_norm=LRN',
'conv4_norm=none',
'conv4_size',
'conv4_stride',
'conv4_width',
'conv5_func=ELU',
'conv5_func=ReLU',
'conv5_func=Sigmoid',
'conv5_func=TanH',
'conv5_func=leaky',
'conv5_init=gaussian',
'conv5_init=msra',
'conv5_init=positive_unitball',
'conv5_init=radial',
'conv5_init=radial.5',
'conv5_init=uniform',
'conv5_init=xavier',
'conv5_norm=BatchNorm',
'conv5_norm=LRN',
'conv5_norm=none',
'conv5_size',
'conv5_stride',
'conv5_width',
'fc_affinity_func2=ELU',
'fc_affinity_func2=ReLU',
'fc_affinity_func2=Sigmoid',
'fc_affinity_func2=TanH',
'fc_affinity_func2=leaky',
'fc_affinity_func=ELU',
'fc_affinity_func=ReLU',
'fc_affinity_func=Sigmoid',
'fc_affinity_func=TanH',
'fc_affinity_func=leaky',
'fc_affinity_hidden',
'fc_affinity_hidden2',
'fc_affinity_init=gaussian',
'fc_affinity_init=msra',
'fc_affinity_init=positive_unitball',
'fc_affinity_init=uniform',
'fc_affinity_init=xavier',
'fc_pose_func2=ELU',
'fc_pose_func2=ReLU',
'fc_pose_func2=Sigmoid',
'fc_pose_func2=TanH',
'fc_pose_func2=leaky',
'fc_pose_func=ELU',
'fc_pose_func=ReLU',
'fc_pose_func=Sigmoid',
'fc_pose_func=TanH',
'fc_pose_func=leaky',
'fc_pose_hidden',
'fc_pose_hidden2',
'fc_pose_init=gaussian',
'fc_pose_init=msra',
'fc_pose_init=positive_unitball',
'fc_pose_init=uniform',
'fc_pose_init=xavier',
'jitter',
'loss_delta',
'loss_gap',
'loss_penalty',
'loss_pseudohuber',
'momentum',
'pool1_size',
'pool1_type=AVE',
'pool1_type=MAX',
'pool2_size',
'pool2_type=AVE',
'pool2_type=MAX',
'pool3_size',
'pool3_type=AVE',
'pool3_type=MAX',
'pool4_size',
'pool4_type=AVE',
'pool4_type=MAX',
'pool5_size',
'pool5_type=AVE',
'pool5_type=MAX',
'ranklossmult',
'ranklossneg',
'resolution',
'solver=Adam',
'solver=SGD',
'split',
'stratify_affinity',
'stratify_affinity_step',
'stratify_receptor',
'weight_decay_exp']
In [ ]:
In [ ]:
In [11]:
rf = RandomForestRegressor(n_estimators=100)
In [12]:
rf.fit(Xv,y)
Out[12]:
RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=None,
max_features='auto', max_leaf_nodes=None,
min_impurity_decrease=0.0, min_impurity_split=None,
min_samples_leaf=1, min_samples_split=2,
min_weight_fraction_leaf=0.0, n_estimators=100, n_jobs=1,
oob_score=False, random_state=None, verbose=0, warm_start=False)
In [13]:
yfit = rf.predict(Xv)
In [14]:
sns.jointplot(x=y,y=yfit,alpha=.1)
Out[14]:
<seaborn.axisgrid.JointGrid at 0x7f7becd67c10>
In [15]:
from sklearn import datasets, linear_model
from sklearn.model_selection import *
from sklearn.metrics.scorer import make_scorer
In [16]:
cross_validate(rf, Xv, y,scoring='r2')
/usr/local/lib/python2.7/dist-packages/sklearn/utils/deprecation.py:122: FutureWarning: You are accessing a training score ('train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True
warnings.warn(*warn_args, **warn_kwargs)
Out[16]:
{'fit_time': array([12.14322901, 4.43517303, 13.35963607]),
'score_time': array([0.02453089, 0.02836895, 0.02632689]),
'test_score': array([0.65776201, 0.18114012, 0.75835595]),
'train_score': array([0.98856802, 0.99163541, 0.99502525])}
In [17]:
cvpred = cross_val_predict(rf, Xv, y, cv=3)
In [18]:
sns.jointplot(x=y,y=cvpred,alpha=.1)
Out[18]:
<seaborn.axisgrid.JointGrid at 0x7f7bf0066e90>
In [19]:
from sklearn.model_selection import RandomizedSearchCV
# Candidate values for a randomized/grid search over the random forest.
n_estimators = [10, 20, 50, 100, 200, 500]        # number of trees
max_features = ['auto', 'sqrt']                   # features considered per split
max_depth = [int(d) for d in np.linspace(10, 50, num=5)] + [None]  # None = unlimited depth
min_samples_split = [2, 5, 10]                    # min samples required to split a node
min_samples_leaf = [1, 2, 4]                      # min samples required at a leaf
bootstrap = [True, False]                         # sample with replacement per tree?
random_grid = dict(
    n_estimators=n_estimators,
    max_features=max_features,
    max_depth=max_depth,
    min_samples_split=min_samples_split,
    min_samples_leaf=min_samples_leaf,
    bootstrap=bootstrap,
)
In [20]:
rf
Out[20]:
RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=None,
max_features='auto', max_leaf_nodes=None,
min_impurity_decrease=0.0, min_impurity_split=None,
min_samples_leaf=1, min_samples_split=2,
min_weight_fraction_leaf=0.0, n_estimators=100, n_jobs=1,
oob_score=False, random_state=None, verbose=0, warm_start=False)
In [21]:
# Reduced grid: search only over the two parameters explored below.
grid = dict(n_estimators=n_estimators, max_depth=max_depth)
In [22]:
grid
Out[22]:
{'max_depth': [10, 20, 30, 40, 50, None],
'n_estimators': [10, 20, 50, 100, 200, 500]}
In [23]:
from sklearn.model_selection import GridSearchCV
# Exhaustive 3-fold CV over the reduced grid; n_jobs=-1 uses all cores.
rf_grid = GridSearchCV(estimator = rf, param_grid=grid, scoring='r2', cv = 3, verbose=2, n_jobs = -1)
In [24]:
rf_grid.fit(Xv,y)
Fitting 3 folds for each of 36 candidates, totalling 108 fits
[CV] n_estimators=10, max_depth=10 ...................................
[CV] n_estimators=10, max_depth=10 ...................................
[CV] n_estimators=10, max_depth=10 ...................................
[CV] n_estimators=20, max_depth=10 ...................................
[CV] n_estimators=20, max_depth=10 ...................................
[CV] n_estimators=20, max_depth=10 ...................................
[CV] n_estimators=50, max_depth=10 ...................................
[CV] n_estimators=50, max_depth=10 ...................................
[CV] n_estimators=50, max_depth=10 ...................................
[CV] n_estimators=100, max_depth=10 ..................................
[CV] n_estimators=100, max_depth=10 ..................................
[CV] n_estimators=100, max_depth=10 ..................................
[CV] n_estimators=200, max_depth=10 ..................................
[CV] n_estimators=200, max_depth=10 ..................................
[CV] n_estimators=200, max_depth=10 ..................................
[CV] n_estimators=500, max_depth=10 ..................................
[CV] n_estimators=500, max_depth=10 ..................................
[CV] n_estimators=500, max_depth=10 ..................................
[CV] .................... n_estimators=10, max_depth=10, total= 0.2s
[CV] n_estimators=10, max_depth=20 ...................................
[CV] n_estimators=10, max_depth=20 ...................................
[CV] n_estimators=10, max_depth=20 ...................................
[CV] n_estimators=20, max_depth=20 ...................................
[CV] n_estimators=20, max_depth=20 ...................................
[CV] .................... n_estimators=10, max_depth=10, total= 0.2s
[CV] n_estimators=20, max_depth=20 ...................................
[CV] n_estimators=50, max_depth=20 ...................................
[CV] n_estimators=50, max_depth=20 ...................................
[CV] .................... n_estimators=10, max_depth=10, total= 0.3s
[CV] n_estimators=50, max_depth=20 ...................................
[CV] .................... n_estimators=20, max_depth=10, total= 0.3s
[CV] n_estimators=100, max_depth=20 ..................................
[CV] .................... n_estimators=20, max_depth=10, total= 0.4s
[CV] n_estimators=100, max_depth=20 ..................................
[CV] .................... n_estimators=10, max_depth=20, total= 0.4s
[CV] .................... n_estimators=10, max_depth=20, total= 0.4s
[CV] n_estimators=200, max_depth=20 ..................................
[CV] n_estimators=100, max_depth=20 ..................................
[CV] .................... n_estimators=10, max_depth=20, total= 0.4s
[CV] n_estimators=200, max_depth=20 ..................................
[CV] .................... n_estimators=20, max_depth=10, total= 0.6s
[CV] n_estimators=200, max_depth=20 ..................................
[CV] .................... n_estimators=20, max_depth=20, total= 0.6s
[CV] n_estimators=500, max_depth=20 ..................................
[CV] .................... n_estimators=20, max_depth=20, total= 0.7s
[CV] .................... n_estimators=50, max_depth=10, total= 0.8s
[CV] n_estimators=500, max_depth=20 ..................................
[CV] n_estimators=500, max_depth=20 ..................................
[CV] .................... n_estimators=50, max_depth=10, total= 1.0s
[CV] n_estimators=10, max_depth=30 ...................................
[CV] .................... n_estimators=20, max_depth=20, total= 0.9s
[CV] n_estimators=10, max_depth=30 ...................................
[CV] .................... n_estimators=50, max_depth=10, total= 1.4s
[CV] n_estimators=10, max_depth=30 ...................................
[CV] .................... n_estimators=10, max_depth=30, total= 0.5s
[CV] n_estimators=20, max_depth=30 ...................................
[CV] .................... n_estimators=10, max_depth=30, total= 0.6s
[CV] n_estimators=20, max_depth=30 ...................................
[CV] ................... n_estimators=100, max_depth=10, total= 1.7s
[CV] n_estimators=20, max_depth=30 ...................................
[CV] .................... n_estimators=50, max_depth=20, total= 1.7s
[CV] ................... n_estimators=100, max_depth=10, total= 1.9s
[CV] n_estimators=50, max_depth=30 ...................................
[CV] n_estimators=50, max_depth=30 ...................................
[CV] .................... n_estimators=10, max_depth=30, total= 0.5s
[CV] n_estimators=50, max_depth=30 ...................................
[CV] .................... n_estimators=50, max_depth=20, total= 1.7s
[CV] n_estimators=100, max_depth=30 ..................................
[CV] .................... n_estimators=50, max_depth=20, total= 2.2s
[CV] n_estimators=100, max_depth=30 ..................................
[CV] .................... n_estimators=20, max_depth=30, total= 1.0s
[CV] n_estimators=100, max_depth=30 ..................................
[CV] .................... n_estimators=20, max_depth=30, total= 1.1s
[CV] n_estimators=200, max_depth=30 ..................................
[CV] ................... n_estimators=100, max_depth=10, total= 2.9s
[CV] n_estimators=200, max_depth=30 ..................................
[CV] .................... n_estimators=20, max_depth=30, total= 1.2s
[CV] n_estimators=200, max_depth=30 ..................................
[CV] ................... n_estimators=100, max_depth=20, total= 3.2s
[CV] n_estimators=500, max_depth=30 ..................................
[CV] ................... n_estimators=200, max_depth=10, total= 3.6s
[CV] n_estimators=500, max_depth=30 ..................................
[CV] ................... n_estimators=100, max_depth=20, total= 3.3s
[CV] n_estimators=500, max_depth=30 ..................................
[CV] ................... n_estimators=200, max_depth=10, total= 3.8s
[CV] n_estimators=10, max_depth=40 ...................................
[CV] .................... n_estimators=50, max_depth=30, total= 2.7s
[CV] n_estimators=10, max_depth=40 ...................................
[CV] .................... n_estimators=50, max_depth=30, total= 2.6s
[CV] n_estimators=10, max_depth=40 ...................................
[CV] ................... n_estimators=100, max_depth=20, total= 4.3s
[CV] n_estimators=20, max_depth=40 ...................................
[CV] .................... n_estimators=10, max_depth=40, total= 0.7s
[CV] n_estimators=20, max_depth=40 ...................................
[CV] .................... n_estimators=50, max_depth=30, total= 2.8s
[CV] n_estimators=20, max_depth=40 ...................................
[CV] .................... n_estimators=10, max_depth=40, total= 0.6s
[CV] n_estimators=50, max_depth=40 ...................................
[CV] .................... n_estimators=10, max_depth=40, total= 0.8s
[CV] n_estimators=50, max_depth=40 ...................................
[CV] ................... n_estimators=200, max_depth=10, total= 5.6s
[CV] n_estimators=50, max_depth=40 ...................................
[CV] .................... n_estimators=20, max_depth=40, total= 1.3s
[CV] n_estimators=100, max_depth=40 ..................................
[CV] .................... n_estimators=20, max_depth=40, total= 1.5s
[CV] n_estimators=100, max_depth=40 ..................................
[CV] .................... n_estimators=20, max_depth=40, total= 1.5s
[CV] n_estimators=100, max_depth=40 ..................................
[CV] ................... n_estimators=200, max_depth=20, total= 6.6s
[CV] n_estimators=200, max_depth=40 ..................................
[CV] ................... n_estimators=200, max_depth=20, total= 6.7s
[CV] n_estimators=200, max_depth=40 ..................................
[CV] ................... n_estimators=100, max_depth=30, total= 5.3s
[CV] n_estimators=200, max_depth=40 ..................................
[CV] ................... n_estimators=100, max_depth=30, total= 5.2s
[CV] n_estimators=500, max_depth=40 ..................................
[CV] ................... n_estimators=100, max_depth=30, total= 5.7s
[CV] n_estimators=500, max_depth=40 ..................................
[CV] .................... n_estimators=50, max_depth=40, total= 3.2s
[CV] n_estimators=500, max_depth=40 ..................................
[CV] .................... n_estimators=50, max_depth=40, total= 3.5s
[CV] n_estimators=10, max_depth=50 ...................................
[CV] ................... n_estimators=500, max_depth=10, total= 8.8s
[CV] n_estimators=10, max_depth=50 ...................................
[CV] ................... n_estimators=200, max_depth=20, total= 8.7s
[CV] n_estimators=10, max_depth=50 ...................................
[CV] .................... n_estimators=50, max_depth=40, total= 3.7s
[CV] n_estimators=20, max_depth=50 ...................................
[CV] ................... n_estimators=500, max_depth=10, total= 9.3s
[CV] n_estimators=20, max_depth=50 ...................................
[CV] .................... n_estimators=10, max_depth=50, total= 0.6s
[CV] n_estimators=20, max_depth=50 ...................................
[CV] .................... n_estimators=10, max_depth=50, total= 0.9s
[CV] n_estimators=50, max_depth=50 ...................................
[CV] .................... n_estimators=10, max_depth=50, total= 1.0s
[CV] n_estimators=50, max_depth=50 ...................................
[CV] .................... n_estimators=20, max_depth=50, total= 1.3s
[CV] n_estimators=50, max_depth=50 ...................................
[CV] .................... n_estimators=20, max_depth=50, total= 1.9s
[CV] n_estimators=100, max_depth=50 ..................................
[CV] .................... n_estimators=20, max_depth=50, total= 2.0s
[CV] n_estimators=100, max_depth=50 ..................................
[CV] ................... n_estimators=100, max_depth=40, total= 6.2s
[CV] n_estimators=100, max_depth=50 ..................................
[CV] ................... n_estimators=200, max_depth=30, total= 10.1s
[CV] n_estimators=200, max_depth=50 ..................................
[CV] ................... n_estimators=100, max_depth=40, total= 7.1s
[CV] n_estimators=200, max_depth=50 ..................................
[CV] ................... n_estimators=100, max_depth=40, total= 7.1s
[CV] n_estimators=200, max_depth=50 ..................................
[CV] .................... n_estimators=50, max_depth=50, total= 3.1s
[CV] n_estimators=500, max_depth=50 ..................................
[CV] ................... n_estimators=200, max_depth=30, total= 10.7s
[CV] n_estimators=500, max_depth=50 ..................................
[CV] ................... n_estimators=200, max_depth=30, total= 11.0s
[CV] n_estimators=500, max_depth=50 ..................................
[CV] .................... n_estimators=50, max_depth=50, total= 4.7s
[CV] n_estimators=10, max_depth=None .................................
[CV] ................... n_estimators=500, max_depth=10, total= 14.3s
[CV] n_estimators=10, max_depth=None .................................
[CV] .................. n_estimators=10, max_depth=None, total= 0.7s
[CV] n_estimators=10, max_depth=None .................................
[CV] .................... n_estimators=50, max_depth=50, total= 4.7s
[CV] n_estimators=20, max_depth=None .................................
[CV] .................. n_estimators=10, max_depth=None, total= 1.7s
[CV] n_estimators=20, max_depth=None .................................
[CV] ................... n_estimators=500, max_depth=20, total= 16.0s
[CV] n_estimators=20, max_depth=None .................................
[CV] ................... n_estimators=500, max_depth=20, total= 16.0s
[CV] n_estimators=50, max_depth=None .................................
[CV] .................. n_estimators=10, max_depth=None, total= 2.0s
[CV] n_estimators=50, max_depth=None .................................
[CV] .................. n_estimators=20, max_depth=None, total= 1.3s
[CV] n_estimators=50, max_depth=None .................................
[CV] ................... n_estimators=100, max_depth=50, total= 6.2s
[CV] n_estimators=100, max_depth=None ................................
[CV] .................. n_estimators=20, max_depth=None, total= 3.4s
[CV] n_estimators=100, max_depth=None ................................
[CV] ................... n_estimators=200, max_depth=40, total= 12.3s
[CV] n_estimators=100, max_depth=None ................................
[CV] .................. n_estimators=50, max_depth=None, total= 3.1s
[CV] n_estimators=200, max_depth=None ................................
[CV] ................... n_estimators=100, max_depth=50, total= 9.2s
[CV] n_estimators=200, max_depth=None ................................
[CV] .................. n_estimators=20, max_depth=None, total= 3.7s
[CV] n_estimators=200, max_depth=None ................................
[CV] ................... n_estimators=200, max_depth=40, total= 14.1s
[CV] n_estimators=500, max_depth=None ................................
[CV] ................... n_estimators=200, max_depth=40, total= 14.4s
[CV] n_estimators=500, max_depth=None ................................
[CV] ................... n_estimators=100, max_depth=50, total= 9.5s
[CV] n_estimators=500, max_depth=None ................................
[CV] ................... n_estimators=500, max_depth=20, total= 21.7s
[CV] ................. n_estimators=100, max_depth=None, total= 6.2s
[CV] .................. n_estimators=50, max_depth=None, total= 8.3s
[CV] ................... n_estimators=200, max_depth=50, total= 12.5s
[CV] .................. n_estimators=50, max_depth=None, total= 8.7s
[CV] ................... n_estimators=500, max_depth=30, total= 24.3s
[CV] ................... n_estimators=500, max_depth=30, total= 25.6s
[CV] ................... n_estimators=500, max_depth=30, total= 26.3s
[CV] ................... n_estimators=200, max_depth=50, total= 18.1s
[CV] ................. n_estimators=200, max_depth=None, total= 10.9s
[CV] ................... n_estimators=200, max_depth=50, total= 18.6s
[CV] ................. n_estimators=100, max_depth=None, total= 14.5s
[CV] ................... n_estimators=500, max_depth=40, total= 27.6s
[CV] ................. n_estimators=100, max_depth=None, total= 16.6s
[CV] ................... n_estimators=500, max_depth=40, total= 30.4s
[CV] ................... n_estimators=500, max_depth=40, total= 30.6s
[CV] ................... n_estimators=500, max_depth=50, total= 26.4s
[CV] ................. n_estimators=200, max_depth=None, total= 23.4s
[CV] ................. n_estimators=500, max_depth=None, total= 22.0s
[CV] ................... n_estimators=500, max_depth=50, total= 33.3s
[CV] ................. n_estimators=200, max_depth=None, total= 27.5s
[CV] ................... n_estimators=500, max_depth=50, total= 36.3s
[CV] ................. n_estimators=500, max_depth=None, total= 55.8s
[CV] ................. n_estimators=500, max_depth=None, total= 1.0min
[Parallel(n_jobs=-1)]: Done 108 out of 108 | elapsed: 1.4min finished
Out[24]:
GridSearchCV(cv=3, error_score='raise',
estimator=RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=None,
max_features='auto', max_leaf_nodes=None,
min_impurity_decrease=0.0, min_impurity_split=None,
min_samples_leaf=1, min_samples_split=2,
min_weight_fraction_leaf=0.0, n_estimators=100, n_jobs=1,
oob_score=False, random_state=None, verbose=0, warm_start=False),
fit_params=None, iid=True, n_jobs=-1,
param_grid={'n_estimators': [10, 20, 50, 100, 200, 500], 'max_depth': [10, 20, 30, 40, 50, None]},
pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',
scoring='r2', verbose=2)
In [20]:
cvres = pd.DataFrame(rf_grid.cv_results_)
NameErrorTraceback (most recent call last)
<ipython-input-20-6008d11b39c6> in <module>()
----> 1 cvres = pd.DataFrame(rf_grid.cv_results_)
NameError: name 'rf_grid' is not defined
In [26]:
cvres
Out[26]:
mean_fit_time
mean_score_time
mean_test_score
mean_train_score
param_max_depth
param_n_estimators
params
rank_test_score
split0_test_score
split0_train_score
split1_test_score
split1_train_score
split2_test_score
split2_train_score
std_fit_time
std_score_time
std_test_score
std_train_score
0
0.196159
0.004778
0.504222
0.920180
10
10
{u'n_estimators': 10, u'max_depth': 10}
36
0.496932
0.910339
0.312819
0.969422
0.702921
0.880780
0.057948
0.000288
0.159320
0.036851
1
0.406629
0.006557
0.508101
0.922314
10
20
{u'n_estimators': 20, u'max_depth': 10}
35
0.494053
0.911507
0.330114
0.972858
0.700147
0.882577
0.101734
0.000184
0.151371
0.037641
2
1.050223
0.013812
0.515906
0.922235
10
50
{u'n_estimators': 50, u'max_depth': 10}
34
0.507327
0.912869
0.343242
0.972807
0.697158
0.881028
0.240942
0.000543
0.144593
0.038050
3
2.118504
0.027929
0.523848
0.923038
10
100
{u'n_estimators': 100, u'max_depth': 10}
31
0.504394
0.913847
0.369556
0.973952
0.697609
0.881317
0.507562
0.003025
0.134614
0.038372
4
4.293842
0.048076
0.521079
0.923230
10
200
{u'n_estimators': 200, u'max_depth': 10}
32
0.494764
0.914186
0.370403
0.973877
0.698093
0.881627
0.877385
0.001779
0.135049
0.038200
5
10.677947
0.114402
0.517631
0.923685
10
500
{u'n_estimators': 500, u'max_depth': 10}
33
0.485166
0.914376
0.369936
0.974331
0.697816
0.882349
2.505646
0.002304
0.135794
0.038124
6
0.377041
0.004568
0.588812
0.983211
20
10
{u'n_estimators': 10, u'max_depth': 20}
28
0.701887
0.979298
0.316139
0.990755
0.748314
0.979581
0.032797
0.000107
0.193698
0.005336
7
0.749846
0.007211
0.594421
0.983967
20
20
{u'n_estimators': 20, u'max_depth': 20}
22
0.694355
0.979511
0.340759
0.991821
0.748065
0.980568
0.124779
0.000363
0.180664
0.005571
8
1.868501
0.014803
0.593961
0.984198
20
50
{u'n_estimators': 50, u'max_depth': 20}
25
0.674948
0.979188
0.350431
0.992113
0.756435
0.981292
0.229115
0.001500
0.175350
0.005662
9
3.561664
0.027764
0.594146
0.984236
20
100
{u'n_estimators': 100, u'max_depth': 20}
24
0.664143
0.979335
0.360526
0.992151
0.757708
0.981222
0.492003
0.000926
0.169519
0.005649
10
7.286601
0.054029
0.592990
0.984311
20
200
{u'n_estimators': 200, u'max_depth': 20}
26
0.665030
0.979444
0.356100
0.992244
0.757780
0.981244
0.950746
0.002057
0.171699
0.005658
11
17.788310
0.128999
0.594412
0.984327
20
500
{u'n_estimators': 500, u'max_depth': 20}
23
0.670183
0.979310
0.355453
0.992214
0.757538
0.981459
2.687390
0.007569
0.172658
0.005645
12
0.537667
0.004683
0.592557
0.989115
30
10
{u'n_estimators': 10, u'max_depth': 30}
27
0.671219
0.984928
0.312347
0.992044
0.794037
0.990373
0.017018
0.000222
0.204344
0.003038
13
1.080449
0.010213
0.604206
0.990021
30
20
{u'n_estimators': 20, u'max_depth': 30}
16
0.662722
0.986100
0.357478
0.992599
0.792368
0.991363
0.062529
0.003351
0.182281
0.002818
14
2.678862
0.017358
0.605918
0.990358
30
50
{u'n_estimators': 50, u'max_depth': 30}
9
0.672022
0.986619
0.354284
0.992836
0.791393
0.991619
0.080105
0.001706
0.184450
0.002690
15
5.355978
0.031724
0.607898
0.990540
30
100
{u'n_estimators': 100, u'max_depth': 30}
6
0.670254
0.986940
0.361877
0.992998
0.791509
0.991683
0.207320
0.001936
0.180834
0.002602
16
10.531279
0.061320
0.604345
0.990511
30
200
{u'n_estimators': 200, u'max_depth': 30}
15
0.678331
0.986759
0.345013
0.992913
0.789628
0.991862
0.375923
0.005199
0.188884
0.002688
17
25.294339
0.094476
0.607230
0.990542
30
500
{u'n_estimators': 500, u'max_depth': 30}
8
0.685245
0.986884
0.347883
0.992947
0.788497
0.991795
0.831023
0.008624
0.188131
0.002629
18
0.706047
0.005000
0.600896
0.990308
40
10
{u'n_estimators': 10, u'max_depth': 40}
20
0.682154
0.986937
0.321305
0.992163
0.799160
0.991823
0.060215
0.000255
0.203350
0.002388
19
1.406332
0.008452
0.605680
0.990928
40
20
{u'n_estimators': 20, u'max_depth': 40}
11
0.651017
0.988142
0.362782
0.992855
0.803203
0.991786
0.087076
0.000585
0.182614
0.002018
20
3.452616
0.017948
0.610490
0.991485
40
50
{u'n_estimators': 50, u'max_depth': 40}
2
0.675223
0.988461
0.356620
0.993012
0.799574
0.992983
0.185097
0.001812
0.186518
0.002139
21
6.765513
0.033712
0.604429
0.991559
40
100
{u'n_estimators': 100, u'max_depth': 40}
14
0.680588
0.988387
0.334351
0.993128
0.798282
0.993164
0.458800
0.003326
0.196887
0.002244
22
13.546003
0.065330
0.610148
0.991584
40
200
{u'n_estimators': 200, u'max_depth': 40}
4
0.683344
0.988385
0.347824
0.993058
0.799215
0.993309
0.933618
0.006360
0.191391
0.002264
23
29.415834
0.103990
0.608383
0.991622
40
500
{u'n_estimators': 500, u'max_depth': 40}
5
0.682526
0.988484
0.345511
0.993080
0.797048
0.993302
1.395007
0.012302
0.191631
0.002220
24
0.835244
0.005220
0.587884
0.990778
50
10
{u'n_estimators': 10, u'max_depth': 50}
29
0.657796
0.987530
0.306970
0.992316
0.798827
0.992489
0.165660
0.000465
0.206773
0.002298
25
1.723023
0.008344
0.601306
0.991359
50
20
{u'n_estimators': 20, u'max_depth': 50}
19
0.687469
0.988423
0.320970
0.992612
0.795407
0.993041
0.327194
0.001221
0.203026
0.002083
26
4.147864
0.019233
0.603752
0.991600
50
50
{u'n_estimators': 50, u'max_depth': 50}
17
0.679080
0.988570
0.338605
0.992829
0.793508
0.993403
0.725678
0.002941
0.193182
0.002156
27
8.255382
0.035238
0.605040
0.992129
50
100
{u'n_estimators': 100, u'max_depth': 50}
13
0.674622
0.989334
0.343886
0.992953
0.796553
0.994101
1.469251
0.004362
0.191219
0.002031
28
16.337772
0.068242
0.607747
0.992147
50
200
{u'n_estimators': 200, u'max_depth': 50}
7
0.680346
0.989262
0.345756
0.993091
0.797076
0.994089
2.763645
0.008193
0.191250
0.002080
29
31.918979
0.110475
0.610236
0.992142
50
500
{u'n_estimators': 500, u'max_depth': 50}
3
0.680195
0.989288
0.353717
0.993110
0.796736
0.994028
4.144524
0.015920
0.187486
0.002053
30
1.469967
0.005644
0.580998
0.991101
None
10
{u'n_estimators': 10, u'max_depth': None}
30
0.637633
0.988022
0.305583
0.992219
0.799732
0.993063
0.566766
0.000210
0.205647
0.002204
31
2.783081
0.009305
0.595898
0.991911
None
20
{u'n_estimators': 20, u'max_depth': None}
21
0.669633
0.989857
0.324748
0.992376
0.793251
0.993501
1.048332
0.000361
0.198225
0.001523
32
6.706746
0.018676
0.601944
0.992587
None
50
{u'n_estimators': 50, u'max_depth': None}
18
0.679886
0.989879
0.332241
0.992904
0.793640
0.994977
2.548873
0.003563
0.196244
0.002093
33
12.392997
0.030579
0.605062
0.992601
None
100
{u'n_estimators': 100, u'max_depth': None}
12
0.681470
0.990048
0.338066
0.992945
0.795584
0.994811
4.513245
0.007874
0.194419
0.001960
34
20.526644
0.050559
0.605848
0.992681
None
200
{u'n_estimators': 200, u'max_depth': None}
10
0.682106
0.990231
0.338903
0.993073
0.796471
0.994739
7.073495
0.003521
0.194409
0.001861
35
46.666782
0.125217
0.610777
0.992775
None
500
{u'n_estimators': 500, u'max_depth': None}
1
0.682503
0.990345
0.353035
0.993114
0.796732
0.994866
17.727534
0.009582
0.188086
0.001861
In [27]:
d = cvres.pivot("param_max_depth","param_n_estimators","mean_test_score")
sns.heatmap(d)
plt.figure()
sns.heatmap(cvres.pivot("param_max_depth","param_n_estimators","std_test_score"))
Out[27]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fec65617e10>
In [28]:
sns.heatmap(cvres.pivot("param_max_depth","param_n_estimators","mean_train_score"))
Out[28]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fec64d2aad0>
In [21]:
rf = RandomForestRegressor(n_estimators=20)
In [22]:
cvpred = cross_val_predict(rf, Xv, y, cv=3)
In [23]:
sns.jointplot(y,cvpred,alpha=.3,xlim=(.3,.45),ylim=(.3,.45))
Out[23]:
<seaborn.axisgrid.JointGrid at 0x7f7bec785310>
In [ ]:
In [24]:
yfit = rf.fit(Xv,y)
In [25]:
# Estimate the size of the full hyperparameter search space as the product
# of the number of choices for every option.
tot = 1
for (k,v) in makemodel.getoptions().iteritems():  # Python 2 dict iteration
    if isinstance(v,makemodel.Range):
        # continuous ranges are discretized into 9 values (see randParam below)
        tot *= 9
    else:
        tot *= len(v)
In [26]:
'%g' % tot
Out[26]:
'2.05457e+44'
In [27]:
import deap
from deap import *
In [ ]:
In [28]:
# Sanity check: surrogate's predicted score for the default configuration.
defaults = dictvec.transform(makemodel.getdefaults())
rf.predict(defaults)
Out[28]:
array([0.37781492])
In [29]:
modeldefaults = makemodel.getdefaults()

def cleanparams(p):
    '''Standardize parameters that do not matter.

    When a conv layer has zero width, a pool layer has zero size, or a
    fully connected layer has zero hidden units, its sub-parameters have
    no effect on the model; reset them to the defaults so that equivalent
    configurations map to identical parameter vectors.

    Mutates and returns p.
    '''
    for i in xrange(1, 6):
        # Disabled conv layer: none of its settings matter.
        if p['conv%d_width' % i] == 0:
            for suffix in ['func', 'init', 'norm', 'size', 'stride', 'width']:
                name = 'conv%d_%s' % (i, suffix)
                p[name] = modeldefaults[name]
        # Disabled pool layer: its type is irrelevant.
        if p['pool%d_size' % i] == 0:
            name = 'pool%d_type' % i
            p[name] = modeldefaults[name]
    # The pose and affinity fully connected stacks follow the same rules;
    # handle both with one loop instead of the original copy-pasted branches.
    for stack in ['pose', 'affinity']:
        if p['fc_%s_hidden' % stack] == 0:
            # Whole stack disabled: reset everything downstream of it.
            for suffix in ['func', 'hidden2', 'func2', 'init']:
                name = 'fc_%s_%s' % (stack, suffix)
                p[name] = modeldefaults[name]
        elif p['fc_%s_hidden2' % stack] == 0:
            # Only the second layer disabled: reset just its settings.
            for suffix in ['hidden2', 'func2']:
                name = 'fc_%s_%s' % (stack, suffix)
                p[name] = modeldefaults[name]
    return p
In [30]:
def randParam(param, choices):
    '''Randomly select a value for param from its allowed choices.

    A continuous makemodel.Range is discretized into 9 evenly spaced
    points before sampling; other options are sampled uniformly.
    '''
    if isinstance(choices, makemodel.Range):  # discretize
        choices = np.linspace(choices.min, choices.max, 9)
    # .item() converts the numpy scalar to a plain Python scalar;
    # np.asscalar was a thin wrapper around .item() and has been
    # deprecated and removed in numpy >= 1.23.
    return np.random.choice(choices).item()

def randomIndividual():
    '''Sample a fully random parameter dict and standardize it.'''
    ret = dict()
    options = makemodel.getoptions()
    for (param, choices) in options.iteritems():
        ret[param] = randParam(param, choices)
    return cleanparams(ret)
In [31]:
def evaluateIndividual(ind):
    '''Fitness of an individual: the surrogate forest's predicted score,
    returned as a 1-element list (DEAP expects a sequence).'''
    features = dictvec.transform(ind)
    predicted = rf.predict(features)
    return [predicted[0]]
In [32]:
# Vectorize the evaluated parameter sets; cleaning first ensures equivalent
# configurations share a single encoding.
Xv = dictvec.fit_transform(map(cleanparams,X.to_dict(orient='records')))
In [33]:
# Refit on the cleaned encoding and keep the in-sample predictions.
rf.fit(Xv,y)
yfit = rf.predict(Xv)
In [34]:
# Cross-validated predictions vs. truth for the cleaned encoding.
cvpred = cross_val_predict(rf, Xv, y, cv=3)
sns.jointplot(y,cvpred,alpha=.3,xlim=(.3,.45),ylim=(.3,.45))
Out[34]:
<seaborn.axisgrid.JointGrid at 0x7f7bec7eca10>
In [35]:
# In-sample fit (no CV) - optimistic by construction for a random forest.
sns.jointplot(y,yfit,alpha=.3)
Out[35]:
<seaborn.axisgrid.JointGrid at 0x7f7be81f52d0>
In [36]:
# Smoke test: score one random individual with the surrogate.
evaluateIndividual(randomIndividual())
Out[36]:
[0.2080152646895317]
In [37]:
def mutateIndividual(ind, indpb=0.05):
    '''With probability indpb per parameter, resample that parameter
    from its allowed choices. Returns a 1-tuple as DEAP expects.'''
    for param, choices in makemodel.getoptions().iteritems():
        if np.random.rand() < indpb:
            ind[param] = randParam(param, choices)
    return (ind,)

def crossover(ind1, ind2, indpdb=0.5):
    '''Uniform crossover: swap each parameter between the two individuals
    with probability indpdb.

    NOTE(review): "indpdb" looks like a typo for "indpb"; kept as-is so
    keyword callers are not broken.'''
    for param, choices in makemodel.getoptions().iteritems():
        if np.random.rand() < indpdb:
            ind1[param], ind2[param] = ind2[param], ind1[param]
    return (ind1, ind2)
In [ ]:
In [38]:
from deap import base, creator, gp, tools
# Single-objective maximization of the surrogate's predicted score.
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
# Individuals are plain parameter dicts carrying a fitness attribute.
creator.create("Individual", dict, fitness=creator.FitnessMax)
toolbox = base.Toolbox()
# Wire the GA operators defined above into the DEAP toolbox.
toolbox.register("individual", tools.initIterate, creator.Individual, randomIndividual)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("mutate",mutateIndividual)
toolbox.register("mate",crossover)
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("evaluate", evaluateIndividual)
In [39]:
import multiprocessing
# Evaluate fitness in parallel across all cores.
# NOTE(review): the pool is never closed and is created without a
# __main__ guard - acceptable in a notebook, leaks workers elsewhere.
pool = multiprocessing.Pool()
toolbox.register("map", pool.map)
In [40]:
# Random starting population, hall of fame for the 10 best ever seen,
# and per-generation fitness statistics.
randpop = toolbox.population(n=300)
hof = tools.HallOfFame(10)
stats = tools.Statistics(lambda ind: ind.fitness.values)
stats.register("avg", np.mean)
stats.register("std", np.std)
stats.register("min", np.min)
stats.register("max", np.max)
In [41]:
# Alternative seed population built from the actually-evaluated configurations.
initpop = [ creator.Individual(cleanparams(x)) for x in X.to_dict('records')]
In [42]:
from deap import algorithms
# Plain generational GA over the surrogate landscape; toolbox.clone keeps
# randpop itself untouched so it can seed other runs.
pop, log = algorithms.eaSimple(toolbox.clone(randpop), toolbox, cxpb=0.5, mutpb=0.2, ngen=500,
stats=stats, halloffame=hof, verbose=True)
gen nevals avg std min max
0 300 0.125692 0.0862782 -0.0658345 0.382094
1 183 0.193744 0.0815559 0.000900693 0.382094
2 186 0.262406 0.0766599 -0.00898516 0.382094
3 184 0.32215 0.0536855 0.0565018 0.382094
4 178 0.351853 0.035638 0.075722 0.384736
5 162 0.364633 0.0260688 0.143312 0.386628
6 168 0.369501 0.0350143 0.107469 0.386628
7 187 0.379879 0.0175138 0.113041 0.386628
8 155 0.381875 0.0164147 0.221928 0.388027
9 176 0.380617 0.0258379 0.189351 0.388027
10 196 0.385224 0.0147142 0.182124 0.388027
11 171 0.383983 0.0190065 0.185887 0.388027
12 178 0.379098 0.037816 0.105764 0.388027
13 167 0.380646 0.0337444 0.0839966 0.388027
14 185 0.384068 0.0254017 0.13826 0.388027
15 148 0.385934 0.0157514 0.240663 0.388027
16 174 0.38243 0.0332081 0.051005 0.388027
17 185 0.383034 0.0253588 0.205972 0.388027
18 180 0.385544 0.0251225 0.068579 0.388027
19 201 0.385322 0.018069 0.237017 0.388027
20 197 0.384732 0.0239327 0.117137 0.388027
21 164 0.38597 0.0157888 0.238398 0.388027
22 187 0.381876 0.0307143 0.105764 0.388027
23 178 0.384539 0.026735 0.0816527 0.388027
24 173 0.385477 0.0170612 0.239127 0.388027
25 176 0.385592 0.0160273 0.239754 0.388027
26 189 0.383037 0.0286509 0.109277 0.388027
27 183 0.384643 0.0222355 0.116545 0.388027
28 188 0.382167 0.0320011 0.0772484 0.388027
29 163 0.383341 0.031232 0.0464733 0.388027
30 173 0.3838 0.0314389 0.0812844 0.388027
31 177 0.384485 0.0213399 0.223975 0.388027
32 173 0.384505 0.0263483 0.105764 0.388027
33 185 0.385014 0.0212401 0.133725 0.388027
34 195 0.384827 0.0219164 0.127035 0.388027
35 187 0.385662 0.0184269 0.162285 0.388027
36 164 0.383353 0.0322631 0.0495608 0.388027
37 174 0.384018 0.0272608 0.105423 0.388027
38 167 0.385997 0.0192717 0.107246 0.388027
39 190 0.383765 0.0268764 0.0727833 0.388027
40 190 0.384146 0.0254053 0.139047 0.388027
41 178 0.385822 0.0168934 0.177473 0.388027
42 179 0.384388 0.0239063 0.125168 0.388027
43 198 0.382965 0.0307571 0.113843 0.388027
44 187 0.384548 0.0237086 0.137004 0.388027
45 194 0.384733 0.0259598 0.100684 0.388027
46 193 0.384099 0.0237445 0.147409 0.388027
47 178 0.382256 0.0325367 0.105764 0.388027
48 191 0.385673 0.0178818 0.17393 0.388027
49 186 0.382585 0.0320618 0.0897468 0.388027
50 178 0.386133 0.0152176 0.241154 0.388027
51 174 0.385423 0.0205543 0.137004 0.388027
52 186 0.386491 0.0126603 0.226388 0.388027
53 194 0.385001 0.0226251 0.126391 0.388027
54 173 0.385779 0.0162369 0.217243 0.388027
55 170 0.382898 0.0262768 0.182124 0.388027
56 173 0.38508 0.0198041 0.178123 0.388027
57 178 0.382887 0.0300414 0.105143 0.388027
58 187 0.38262 0.0317757 0.069994 0.388027
59 184 0.383469 0.0239167 0.226245 0.388027
60 157 0.382318 0.0284539 0.142955 0.388027
61 180 0.383482 0.027891 0.105423 0.388027
62 179 0.384254 0.0216252 0.22315 0.388027
63 166 0.381847 0.036645 0.105521 0.388027
64 194 0.385074 0.017128 0.233916 0.388027
65 202 0.383672 0.0257503 0.116545 0.388027
66 173 0.385026 0.0220008 0.11445 0.388027
67 168 0.386523 0.0147297 0.187097 0.388027
68 184 0.384012 0.0267669 0.0962431 0.388027
69 193 0.384187 0.0237973 0.128047 0.388027
70 170 0.380963 0.0362656 0.0544514 0.388027
71 168 0.383687 0.0275165 0.129995 0.388027
72 179 0.380294 0.0382357 0.105764 0.388027
73 165 0.381622 0.0379506 0.0455896 0.388027
74 196 0.382961 0.0327737 0.069994 0.388027
75 195 0.382267 0.0268993 0.186891 0.388027
76 195 0.383382 0.0270863 0.142955 0.388027
77 187 0.381339 0.0340757 0.136198 0.388027
78 160 0.383304 0.0279786 0.142955 0.388027
79 172 0.383546 0.0256182 0.167652 0.388027
80 182 0.384167 0.0247465 0.117137 0.388027
81 171 0.383786 0.0276578 0.124468 0.388027
82 162 0.385252 0.0221704 0.139047 0.388027
83 178 0.384947 0.0244749 0.136198 0.388027
84 180 0.38428 0.0259009 0.143816 0.388027
85 175 0.385089 0.0212736 0.133596 0.388027
86 165 0.384355 0.0223588 0.1769 0.388027
87 176 0.382581 0.0292072 0.101589 0.388027
88 192 0.384498 0.0263924 0.100377 0.388027
89 169 0.381264 0.0360322 0.0610351 0.388027
90 186 0.383723 0.0277304 0.117137 0.388027
91 175 0.384978 0.0225562 0.107131 0.388027
92 168 0.384598 0.0243027 0.124603 0.388027
93 188 0.384196 0.0226195 0.145755 0.388027
94 181 0.384506 0.022865 0.167652 0.388027
95 185 0.38335 0.0281977 0.105423 0.388027
96 178 0.381346 0.0350718 0.109277 0.388027
97 172 0.384459 0.0236046 0.178966 0.388027
98 156 0.38582 0.0213897 0.142955 0.388027
99 197 0.383235 0.0260974 0.161297 0.388027
100 183 0.384725 0.0237986 0.136916 0.388027
101 176 0.380814 0.037429 0.0933395 0.388027
102 186 0.385575 0.014624 0.255548 0.388027
103 172 0.384279 0.0246796 0.124017 0.388027
104 198 0.38516 0.0238314 0.105764 0.388027
105 175 0.385218 0.0238295 0.107131 0.388027
106 172 0.382593 0.0280413 0.136198 0.388027
107 182 0.384163 0.0217138 0.228862 0.388027
108 194 0.384569 0.0244661 0.123909 0.388027
109 188 0.387235 0.00728926 0.272033 0.388027
110 184 0.385181 0.0167501 0.237508 0.388027
111 182 0.386231 0.0148592 0.176085 0.388027
112 169 0.385197 0.0213348 0.151549 0.388027
113 178 0.384831 0.0217678 0.130041 0.388027
114 191 0.382542 0.0343901 0.069994 0.388027
115 181 0.382581 0.0288096 0.105764 0.388027
116 165 0.384862 0.0212219 0.117137 0.388027
117 193 0.383869 0.021427 0.226388 0.388027
118 184 0.386172 0.0148373 0.223975 0.388027
119 159 0.386743 0.0156355 0.133596 0.388027
120 181 0.384925 0.0235845 0.136198 0.388027
121 183 0.382451 0.0295975 0.137362 0.388027
122 171 0.385142 0.0209467 0.107246 0.388027
123 187 0.383991 0.0229871 0.160485 0.388027
124 174 0.381685 0.0330613 0.117137 0.388027
125 189 0.384879 0.0229635 0.133596 0.388027
126 191 0.381976 0.0302262 0.109393 0.388027
127 191 0.385644 0.0168695 0.236223 0.388027
128 177 0.385414 0.0201145 0.139047 0.388027
129 186 0.38482 0.0243596 0.130041 0.388027
130 182 0.3851 0.0251697 0.105423 0.388027
131 183 0.384597 0.0248896 0.0975683 0.388027
132 175 0.384624 0.0224847 0.130041 0.388027
133 187 0.383597 0.024483 0.133596 0.388027
134 183 0.383079 0.027985 0.137004 0.388027
135 183 0.383255 0.0313035 0.109277 0.388027
136 188 0.385194 0.0196961 0.169204 0.388027
137 169 0.383132 0.0291806 0.107131 0.388027
138 187 0.387071 0.00943613 0.240663 0.388027
139 174 0.38632 0.016794 0.18276 0.388027
140 198 0.383263 0.0308456 0.0926891 0.388027
141 176 0.385488 0.020152 0.105764 0.388027
142 182 0.384769 0.0217591 0.139047 0.388027
143 180 0.385058 0.0205693 0.13247 0.388027
144 178 0.384338 0.0221253 0.157292 0.388027
145 161 0.386873 0.00981658 0.277215 0.388027
146 199 0.381409 0.0303619 0.137004 0.388027
147 177 0.384649 0.0231818 0.104986 0.388027
148 179 0.384493 0.0229579 0.182124 0.388027
149 174 0.384032 0.022946 0.162754 0.388027
150 167 0.384384 0.0234013 0.137004 0.388027
151 186 0.383173 0.0287284 0.100377 0.388027
152 169 0.383658 0.0278108 0.069994 0.388027
153 185 0.385004 0.0243136 0.0999965 0.388027
154 180 0.383729 0.0291368 0.100377 0.388027
155 172 0.386819 0.0117775 0.220738 0.388027
156 176 0.38352 0.0281541 0.105423 0.388027
157 200 0.381171 0.0318325 0.142955 0.388027
158 181 0.386694 0.00997847 0.275702 0.388027
159 184 0.385269 0.0204371 0.139047 0.388027
160 192 0.38495 0.0231427 0.132464 0.388027
161 187 0.38373 0.0244916 0.13826 0.388027
162 180 0.385614 0.0197072 0.136198 0.388027
163 195 0.383513 0.0243055 0.18276 0.388027
164 180 0.385469 0.0198585 0.105764 0.388027
165 200 0.382612 0.0316779 0.0739959 0.388027
166 170 0.382881 0.0307445 0.0997641 0.388027
167 180 0.383924 0.0219571 0.137004 0.388027
168 168 0.38441 0.0199374 0.222742 0.388027
169 202 0.386567 0.0114625 0.277016 0.388027
170 188 0.385016 0.0226145 0.142955 0.388027
171 181 0.380498 0.0344918 0.0752516 0.388027
172 190 0.384305 0.0252459 0.115491 0.388027
173 188 0.382424 0.0297474 0.114052 0.388027
174 179 0.382406 0.0286751 0.124468 0.388027
175 189 0.384626 0.0244823 0.105423 0.388027
176 177 0.38291 0.0274354 0.139047 0.388027
177 183 0.385349 0.01998 0.130041 0.388027
178 178 0.384808 0.0192006 0.188702 0.388027
179 167 0.382432 0.0291736 0.138421 0.388027
180 171 0.383083 0.0263149 0.133596 0.388027
181 182 0.382696 0.0290975 0.133596 0.388027
182 175 0.386467 0.0115359 0.239579 0.388027
183 172 0.38304 0.0280868 0.130041 0.388027
184 183 0.384668 0.0213852 0.139047 0.388027
185 169 0.383379 0.0293246 0.117534 0.388027
186 191 0.382568 0.0296374 0.0693033 0.388027
187 180 0.384449 0.0221994 0.18276 0.388027
188 180 0.383877 0.0306816 0.0593436 0.388027
189 179 0.382599 0.0259869 0.145755 0.388027
190 183 0.385692 0.0159585 0.236128 0.388027
191 193 0.3851 0.0233809 0.135963 0.388027
192 171 0.386036 0.0145144 0.244403 0.388027
193 186 0.385075 0.0183525 0.238742 0.388027
194 166 0.384792 0.0202284 0.199666 0.388027
195 177 0.381967 0.0330639 0.107131 0.388027
196 194 0.38526 0.0180377 0.182124 0.388027
197 184 0.384572 0.0249596 0.0908639 0.388027
198 171 0.38343 0.029229 0.117382 0.388027
199 193 0.384597 0.0217078 0.185887 0.388027
200 173 0.382215 0.0361778 -0.0183266 0.388027
201 179 0.381432 0.0335245 0.0856179 0.388027
202 167 0.386415 0.0116452 0.257267 0.388027
203 206 0.384465 0.0216693 0.135578 0.388027
204 193 0.38525 0.0178502 0.225102 0.388027
205 189 0.384598 0.0189871 0.240663 0.388027
206 193 0.383916 0.0265232 0.124468 0.388027
207 174 0.385939 0.0165596 0.199666 0.388027
208 188 0.384245 0.0253054 0.0715838 0.388027
209 163 0.384204 0.0269788 0.087764 0.388027
210 182 0.384431 0.0216383 0.182124 0.388027
211 187 0.384221 0.0211617 0.182124 0.388027
212 179 0.383242 0.0281326 0.0879537 0.388027
213 173 0.383285 0.0331826 -0.0021985 0.388027
214 190 0.382239 0.0320829 0.10051 0.388027
215 155 0.383436 0.0308076 0.0869061 0.388027
216 189 0.383289 0.0266118 0.167652 0.388027
217 193 0.380526 0.0404297 0.088897 0.388027
218 183 0.383217 0.0228432 0.223975 0.388027
219 191 0.385574 0.0166507 0.185843 0.388027
220 185 0.386457 0.0128425 0.218817 0.388027
221 186 0.386174 0.013465 0.255322 0.388027
222 189 0.385041 0.0197575 0.182124 0.388027
223 205 0.382187 0.0272066 0.200425 0.388027
224 168 0.385177 0.0195794 0.151765 0.388027
225 154 0.380611 0.0333438 0.0810764 0.388027
226 204 0.382911 0.0278862 0.109277 0.388027
227 187 0.382558 0.0295675 0.130019 0.388027
228 174 0.38364 0.027671 0.121872 0.388027
229 153 0.383783 0.0231527 0.18276 0.388027
230 180 0.383311 0.0249122 0.182124 0.388027
231 179 0.383486 0.0272847 0.107131 0.388027
232 173 0.383493 0.024192 0.132227 0.388027
233 179 0.381019 0.0325274 0.104986 0.388027
234 180 0.383557 0.0269994 0.0945602 0.388027
235 187 0.384387 0.0236632 0.133596 0.388027
236 163 0.381866 0.0308219 0.104986 0.388027
237 185 0.384397 0.0225329 0.181315 0.388027
238 192 0.383332 0.029773 0.107577 0.388027
239 191 0.380818 0.0380646 0.036121 0.388027
240 173 0.383841 0.0245121 0.105423 0.388027
241 171 0.386536 0.0130537 0.191697 0.388027
242 168 0.382052 0.0325658 0.0945602 0.388027
243 183 0.383479 0.0265656 0.130041 0.388027
244 181 0.383183 0.0273956 0.133596 0.388027
245 195 0.383731 0.026851 0.117137 0.388027
246 167 0.384685 0.0255542 0.126391 0.388027
247 195 0.383187 0.0274992 0.13826 0.388027
248 176 0.383431 0.0307384 0.0602903 0.388027
249 169 0.385626 0.0232566 0.104986 0.388027
250 168 0.3838 0.0311041 0.0686695 0.388027
251 178 0.383445 0.0245952 0.13826 0.388027
252 161 0.385966 0.017507 0.11557 0.388027
253 168 0.383263 0.0245444 0.17856 0.388027
254 198 0.384196 0.0263254 0.105764 0.388027
255 193 0.381729 0.0324069 0.104986 0.388027
256 191 0.387222 0.00702827 0.296583 0.388027
257 202 0.386419 0.0161387 0.13826 0.388027
258 167 0.381984 0.03431 0.0805735 0.388027
259 183 0.385633 0.0161475 0.196121 0.388027
260 182 0.385559 0.018164 0.117137 0.388027
261 162 0.382489 0.032887 0.0544514 0.388027
262 174 0.38385 0.0264942 0.109277 0.388027
263 204 0.377477 0.0459618 0.105764 0.388027
264 187 0.384964 0.0224025 0.126391 0.388027
265 185 0.381734 0.034968 0.0728395 0.388027
266 174 0.383329 0.0274383 0.108197 0.388027
267 191 0.38611 0.0142398 0.252733 0.388027
268 181 0.385815 0.0146448 0.241154 0.388027
269 211 0.384391 0.0211962 0.194109 0.388027
270 171 0.383899 0.025032 0.123909 0.388027
271 165 0.3842 0.0244293 0.161596 0.388027
272 173 0.385925 0.0163764 0.226388 0.388027
273 185 0.384369 0.0211994 0.166737 0.388027
274 188 0.384806 0.0178152 0.253508 0.388027
275 167 0.385539 0.0198139 0.117704 0.388027
276 189 0.384421 0.0211973 0.117137 0.388027
277 196 0.38377 0.0279322 0.104986 0.388027
278 188 0.382799 0.0329169 0.100768 0.388027
279 157 0.385511 0.0197002 0.101589 0.388027
280 171 0.382998 0.0303161 0.0584235 0.388027
281 185 0.384031 0.0272527 0.069994 0.388027
282 175 0.384914 0.0234541 0.130041 0.388027
283 176 0.383945 0.031254 0.0592297 0.388027
284 198 0.384648 0.0245441 0.109277 0.388027
285 167 0.384072 0.0217557 0.162285 0.388027
286 182 0.383194 0.0256475 0.143289 0.388027
287 179 0.384943 0.0192075 0.200502 0.388027
288 171 0.382093 0.0314588 0.117137 0.388027
289 172 0.382505 0.0317839 0.0798281 0.388027
290 169 0.385111 0.0207088 0.107131 0.388027
291 190 0.38519 0.0239618 0.107131 0.388027
292 176 0.384941 0.0215814 0.18276 0.388027
293 174 0.384767 0.021742 0.139047 0.388027
294 177 0.385258 0.0186317 0.199666 0.388027
295 170 0.385567 0.0229343 0.105423 0.388027
296 177 0.384441 0.0265742 0.104986 0.388027
297 161 0.384925 0.0193756 0.186295 0.388027
298 193 0.380616 0.0366136 0.0694563 0.388027
299 184 0.383532 0.032831 0.0856179 0.388027
300 192 0.384187 0.0263081 0.134512 0.388027
301 174 0.383328 0.0290564 0.104986 0.388027
302 161 0.38542 0.0180209 0.199856 0.388027
303 169 0.382679 0.030707 0.133596 0.388027
304 157 0.384741 0.0250932 0.105423 0.388027
305 170 0.383824 0.0240304 0.15122 0.388027
306 163 0.384419 0.0214708 0.216405 0.388027
307 192 0.383662 0.0255277 0.140144 0.388027
308 187 0.385281 0.0178815 0.22033 0.388027
309 166 0.383148 0.0277358 0.124603 0.388027
310 190 0.384443 0.0259398 0.109277 0.388027
311 181 0.384471 0.0263483 0.0214028 0.388027
312 192 0.385254 0.0210998 0.105764 0.388027
313 190 0.384879 0.0240602 0.126391 0.388027
314 170 0.384221 0.0260478 0.103174 0.388027
315 179 0.384537 0.02153 0.117137 0.388027
316 191 0.384809 0.0194043 0.200425 0.388027
317 156 0.385903 0.0159552 0.240517 0.388027
318 192 0.382311 0.0323804 0.106318 0.388027
319 166 0.385384 0.0191932 0.167652 0.388027
320 172 0.385812 0.0215115 0.117137 0.388027
321 180 0.383543 0.0291864 0.136198 0.388027
322 171 0.383625 0.0284696 0.127041 0.388027
323 163 0.384154 0.0202428 0.221229 0.388027
324 169 0.385023 0.0219565 0.137004 0.388027
325 175 0.382276 0.0328041 0.0643503 0.388027
326 178 0.382989 0.0282873 0.105764 0.388027
327 181 0.383983 0.0270096 0.137004 0.388027
328 175 0.384582 0.0209711 0.167652 0.388027
329 174 0.386177 0.0138066 0.22033 0.388027
330 164 0.38618 0.0165362 0.142955 0.388027
331 167 0.382902 0.0301611 0.0920691 0.388027
332 181 0.383191 0.0303545 0.0731897 0.388027
333 172 0.382579 0.0279484 0.104792 0.388027
334 185 0.38151 0.0353429 0.107131 0.388027
335 177 0.381479 0.0319154 0.138421 0.388027
336 189 0.38371 0.0249941 0.136198 0.388027
337 193 0.38489 0.0213976 0.18276 0.388027
338 173 0.385462 0.0193707 0.131429 0.388027
339 176 0.38504 0.0182634 0.231171 0.388027
340 178 0.385702 0.0145135 0.252274 0.388027
341 188 0.382524 0.0290029 0.144909 0.388027
342 182 0.385304 0.0151445 0.2634 0.388027
343 174 0.38449 0.0241981 0.0986891 0.388027
344 172 0.38637 0.0160043 0.206463 0.388027
345 199 0.384614 0.0239379 0.0908639 0.388027
346 191 0.384726 0.0218072 0.142955 0.388027
347 198 0.382854 0.0323359 0.108118 0.388027
348 201 0.384584 0.0250353 0.108118 0.388027
349 190 0.38297 0.0257343 0.137004 0.388027
350 177 0.384722 0.0220272 0.104986 0.388027
351 194 0.383774 0.027671 0.0982308 0.388027
352 191 0.38557 0.0175646 0.202817 0.388027
353 188 0.379366 0.0376952 0.105764 0.388027
354 187 0.385397 0.0200947 0.162495 0.388027
355 199 0.382695 0.0301861 0.0970646 0.388027
356 182 0.386228 0.0143531 0.213993 0.388027
357 172 0.383874 0.0303088 0.0296809 0.388027
358 170 0.384369 0.0248617 0.137004 0.388027
359 191 0.384256 0.024197 0.117534 0.388027
360 175 0.383479 0.0284581 0.0669011 0.388027
361 183 0.386019 0.0154771 0.188051 0.388027
362 191 0.383877 0.0209782 0.208875 0.388027
363 204 0.384179 0.0259451 0.117137 0.388027
364 192 0.381079 0.036895 0.0264324 0.388027
365 162 0.385809 0.0170865 0.180907 0.388027
366 156 0.38401 0.0238525 0.147011 0.388027
367 179 0.384627 0.025447 0.101589 0.388027
368 177 0.38404 0.0267349 0.107131 0.388027
369 202 0.383396 0.0259764 0.142955 0.388027
370 167 0.386589 0.0172211 0.106056 0.388027
371 169 0.385573 0.0150237 0.237342 0.388027
372 175 0.38472 0.0238444 0.109277 0.388027
373 181 0.381721 0.0335613 0.126391 0.388027
374 168 0.383579 0.0289077 0.100377 0.388027
375 171 0.384051 0.0263752 0.137004 0.388027
376 172 0.384638 0.0223865 0.17393 0.388027
377 187 0.384331 0.0230784 0.149755 0.388027
378 179 0.383329 0.029928 0.0982308 0.388027
379 185 0.383788 0.0263411 0.117704 0.388027
380 186 0.383275 0.0343114 0.0151341 0.388027
381 204 0.384047 0.026102 0.117704 0.388027
382 176 0.384108 0.024209 0.185639 0.388027
383 191 0.383301 0.0272571 0.110695 0.388027
384 191 0.384172 0.02441 0.129995 0.388027
385 193 0.383598 0.02742 0.0860028 0.388027
386 173 0.385784 0.0160261 0.204051 0.388027
387 181 0.387094 0.00787438 0.292708 0.388027
388 182 0.386358 0.0114448 0.245145 0.388027
389 181 0.383483 0.0289901 0.104986 0.388027
390 179 0.385978 0.0180915 0.189285 0.388027
391 191 0.384881 0.0206491 0.210796 0.388027
392 193 0.383245 0.0291771 0.138618 0.388027
393 198 0.381918 0.0328153 0.109392 0.388027
394 193 0.382722 0.0315148 0.109277 0.388027
395 187 0.383825 0.0263514 0.137362 0.388027
396 162 0.38544 0.0229353 0.056801 0.388027
397 176 0.385272 0.018748 0.185843 0.388027
398 191 0.384541 0.0196733 0.235096 0.388027
399 175 0.384005 0.0246394 0.130041 0.388027
400 194 0.383817 0.0265952 0.137004 0.388027
401 185 0.385225 0.0207353 0.180907 0.388027
402 151 0.384054 0.0278588 0.104986 0.388027
403 167 0.382966 0.0282563 0.131247 0.388027
404 178 0.378283 0.0429611 0.101589 0.388027
405 179 0.386382 0.012911 0.232195 0.388027
406 191 0.383107 0.0310386 0.136198 0.388027
407 188 0.385062 0.0221977 0.107246 0.388027
408 186 0.382093 0.0305632 0.125619 0.388027
409 173 0.384899 0.0223985 0.137004 0.388027
410 161 0.381773 0.0352404 0.104986 0.388027
411 172 0.387009 0.0100629 0.245971 0.388027
412 187 0.38494 0.0225375 0.151765 0.388027
413 211 0.38532 0.019365 0.199914 0.388027
414 177 0.385184 0.0219882 0.117137 0.388027
415 167 0.37906 0.0432912 0.0551109 0.388027
416 187 0.384363 0.0266058 0.0909486 0.388027
417 188 0.384472 0.0246329 0.109277 0.388027
418 165 0.383761 0.0266642 0.0997641 0.388027
419 192 0.381637 0.0341629 0.0997641 0.388027
420 176 0.383231 0.0281287 0.107131 0.388027
421 191 0.384738 0.0251006 0.0941528 0.388027
422 193 0.384648 0.0228922 0.185843 0.388027
423 197 0.383362 0.0284441 0.137004 0.388027
424 160 0.38573 0.0202038 0.104986 0.388027
425 177 0.383659 0.0272294 0.117137 0.388027
426 176 0.386322 0.0130269 0.23087 0.388027
427 190 0.382707 0.0331727 0.0056404 0.388027
428 164 0.386354 0.0141142 0.185639 0.388027
429 184 0.383689 0.0286931 0.105764 0.388027
430 168 0.383724 0.0268983 0.107131 0.388027
431 161 0.382563 0.0329229 0.0582044 0.388027
432 166 0.381769 0.03981 0.0200931 0.388027
433 164 0.385717 0.017811 0.189351 0.388027
434 194 0.383808 0.0244459 0.137004 0.388027
435 180 0.383468 0.0283322 0.109277 0.388027
436 161 0.385138 0.0201458 0.197993 0.388027
437 194 0.384416 0.0251848 0.137004 0.388027
438 180 0.380449 0.0369333 0.090368 0.388027
439 190 0.383639 0.0290915 0.0632788 0.388027
440 181 0.382053 0.0330149 0.104986 0.388027
441 154 0.384764 0.022995 0.151765 0.388027
442 193 0.384995 0.0218473 0.137004 0.388027
443 177 0.386512 0.0156635 0.189285 0.388027
444 197 0.384769 0.0252032 0.104986 0.388027
445 181 0.383968 0.0287386 0.0997641 0.388027
446 167 0.385466 0.0249271 0.104986 0.388027
447 165 0.384326 0.0289705 0.0800762 0.388027
448 189 0.382358 0.0350372 0.0859215 0.388027
449 179 0.385133 0.0216263 0.16875 0.388027
450 178 0.383761 0.0326148 0.0507779 0.388027
451 171 0.385184 0.022761 0.109277 0.388027
452 182 0.384155 0.0259161 0.107246 0.388027
453 174 0.382945 0.0338782 0.0465851 0.388027
454 176 0.384653 0.0264389 0.105143 0.388027
455 179 0.380198 0.041639 0.0536386 0.388027
456 209 0.381687 0.0355969 0.0795069 0.388027
457 180 0.380326 0.0428586 0.0517177 0.388027
458 190 0.383185 0.028896 0.137004 0.388027
459 179 0.381619 0.0360356 0.0684048 0.388027
460 164 0.386071 0.0184457 0.101589 0.388027
461 181 0.383659 0.0295918 0.104986 0.388027
462 184 0.383715 0.0293049 0.100122 0.388027
463 186 0.381721 0.0353662 0.104986 0.388027
464 177 0.384957 0.021209 0.137004 0.388027
465 180 0.384098 0.0276804 0.0297153 0.388027
466 190 0.382812 0.0297675 0.101589 0.388027
467 170 0.381941 0.0331172 0.0856179 0.388027
468 161 0.383993 0.0241331 0.137004 0.388027
469 195 0.384229 0.0253538 0.144752 0.388027
470 175 0.382606 0.0319226 0.104986 0.388027
471 182 0.381872 0.0316798 0.107131 0.388027
472 187 0.382583 0.0335595 0.0759293 0.388027
473 186 0.386502 0.0131046 0.194109 0.388027
474 177 0.38463 0.0266495 0.104986 0.388027
475 164 0.385705 0.0220226 0.0684048 0.388027
476 184 0.38439 0.0242269 0.117137 0.388027
477 160 0.385274 0.0211212 0.117137 0.388027
478 190 0.380909 0.0387912 0.0752516 0.388027
479 183 0.381775 0.03649 0.0690056 0.388027
480 186 0.384917 0.020657 0.189285 0.388027
481 184 0.380627 0.0379035 0.0995044 0.388027
482 174 0.382321 0.0326893 0.109669 0.388027
483 194 0.384668 0.0202078 0.22315 0.388027
484 193 0.383323 0.0268971 0.0982308 0.388027
485 155 0.385461 0.0162397 0.232193 0.388027
486 179 0.383713 0.0268168 0.117137 0.388027
487 173 0.38595 0.0134049 0.229636 0.388027
488 166 0.382883 0.0320794 0.0995044 0.388027
489 174 0.385305 0.0214294 0.117137 0.388027
490 165 0.381419 0.0356942 0.0578141 0.388027
491 165 0.384588 0.0256965 0.086743 0.388027
492 166 0.385142 0.0228862 0.117137 0.388027
493 204 0.383545 0.0273915 0.117137 0.388027
494 190 0.383691 0.0265885 0.0997641 0.388027
495 172 0.382189 0.0332988 0.117137 0.388027
496 186 0.385547 0.0184003 0.219641 0.388027
497 178 0.382573 0.0323336 0.104986 0.388027
498 168 0.382951 0.0315729 0.117137 0.388027
499 171 0.383649 0.0288348 0.105764 0.388027
500 163 0.383209 0.0272381 0.107131 0.388027
In [50]:
# (mu+lambda) strategy: parents compete with offspring, far more elitist
# than eaSimple - the log below shows convergence by generation ~9.
hof2 = tools.HallOfFame(10)
# NOTE(review): these stats registrations duplicate cell In [40] verbatim.
stats = tools.Statistics(lambda ind: ind.fitness.values)
stats.register("avg", np.mean)
stats.register("std", np.std)
stats.register("min", np.min)
stats.register("max", np.max)
pop2, log = algorithms.eaMuPlusLambda(toolbox.clone(randpop), toolbox, mu=300, lambda_=300, cxpb=0.5, mutpb=0.2, ngen=500,
stats=stats, halloffame=hof2, verbose=True)
gen nevals avg std min max
0 300 0.117062 0.0865216 -0.0967767 0.385691
1 217 0.190001 0.0687066 0.0124255 0.385691
2 208 0.248002 0.0563308 0.106956 0.387033
3 214 0.286248 0.0514506 0.149389 0.387033
4 206 0.332004 0.0425736 0.217472 0.387033
5 193 0.364065 0.0284417 0.273538 0.387033
6 211 0.382841 0.00995757 0.32659 0.387033
7 198 0.386784 0.000899152 0.376748 0.387033
8 219 0.387016 0.000235537 0.383169 0.387033
9 215 0.387033 5.55112e-17 0.387033 0.387033
10 215 0.387033 5.55112e-17 0.387033 0.387033
11 224 0.387033 5.55112e-17 0.387033 0.387033
12 199 0.387033 5.55112e-17 0.387033 0.387033
13 205 0.387033 5.55112e-17 0.387033 0.387033
14 216 0.387033 5.55112e-17 0.387033 0.387033
15 203 0.387033 5.55112e-17 0.387033 0.387033
16 214 0.387033 5.55112e-17 0.387033 0.387033
17 199 0.387033 5.55112e-17 0.387033 0.387033
18 210 0.387033 5.55112e-17 0.387033 0.387033
19 211 0.387033 5.55112e-17 0.387033 0.387033
20 212 0.387033 5.55112e-17 0.387033 0.387033
21 207 0.387033 5.55112e-17 0.387033 0.387033
22 209 0.387033 5.55112e-17 0.387033 0.387033
23 216 0.387033 5.55112e-17 0.387033 0.387033
24 206 0.387033 5.55112e-17 0.387033 0.387033
25 199 0.387033 5.55112e-17 0.387033 0.387033
26 198 0.387033 5.55112e-17 0.387033 0.387033
27 207 0.387033 5.55112e-17 0.387033 0.387033
28 203 0.387033 5.55112e-17 0.387033 0.387033
29 207 0.387033 5.55112e-17 0.387033 0.387033
30 203 0.387033 5.55112e-17 0.387033 0.387033
31 213 0.387033 5.55112e-17 0.387033 0.387033
32 215 0.387033 5.55112e-17 0.387033 0.387033
33 216 0.387033 5.55112e-17 0.387033 0.387033
34 211 0.387033 5.55112e-17 0.387033 0.387033
35 210 0.387033 5.55112e-17 0.387033 0.387033
36 196 0.387033 5.55112e-17 0.387033 0.387033
37 203 0.387033 5.55112e-17 0.387033 0.387033
38 215 0.387033 5.55112e-17 0.387033 0.387033
39 217 0.387033 5.55112e-17 0.387033 0.387033
40 210 0.387033 5.55112e-17 0.387033 0.387033
41 217 0.387033 5.55112e-17 0.387033 0.387033
42 202 0.387033 5.55112e-17 0.387033 0.387033
43 207 0.387033 5.55112e-17 0.387033 0.387033
44 207 0.387033 5.55112e-17 0.387033 0.387033
45 208 0.387033 5.55112e-17 0.387033 0.387033
46 223 0.387033 5.55112e-17 0.387033 0.387033
47 209 0.387033 5.55112e-17 0.387033 0.387033
48 210 0.387033 5.55112e-17 0.387033 0.387033
49 224 0.387033 5.55112e-17 0.387033 0.387033
50 206 0.387033 5.55112e-17 0.387033 0.387033
51 208 0.387033 5.55112e-17 0.387033 0.387033
52 214 0.387033 5.55112e-17 0.387033 0.387033
53 223 0.387033 5.55112e-17 0.387033 0.387033
54 209 0.387033 5.55112e-17 0.387033 0.387033
55 208 0.387033 5.55112e-17 0.387033 0.387033
56 203 0.387033 5.55112e-17 0.387033 0.387033
57 195 0.387033 5.55112e-17 0.387033 0.387033
58 212 0.387033 5.55112e-17 0.387033 0.387033
59 212 0.387033 5.55112e-17 0.387033 0.387033
60 204 0.387033 5.55112e-17 0.387033 0.387033
61 204 0.387033 5.55112e-17 0.387033 0.387033
62 204 0.387033 5.55112e-17 0.387033 0.387033
63 209 0.387033 5.55112e-17 0.387033 0.387033
64 194 0.387033 5.55112e-17 0.387033 0.387033
65 208 0.387033 5.55112e-17 0.387033 0.387033
66 209 0.387033 5.55112e-17 0.387033 0.387033
67 215 0.387033 5.55112e-17 0.387033 0.387033
68 199 0.387033 5.55112e-17 0.387033 0.387033
69 204 0.387033 5.55112e-17 0.387033 0.387033
70 203 0.387033 5.55112e-17 0.387033 0.387033
71 205 0.387033 5.55112e-17 0.387033 0.387033
72 212 0.387033 5.55112e-17 0.387033 0.387033
73 198 0.387033 5.55112e-17 0.387033 0.387033
74 199 0.387033 5.55112e-17 0.387033 0.387033
75 206 0.387033 5.55112e-17 0.387033 0.387033
76 212 0.387033 5.55112e-17 0.387033 0.387033
77 206 0.387033 5.55112e-17 0.387033 0.387033
78 208 0.387033 5.55112e-17 0.387033 0.387033
79 215 0.387033 5.55112e-17 0.387033 0.387033
80 219 0.387033 5.55112e-17 0.387033 0.387033
81 217 0.387033 5.55112e-17 0.387033 0.387033
82 202 0.387033 5.55112e-17 0.387033 0.387033
83 213 0.387033 5.55112e-17 0.387033 0.387033
84 220 0.387033 5.55112e-17 0.387033 0.387033
85 210 0.387033 5.55112e-17 0.387033 0.387033
86 199 0.387033 5.55112e-17 0.387033 0.387033
87 220 0.387033 5.55112e-17 0.387033 0.387033
88 216 0.387033 5.55112e-17 0.387033 0.387033
89 206 0.387033 5.55112e-17 0.387033 0.387033
90 209 0.387033 5.55112e-17 0.387033 0.387033
91 193 0.387033 5.55112e-17 0.387033 0.387033
92 215 0.387033 5.55112e-17 0.387033 0.387033
93 196 0.387033 5.55112e-17 0.387033 0.387033
94 218 0.387033 5.55112e-17 0.387033 0.387033
95 213 0.387033 5.55112e-17 0.387033 0.387033
96 199 0.387033 5.55112e-17 0.387033 0.387033
97 222 0.387033 5.55112e-17 0.387033 0.387033
98 205 0.387033 5.55112e-17 0.387033 0.387033
99 219 0.387033 5.55112e-17 0.387033 0.387033
100 212 0.387033 5.55112e-17 0.387033 0.387033
101 207 0.387033 5.55112e-17 0.387033 0.387033
102 196 0.387033 5.55112e-17 0.387033 0.387033
103 203 0.387033 5.55112e-17 0.387033 0.387033
104 202 0.387033 5.55112e-17 0.387033 0.387033
105 205 0.387033 5.55112e-17 0.387033 0.387033
106 211 0.387033 5.55112e-17 0.387033 0.387033
107 216 0.387033 5.55112e-17 0.387033 0.387033
108 211 0.387033 5.55112e-17 0.387033 0.387033
109 202 0.387033 5.55112e-17 0.387033 0.387033
110 216 0.387033 5.55112e-17 0.387033 0.387033
111 195 0.387033 5.55112e-17 0.387033 0.387033
112 206 0.387033 5.55112e-17 0.387033 0.387033
113 208 0.387033 5.55112e-17 0.387033 0.387033
114 196 0.387033 5.55112e-17 0.387033 0.387033
115 210 0.387033 5.55112e-17 0.387033 0.387033
116 199 0.387033 5.55112e-17 0.387033 0.387033
117 218 0.387033 5.55112e-17 0.387033 0.387033
118 206 0.387033 5.55112e-17 0.387033 0.387033
119 219 0.387033 5.55112e-17 0.387033 0.387033
120 208 0.387033 5.55112e-17 0.387033 0.387033
121 198 0.387033 5.55112e-17 0.387033 0.387033
122 206 0.387033 5.55112e-17 0.387033 0.387033
123 220 0.387033 5.55112e-17 0.387033 0.387033
124 215 0.387033 5.55112e-17 0.387033 0.387033
125 204 0.387033 5.55112e-17 0.387033 0.387033
126 209 0.387033 5.55112e-17 0.387033 0.387033
127 211 0.387033 5.55112e-17 0.387033 0.387033
128 232 0.387033 5.55112e-17 0.387033 0.387033
129 210 0.387033 5.55112e-17 0.387033 0.387033
130 205 0.387033 5.55112e-17 0.387033 0.387033
131 218 0.387033 5.55112e-17 0.387033 0.387033
132 225 0.387033 5.55112e-17 0.387033 0.387033
133 207 0.387033 5.55112e-17 0.387033 0.387033
134 213 0.387033 5.55112e-17 0.387033 0.387033
135 205 0.387033 5.55112e-17 0.387033 0.387033
136 210 0.387033 5.55112e-17 0.387033 0.387033
137 211 0.387033 5.55112e-17 0.387033 0.387033
138 203 0.387033 5.55112e-17 0.387033 0.387033
139 205 0.387033 5.55112e-17 0.387033 0.387033
140 212 0.387033 5.55112e-17 0.387033 0.387033
141 203 0.387033 5.55112e-17 0.387033 0.387033
142 204 0.387033 5.55112e-17 0.387033 0.387033
143 210 0.387033 5.55112e-17 0.387033 0.387033
144 212 0.387033 5.55112e-17 0.387033 0.387033
145 209 0.387033 5.55112e-17 0.387033 0.387033
146 201 0.387033 5.55112e-17 0.387033 0.387033
147 211 0.387033 5.55112e-17 0.387033 0.387033
148 209 0.387033 5.55112e-17 0.387033 0.387033
149 213 0.387033 5.55112e-17 0.387033 0.387033
150 213 0.387033 5.55112e-17 0.387033 0.387033
151 214 0.387033 5.55112e-17 0.387033 0.387033
152 220 0.387033 5.55112e-17 0.387033 0.387033
153 199 0.387033 5.55112e-17 0.387033 0.387033
154 190 0.387033 5.55112e-17 0.387033 0.387033
155 213 0.387033 5.55112e-17 0.387033 0.387033
156 203 0.387033 5.55112e-17 0.387033 0.387033
157 207 0.387033 5.55112e-17 0.387033 0.387033
158 205 0.387033 5.55112e-17 0.387033 0.387033
159 206 0.387033 5.55112e-17 0.387033 0.387033
160 203 0.387033 5.55112e-17 0.387033 0.387033
161 198 0.387033 5.55112e-17 0.387033 0.387033
162 232 0.387033 5.55112e-17 0.387033 0.387033
163 202 0.387033 5.55112e-17 0.387033 0.387033
164 191 0.387033 5.55112e-17 0.387033 0.387033
165 216 0.387033 5.55112e-17 0.387033 0.387033
166 218 0.387033 5.55112e-17 0.387033 0.387033
167 196 0.387033 5.55112e-17 0.387033 0.387033
168 206 0.387033 5.55112e-17 0.387033 0.387033
169 214 0.387033 5.55112e-17 0.387033 0.387033
170 218 0.387033 5.55112e-17 0.387033 0.387033
171 208 0.387033 5.55112e-17 0.387033 0.387033
172 218 0.387033 5.55112e-17 0.387033 0.387033
173 217 0.387033 5.55112e-17 0.387033 0.387033
174 204 0.387033 5.55112e-17 0.387033 0.387033
175 217 0.387033 5.55112e-17 0.387033 0.387033
176 207 0.387033 5.55112e-17 0.387033 0.387033
177 223 0.387033 5.55112e-17 0.387033 0.387033
178 214 0.387033 5.55112e-17 0.387033 0.387033
179 224 0.387033 5.55112e-17 0.387033 0.387033
180 207 0.387033 5.55112e-17 0.387033 0.387033
181 207 0.387033 5.55112e-17 0.387033 0.387033
182 202 0.387033 5.55112e-17 0.387033 0.387033
183 217 0.387033 5.55112e-17 0.387033 0.387033
184 209 0.387033 5.55112e-17 0.387033 0.387033
185 218 0.387033 5.55112e-17 0.387033 0.387033
186 211 0.387033 5.55112e-17 0.387033 0.387033
187 207 0.387033 5.55112e-17 0.387033 0.387033
188 215 0.387033 5.55112e-17 0.387033 0.387033
189 204 0.387033 5.55112e-17 0.387033 0.387033
190 212 0.387033 5.55112e-17 0.387033 0.387033
191 208 0.387033 5.55112e-17 0.387033 0.387033
192 219 0.387033 5.55112e-17 0.387033 0.387033
193 206 0.387033 5.55112e-17 0.387033 0.387033
194 203 0.387033 5.55112e-17 0.387033 0.387033
195 214 0.387033 5.55112e-17 0.387033 0.387033
196 204 0.387033 5.55112e-17 0.387033 0.387033
197 201 0.387033 5.55112e-17 0.387033 0.387033
198 218 0.387033 5.55112e-17 0.387033 0.387033
199 214 0.387033 5.55112e-17 0.387033 0.387033
200 205 0.387033 5.55112e-17 0.387033 0.387033
201 206 0.387033 5.55112e-17 0.387033 0.387033
202 215 0.387033 5.55112e-17 0.387033 0.387033
203 221 0.387033 5.55112e-17 0.387033 0.387033
204 210 0.387033 5.55112e-17 0.387033 0.387033
205 217 0.387033 5.55112e-17 0.387033 0.387033
206 200 0.387033 5.55112e-17 0.387033 0.387033
207 211 0.387033 5.55112e-17 0.387033 0.387033
208 216 0.387033 5.55112e-17 0.387033 0.387033
209 215 0.387033 5.55112e-17 0.387033 0.387033
210 221 0.387033 5.55112e-17 0.387033 0.387033
211 205 0.387033 5.55112e-17 0.387033 0.387033
212 218 0.387033 5.55112e-17 0.387033 0.387033
213 215 0.387033 5.55112e-17 0.387033 0.387033
214 214 0.387033 5.55112e-17 0.387033 0.387033
215 210 0.387033 5.55112e-17 0.387033 0.387033
216 224 0.387033 5.55112e-17 0.387033 0.387033
217 201 0.387033 5.55112e-17 0.387033 0.387033
218 223 0.387033 5.55112e-17 0.387033 0.387033
219 208 0.387033 5.55112e-17 0.387033 0.387033
220 218 0.387033 5.55112e-17 0.387033 0.387033
221 230 0.387033 5.55112e-17 0.387033 0.387033
222 228 0.387033 5.55112e-17 0.387033 0.387033
223 208 0.387033 5.55112e-17 0.387033 0.387033
224 207 0.387033 5.55112e-17 0.387033 0.387033
225 220 0.387033 5.55112e-17 0.387033 0.387033
226 225 0.387033 5.55112e-17 0.387033 0.387033
227 207 0.387033 5.55112e-17 0.387033 0.387033
228 215 0.387033 5.55112e-17 0.387033 0.387033
229 205 0.387033 5.55112e-17 0.387033 0.387033
230 203 0.387033 5.55112e-17 0.387033 0.387033
231 204 0.387033 5.55112e-17 0.387033 0.387033
232 212 0.387033 5.55112e-17 0.387033 0.387033
233 214 0.387033 5.55112e-17 0.387033 0.387033
234 207 0.387033 5.55112e-17 0.387033 0.387033
235 227 0.387033 5.55112e-17 0.387033 0.387033
236 220 0.387033 5.55112e-17 0.387033 0.387033
237 214 0.387033 5.55112e-17 0.387033 0.387033
238 222 0.387033 5.55112e-17 0.387033 0.387033
239 217 0.387033 5.55112e-17 0.387033 0.387033
240 205 0.387033 5.55112e-17 0.387033 0.387033
241 204 0.387033 5.55112e-17 0.387033 0.387033
242 217 0.387033 5.55112e-17 0.387033 0.387033
243 191 0.387033 5.55112e-17 0.387033 0.387033
244 218 0.387033 5.55112e-17 0.387033 0.387033
245 202 0.387033 5.55112e-17 0.387033 0.387033
246 218 0.387033 5.55112e-17 0.387033 0.387033
247 205 0.387033 5.55112e-17 0.387033 0.387033
248 204 0.387033 5.55112e-17 0.387033 0.387033
249 213 0.387033 5.55112e-17 0.387033 0.387033
250 210 0.387033 5.55112e-17 0.387033 0.387033
251 213 0.387033 5.55112e-17 0.387033 0.387033
252 198 0.387033 5.55112e-17 0.387033 0.387033
253 207 0.387033 5.55112e-17 0.387033 0.387033
254 209 0.387033 5.55112e-17 0.387033 0.387033
255 202 0.387033 5.55112e-17 0.387033 0.387033
256 212 0.387033 5.55112e-17 0.387033 0.387033
257 209 0.387033 5.55112e-17 0.387033 0.387033
258 221 0.387033 5.55112e-17 0.387033 0.387033
259 206 0.387033 5.55112e-17 0.387033 0.387033
260 198 0.387033 5.55112e-17 0.387033 0.387033
261 203 0.387033 5.55112e-17 0.387033 0.387033
262 216 0.387033 5.55112e-17 0.387033 0.387033
263 203 0.387033 5.55112e-17 0.387033 0.387033
264 198 0.387033 5.55112e-17 0.387033 0.387033
265 216 0.387033 5.55112e-17 0.387033 0.387033
266 211 0.387033 5.55112e-17 0.387033 0.387033
267 197 0.387033 5.55112e-17 0.387033 0.387033
268 220 0.387033 5.55112e-17 0.387033 0.387033
269 227 0.387033 5.55112e-17 0.387033 0.387033
270 200 0.387033 5.55112e-17 0.387033 0.387033
271 212 0.387033 5.55112e-17 0.387033 0.387033
272 197 0.387033 5.55112e-17 0.387033 0.387033
273 209 0.387033 5.55112e-17 0.387033 0.387033
274 200 0.387033 5.55112e-17 0.387033 0.387033
275 221 0.387033 5.55112e-17 0.387033 0.387033
276 196 0.387033 5.55112e-17 0.387033 0.387033
277 204 0.387033 5.55112e-17 0.387033 0.387033
278 214 0.387033 5.55112e-17 0.387033 0.387033
279 217 0.387033 5.55112e-17 0.387033 0.387033
280 213 0.387033 5.55112e-17 0.387033 0.387033
281 214 0.387033 5.55112e-17 0.387033 0.387033
282 210 0.387033 5.55112e-17 0.387033 0.387033
283 207 0.387033 5.55112e-17 0.387033 0.387033
284 193 0.387033 5.55112e-17 0.387033 0.387033
285 198 0.387033 5.55112e-17 0.387033 0.387033
286 204 0.387033 5.55112e-17 0.387033 0.387033
287 208 0.387033 5.55112e-17 0.387033 0.387033
288 202 0.387033 5.55112e-17 0.387033 0.387033
289 212 0.387033 5.55112e-17 0.387033 0.387033
290 209 0.387033 5.55112e-17 0.387033 0.387033
291 225 0.387033 5.55112e-17 0.387033 0.387033
292 227 0.387033 5.55112e-17 0.387033 0.387033
293 218 0.387033 5.55112e-17 0.387033 0.387033
294 213 0.387033 5.55112e-17 0.387033 0.387033
295 210 0.387033 5.55112e-17 0.387033 0.387033
296 199 0.387033 5.55112e-17 0.387033 0.387033
297 211 0.387033 5.55112e-17 0.387033 0.387033
298 208 0.387033 5.55112e-17 0.387033 0.387033
299 205 0.387033 5.55112e-17 0.387033 0.387033
300 220 0.387033 5.55112e-17 0.387033 0.387033
301 214 0.387033 5.55112e-17 0.387033 0.387033
302 197 0.387033 5.55112e-17 0.387033 0.387033
303 206 0.387033 5.55112e-17 0.387033 0.387033
304 212 0.387033 5.55112e-17 0.387033 0.387033
305 189 0.387033 5.55112e-17 0.387033 0.387033
306 215 0.387033 5.55112e-17 0.387033 0.387033
307 217 0.387033 5.55112e-17 0.387033 0.387033
308 201 0.387033 5.55112e-17 0.387033 0.387033
309 217 0.387033 5.55112e-17 0.387033 0.387033
310 207 0.387033 5.55112e-17 0.387033 0.387033
311 210 0.387033 5.55112e-17 0.387033 0.387033
312 202 0.387033 5.55112e-17 0.387033 0.387033
313 217 0.387033 5.55112e-17 0.387033 0.387033
314 210 0.387033 5.55112e-17 0.387033 0.387033
315 213 0.387033 5.55112e-17 0.387033 0.387033
316 208 0.387033 5.55112e-17 0.387033 0.387033
317 199 0.387033 5.55112e-17 0.387033 0.387033
318 206 0.387033 5.55112e-17 0.387033 0.387033
319 195 0.387033 5.55112e-17 0.387033 0.387033
320 197 0.387033 5.55112e-17 0.387033 0.387033
321 199 0.387033 5.55112e-17 0.387033 0.387033
322 209 0.387033 5.55112e-17 0.387033 0.387033
323 205 0.387033 5.55112e-17 0.387033 0.387033
324 203 0.387033 5.55112e-17 0.387033 0.387033
325 217 0.387033 5.55112e-17 0.387033 0.387033
326 226 0.387033 5.55112e-17 0.387033 0.387033
327 209 0.387033 5.55112e-17 0.387033 0.387033
328 206 0.387033 5.55112e-17 0.387033 0.387033
329 224 0.387033 5.55112e-17 0.387033 0.387033
330 197 0.387033 5.55112e-17 0.387033 0.387033
331 208 0.387033 5.55112e-17 0.387033 0.387033
332 219 0.387033 5.55112e-17 0.387033 0.387033
333 203 0.387033 5.55112e-17 0.387033 0.387033
334 206 0.387033 5.55112e-17 0.387033 0.387033
335 200 0.387033 5.55112e-17 0.387033 0.387033
336 199 0.387033 5.55112e-17 0.387033 0.387033
337 209 0.387033 5.55112e-17 0.387033 0.387033
338 194 0.387033 5.55112e-17 0.387033 0.387033
339 216 0.387033 5.55112e-17 0.387033 0.387033
340 203 0.387033 5.55112e-17 0.387033 0.387033
341 217 0.387033 5.55112e-17 0.387033 0.387033
342 215 0.387033 5.55112e-17 0.387033 0.387033
343 216 0.387033 5.55112e-17 0.387033 0.387033
344 208 0.387033 5.55112e-17 0.387033 0.387033
345 219 0.387033 5.55112e-17 0.387033 0.387033
346 214 0.387033 5.55112e-17 0.387033 0.387033
347 193 0.387033 5.55112e-17 0.387033 0.387033
348 210 0.387033 5.55112e-17 0.387033 0.387033
349 198 0.387033 5.55112e-17 0.387033 0.387033
350 215 0.387033 5.55112e-17 0.387033 0.387033
351 217 0.387033 5.55112e-17 0.387033 0.387033
352 216 0.387033 5.55112e-17 0.387033 0.387033
353 215 0.387033 5.55112e-17 0.387033 0.387033
354 204 0.387033 5.55112e-17 0.387033 0.387033
355 220 0.387033 5.55112e-17 0.387033 0.387033
356 212 0.387033 5.55112e-17 0.387033 0.387033
357 202 0.387033 5.55112e-17 0.387033 0.387033
358 209 0.387033 5.55112e-17 0.387033 0.387033
359 230 0.387033 5.55112e-17 0.387033 0.387033
360 215 0.387033 5.55112e-17 0.387033 0.387033
361 200 0.387033 5.55112e-17 0.387033 0.387033
362 207 0.387033 5.55112e-17 0.387033 0.387033
363 204 0.387033 5.55112e-17 0.387033 0.387033
364 203 0.387033 5.55112e-17 0.387033 0.387033
365 206 0.387033 5.55112e-17 0.387033 0.387033
366 213 0.387033 5.55112e-17 0.387033 0.387033
367 209 0.387033 5.55112e-17 0.387033 0.387033
368 207 0.387033 5.55112e-17 0.387033 0.387033
369 219 0.387033 5.55112e-17 0.387033 0.387033
370 217 0.387033 5.55112e-17 0.387033 0.387033
371 210 0.387033 5.55112e-17 0.387033 0.387033
372 216 0.387033 5.55112e-17 0.387033 0.387033
373 217 0.387033 5.55112e-17 0.387033 0.387033
374 219 0.387033 5.55112e-17 0.387033 0.387033
375 225 0.387033 5.55112e-17 0.387033 0.387033
376 222 0.387033 5.55112e-17 0.387033 0.387033
377 222 0.387033 5.55112e-17 0.387033 0.387033
378 200 0.387033 5.55112e-17 0.387033 0.387033
379 212 0.387033 5.55112e-17 0.387033 0.387033
380 210 0.387033 5.55112e-17 0.387033 0.387033
381 199 0.387033 5.55112e-17 0.387033 0.387033
382 222 0.387033 5.55112e-17 0.387033 0.387033
383 218 0.387033 5.55112e-17 0.387033 0.387033
384 202 0.387033 5.55112e-17 0.387033 0.387033
385 208 0.387033 5.55112e-17 0.387033 0.387033
386 211 0.387033 5.55112e-17 0.387033 0.387033
387 206 0.387033 5.55112e-17 0.387033 0.387033
388 216 0.387033 5.55112e-17 0.387033 0.387033
389 216 0.387033 5.55112e-17 0.387033 0.387033
390 204 0.387033 5.55112e-17 0.387033 0.387033
391 215 0.387033 5.55112e-17 0.387033 0.387033
392 219 0.387033 5.55112e-17 0.387033 0.387033
393 204 0.387033 5.55112e-17 0.387033 0.387033
394 211 0.387033 5.55112e-17 0.387033 0.387033
395 221 0.387033 5.55112e-17 0.387033 0.387033
396 205 0.387033 5.55112e-17 0.387033 0.387033
397 220 0.387033 5.55112e-17 0.387033 0.387033
398 206 0.387033 5.55112e-17 0.387033 0.387033
399 208 0.387033 5.55112e-17 0.387033 0.387033
400 206 0.387033 5.55112e-17 0.387033 0.387033
401 219 0.387033 5.55112e-17 0.387033 0.387033
402 213 0.387033 5.55112e-17 0.387033 0.387033
403 199 0.387033 5.55112e-17 0.387033 0.387033
404 219 0.387033 5.55112e-17 0.387033 0.387033
405 211 0.387033 5.55112e-17 0.387033 0.387033
406 204 0.387033 5.55112e-17 0.387033 0.387033
407 211 0.387033 5.55112e-17 0.387033 0.387033
408 202 0.387033 5.55112e-17 0.387033 0.387033
409 213 0.387033 5.55112e-17 0.387033 0.387033
410 215 0.387033 5.55112e-17 0.387033 0.387033
411 203 0.387033 5.55112e-17 0.387033 0.387033
412 213 0.387033 5.55112e-17 0.387033 0.387033
413 209 0.387033 5.55112e-17 0.387033 0.387033
414 219 0.387033 5.55112e-17 0.387033 0.387033
415 218 0.387033 5.55112e-17 0.387033 0.387033
416 227 0.387033 5.55112e-17 0.387033 0.387033
417 212 0.387033 5.55112e-17 0.387033 0.387033
418 216 0.387033 5.55112e-17 0.387033 0.387033
419 203 0.387033 5.55112e-17 0.387033 0.387033
420 208 0.387033 5.55112e-17 0.387033 0.387033
421 206 0.387033 5.55112e-17 0.387033 0.387033
422 199 0.387033 5.55112e-17 0.387033 0.387033
423 207 0.387033 5.55112e-17 0.387033 0.387033
424 209 0.387033 5.55112e-17 0.387033 0.387033
425 227 0.387033 5.55112e-17 0.387033 0.387033
426 217 0.387033 5.55112e-17 0.387033 0.387033
427 221 0.387033 5.55112e-17 0.387033 0.387033
428 206 0.387033 5.55112e-17 0.387033 0.387033
429 203 0.387033 5.55112e-17 0.387033 0.387033
430 221 0.387033 5.55112e-17 0.387033 0.387033
431 205 0.387033 5.55112e-17 0.387033 0.387033
432 199 0.387033 5.55112e-17 0.387033 0.387033
433 206 0.387033 5.55112e-17 0.387033 0.387033
434 208 0.387033 5.55112e-17 0.387033 0.387033
435 216 0.387033 5.55112e-17 0.387033 0.387033
436 215 0.387033 5.55112e-17 0.387033 0.387033
437 197 0.387033 5.55112e-17 0.387033 0.387033
438 211 0.387033 5.55112e-17 0.387033 0.387033
439 207 0.387033 5.55112e-17 0.387033 0.387033
440 228 0.387033 5.55112e-17 0.387033 0.387033
441 221 0.387033 5.55112e-17 0.387033 0.387033
442 214 0.387033 5.55112e-17 0.387033 0.387033
443 213 0.387033 5.55112e-17 0.387033 0.387033
444 224 0.387033 5.55112e-17 0.387033 0.387033
445 215 0.387033 5.55112e-17 0.387033 0.387033
446 217 0.387033 5.55112e-17 0.387033 0.387033
447 198 0.387033 5.55112e-17 0.387033 0.387033
448 217 0.387033 5.55112e-17 0.387033 0.387033
449 214 0.387033 5.55112e-17 0.387033 0.387033
450 198 0.387033 5.55112e-17 0.387033 0.387033
451 201 0.387033 5.55112e-17 0.387033 0.387033
452 219 0.387033 5.55112e-17 0.387033 0.387033
453 205 0.387033 5.55112e-17 0.387033 0.387033
454 209 0.387033 5.55112e-17 0.387033 0.387033
455 224 0.387033 5.55112e-17 0.387033 0.387033
456 197 0.387033 5.55112e-17 0.387033 0.387033
457 209 0.387033 5.55112e-17 0.387033 0.387033
458 212 0.387033 5.55112e-17 0.387033 0.387033
459 221 0.387033 5.55112e-17 0.387033 0.387033
460 203 0.387033 5.55112e-17 0.387033 0.387033
461 214 0.387033 5.55112e-17 0.387033 0.387033
462 217 0.387033 5.55112e-17 0.387033 0.387033
463 204 0.387033 5.55112e-17 0.387033 0.387033
464 205 0.387033 5.55112e-17 0.387033 0.387033
465 209 0.387033 5.55112e-17 0.387033 0.387033
466 204 0.387033 5.55112e-17 0.387033 0.387033
467 201 0.387033 5.55112e-17 0.387033 0.387033
468 219 0.387033 5.55112e-17 0.387033 0.387033
469 221 0.387033 5.55112e-17 0.387033 0.387033
470 205 0.387033 5.55112e-17 0.387033 0.387033
471 199 0.387033 5.55112e-17 0.387033 0.387033
472 223 0.387033 5.55112e-17 0.387033 0.387033
473 211 0.387033 5.55112e-17 0.387033 0.387033
474 223 0.387033 5.55112e-17 0.387033 0.387033
475 215 0.387033 5.55112e-17 0.387033 0.387033
476 208 0.387033 5.55112e-17 0.387033 0.387033
477 209 0.387033 5.55112e-17 0.387033 0.387033
478 224 0.387033 5.55112e-17 0.387033 0.387033
479 214 0.387033 5.55112e-17 0.387033 0.387033
480 217 0.387033 5.55112e-17 0.387033 0.387033
481 199 0.387033 5.55112e-17 0.387033 0.387033
482 203 0.387033 5.55112e-17 0.387033 0.387033
483 208 0.387033 5.55112e-17 0.387033 0.387033
484 214 0.387033 5.55112e-17 0.387033 0.387033
485 212 0.387033 5.55112e-17 0.387033 0.387033
486 214 0.387033 5.55112e-17 0.387033 0.387033
487 211 0.387033 5.55112e-17 0.387033 0.387033
488 206 0.387033 5.55112e-17 0.387033 0.387033
489 219 0.387033 5.55112e-17 0.387033 0.387033
490 203 0.387033 5.55112e-17 0.387033 0.387033
491 202 0.387033 5.55112e-17 0.387033 0.387033
492 211 0.387033 5.55112e-17 0.387033 0.387033
493 222 0.387033 5.55112e-17 0.387033 0.387033
494 197 0.387033 5.55112e-17 0.387033 0.387033
495 221 0.387033 5.55112e-17 0.387033 0.387033
496 205 0.387033 5.55112e-17 0.387033 0.387033
497 217 0.387033 5.55112e-17 0.387033 0.387033
498 220 0.387033 5.55112e-17 0.387033 0.387033
499 211 0.387033 5.55112e-17 0.387033 0.387033
500 222 0.387033 5.55112e-17 0.387033 0.387033
In [106]:
# Run a (mu + lambda) evolutionary search (DEAP) over the surrogate-model
# parameter space, seeded from populations built in earlier cells.
# NOTE(review): `tools`, `algorithms`, `toolbox`, `randpop`, `initpop` are all
# defined in cells above this chunk — presumably DEAP's tools/algorithms modules
# and a configured toolbox; confirm against the earlier setup cells.
hof3 = tools.HallOfFame(10)  # retain the 10 best individuals ever evaluated
# Per-generation statistics, computed over each individual's fitness values.
stats = tools.Statistics(lambda ind: ind.fitness.values)
stats.register("avg", np.mean)
stats.register("std", np.std)
stats.register("min", np.min)
stats.register("max", np.max)
# Start from a copy of the combined random + initial populations so the
# originals are not mutated by the algorithm. mu=300 parents are kept each
# generation, lambda_=300 offspring are generated (crossover prob 0.5,
# mutation prob 0.2), for 500 generations; verbose=True prints the log below.
pop3, log = algorithms.eaMuPlusLambda(toolbox.clone(randpop+initpop), toolbox, mu=300, lambda_=300, cxpb=0.5, mutpb=0.2, ngen=500,
stats=stats, halloffame=hof3, verbose=True)
gen nevals avg std min max
0 3885 0.268636 0.139677 -0.104469 0.43835
1 206 0.375477 0.0659599 0.0749015 0.43835
2 201 0.413691 0.0193256 0.337176 0.434914
3 198 0.427223 0.00898158 0.372444 0.434914
4 206 0.432788 0.00390736 0.41105 0.434914
5 210 0.434769 0.000959089 0.427952 0.435391
6 209 0.434919 4.75056e-05 0.434914 0.435391
7 221 0.43494 0.000193896 0.434914 0.436771
8 200 0.435027 0.000419591 0.434914 0.436771
9 211 0.435281 0.00071803 0.434914 0.436771
10 218 0.43567 0.000869872 0.434914 0.436771
11 199 0.436357 0.000736157 0.434914 0.437462
12 199 0.436773 0.000231465 0.434914 0.437462
13 218 0.436838 0.000163846 0.436771 0.437462
14 211 0.43692 0.000228591 0.436771 0.43794
15 216 0.437081 0.000251408 0.436771 0.43794
16 203 0.437316 0.00022871 0.436771 0.43794
17 217 0.437527 0.000491331 0.430339 0.43794
18 210 0.437802 0.00022298 0.437249 0.438165
19 199 0.437957 0.000115849 0.437462 0.438165
20 197 0.43803 0.000106252 0.43794 0.438165
21 211 0.438121 8.2642e-05 0.43794 0.438258
22 198 0.438157 9.11228e-05 0.436646 0.438258
23 210 0.438169 8.07154e-05 0.436986 0.438484
24 216 0.438186 6.78509e-05 0.438165 0.438484
25 205 0.438226 0.000110696 0.438165 0.438484
26 220 0.438306 0.00013851 0.438165 0.438484
27 211 0.438419 0.000110646 0.438165 0.438484
28 210 0.438476 4.10786e-05 0.438258 0.438484
29 225 0.438484 1.11022e-16 0.438484 0.438484
30 217 0.438484 1.11022e-16 0.438484 0.438484
31 218 0.438484 1.11022e-16 0.438484 0.438484
32 210 0.438484 1.11022e-16 0.438484 0.438484
33 198 0.438484 1.11022e-16 0.438484 0.438484
34 211 0.438484 1.11022e-16 0.438484 0.438484
35 217 0.438484 1.11022e-16 0.438484 0.438484
36 216 0.438484 1.11022e-16 0.438484 0.438484
37 208 0.438484 1.11022e-16 0.438484 0.438484
38 209 0.438484 1.11022e-16 0.438484 0.438484
39 212 0.438484 1.11022e-16 0.438484 0.438484
40 220 0.438484 1.11022e-16 0.438484 0.438484
41 196 0.438484 1.11022e-16 0.438484 0.438484
42 217 0.438484 1.11022e-16 0.438484 0.438484
43 201 0.438484 1.11022e-16 0.438484 0.438484
44 203 0.438484 1.11022e-16 0.438484 0.438484
45 207 0.438484 1.11022e-16 0.438484 0.438484
46 211 0.438484 1.11022e-16 0.438484 0.438484
47 205 0.438484 1.11022e-16 0.438484 0.438484
48 214 0.438484 1.11022e-16 0.438484 0.438484
49 208 0.438484 1.11022e-16 0.438484 0.438484
50 202 0.438484 1.11022e-16 0.438484 0.438484
51 200 0.438484 1.11022e-16 0.438484 0.438484
52 202 0.438484 1.11022e-16 0.438484 0.438484
53 216 0.438484 1.11022e-16 0.438484 0.438484
54 198 0.438484 1.11022e-16 0.438484 0.438484
55 214 0.438484 1.11022e-16 0.438484 0.438484
56 206 0.438484 1.11022e-16 0.438484 0.438484
57 225 0.438484 1.11022e-16 0.438484 0.438484
58 208 0.438484 1.11022e-16 0.438484 0.438484
59 215 0.438484 1.11022e-16 0.438484 0.438484
60 208 0.438484 1.11022e-16 0.438484 0.438484
61 205 0.438484 1.11022e-16 0.438484 0.438484
62 199 0.438484 1.11022e-16 0.438484 0.438484
63 207 0.438484 1.11022e-16 0.438484 0.438484
64 227 0.438461 0.000397252 0.431591 0.438484
65 197 0.438484 1.11022e-16 0.438484 0.438484
66 205 0.438484 1.11022e-16 0.438484 0.438484
67 208 0.438484 1.11022e-16 0.438484 0.438484
68 209 0.438484 1.11022e-16 0.438484 0.438484
69 212 0.438484 1.11022e-16 0.438484 0.438484
70 209 0.438484 1.11022e-16 0.438484 0.438484
71 214 0.438484 1.11022e-16 0.438484 0.438484
72 215 0.438484 1.11022e-16 0.438484 0.438484
73 217 0.438484 1.11022e-16 0.438484 0.438484
74 207 0.438484 1.11022e-16 0.438484 0.438484
75 196 0.438484 1.11022e-16 0.438484 0.438484
76 201 0.438484 1.11022e-16 0.438484 0.438484
77 228 0.438484 1.11022e-16 0.438484 0.438484
78 216 0.438484 1.11022e-16 0.438484 0.438484
79 213 0.438484 1.11022e-16 0.438484 0.438484
80 219 0.438484 1.11022e-16 0.438484 0.438484
81 185 0.438484 1.11022e-16 0.438484 0.438484
82 218 0.438358 0.00217697 0.400714 0.438484
83 202 0.438484 1.11022e-16 0.438484 0.438484
84 204 0.438484 1.11022e-16 0.438484 0.438484
85 209 0.438484 1.11022e-16 0.438484 0.438484
86 211 0.438484 1.11022e-16 0.438484 0.438484
87 211 0.438484 1.11022e-16 0.438484 0.438484
88 200 0.438484 1.11022e-16 0.438484 0.438484
89 201 0.438484 1.11022e-16 0.438484 0.438484
90 200 0.438484 1.11022e-16 0.438484 0.438484
91 219 0.438484 1.11022e-16 0.438484 0.438484
92 196 0.438484 1.11022e-16 0.438484 0.438484
93 203 0.438484 1.11022e-16 0.438484 0.438484
94 197 0.438484 1.11022e-16 0.438484 0.438484
95 201 0.438484 1.11022e-16 0.438484 0.438484
96 205 0.438484 1.11022e-16 0.438484 0.438484
97 198 0.438484 1.11022e-16 0.438484 0.438484
98 218 0.438484 1.11022e-16 0.438484 0.438484
99 202 0.438484 1.11022e-16 0.438484 0.438484
100 205 0.438484 1.11022e-16 0.438484 0.438484
101 197 0.438484 1.11022e-16 0.438484 0.438484
102 209 0.438484 1.11022e-16 0.438484 0.438484
103 211 0.438484 1.11022e-16 0.438484 0.438484
104 204 0.438484 1.11022e-16 0.438484 0.438484
105 222 0.438484 1.11022e-16 0.438484 0.438484
106 212 0.438484 1.11022e-16 0.438484 0.438484
107 196 0.438484 1.11022e-16 0.438484 0.438484
108 218 0.438484 1.11022e-16 0.438484 0.438484
109 224 0.438484 1.11022e-16 0.438484 0.438484
110 210 0.438484 1.11022e-16 0.438484 0.438484
111 219 0.438484 1.11022e-16 0.438484 0.438484
112 211 0.438484 1.11022e-16 0.438484 0.438484
113 206 0.438484 1.11022e-16 0.438484 0.438484
114 223 0.438478 0.000103486 0.436688 0.438484
115 220 0.438484 1.11022e-16 0.438484 0.438484
116 215 0.438484 1.11022e-16 0.438484 0.438484
117 206 0.438484 1.11022e-16 0.438484 0.438484
118 214 0.438484 1.11022e-16 0.438484 0.438484
119 215 0.438484 1.11022e-16 0.438484 0.438484
120 215 0.438484 1.11022e-16 0.438484 0.438484
121 213 0.438484 1.11022e-16 0.438484 0.438484
122 209 0.438484 1.11022e-16 0.438484 0.438484
123 210 0.438484 1.11022e-16 0.438484 0.438484
124 215 0.438484 1.11022e-16 0.438484 0.438484
125 208 0.438484 1.11022e-16 0.438484 0.438484
126 190 0.438484 1.11022e-16 0.438484 0.438484
127 207 0.438484 1.11022e-16 0.438484 0.438484
128 202 0.438484 1.11022e-16 0.438484 0.438484
129 215 0.438484 1.11022e-16 0.438484 0.438484
130 209 0.438484 1.11022e-16 0.438484 0.438484
131 205 0.438484 1.11022e-16 0.438484 0.438484
132 210 0.438484 1.11022e-16 0.438484 0.438484
133 203 0.438484 1.11022e-16 0.438484 0.438484
134 207 0.438484 1.11022e-16 0.438484 0.438484
135 220 0.438484 1.11022e-16 0.438484 0.438484
136 194 0.438484 1.11022e-16 0.438484 0.438484
137 199 0.438484 1.11022e-16 0.438484 0.438484
138 218 0.438484 1.11022e-16 0.438484 0.438484
139 214 0.438484 1.11022e-16 0.438484 0.438484
140 215 0.438484 1.11022e-16 0.438484 0.438484
141 223 0.438484 1.11022e-16 0.438484 0.438484
142 216 0.438484 1.11022e-16 0.438484 0.438484
143 200 0.438484 1.11022e-16 0.438484 0.438484
144 205 0.438484 1.11022e-16 0.438484 0.438484
145 205 0.438484 1.11022e-16 0.438484 0.438484
146 212 0.438484 1.11022e-16 0.438484 0.438484
147 212 0.438484 1.11022e-16 0.438484 0.438484
148 218 0.438484 1.11022e-16 0.438484 0.438484
149 218 0.438484 1.11022e-16 0.438484 0.438484
150 210 0.438484 1.11022e-16 0.438484 0.438484
151 218 0.438484 1.11022e-16 0.438484 0.438484
152 203 0.438484 1.11022e-16 0.438484 0.438484
153 199 0.438484 1.11022e-16 0.438484 0.438484
154 223 0.438484 1.11022e-16 0.438484 0.438484
155 199 0.438484 1.11022e-16 0.438484 0.438484
156 215 0.438484 1.11022e-16 0.438484 0.438484
157 208 0.438484 1.11022e-16 0.438484 0.438484
158 226 0.438484 1.11022e-16 0.438484 0.438484
159 209 0.438484 1.11022e-16 0.438484 0.438484
160 211 0.438484 1.11022e-16 0.438484 0.438484
161 209 0.438484 1.11022e-16 0.438484 0.438484
162 216 0.438484 1.11022e-16 0.438484 0.438484
163 199 0.438484 1.11022e-16 0.438484 0.438484
164 214 0.438484 1.11022e-16 0.438484 0.438484
165 208 0.438484 1.11022e-16 0.438484 0.438484
166 206 0.438484 1.11022e-16 0.438484 0.438484
167 219 0.438484 1.11022e-16 0.438484 0.438484
168 204 0.438484 1.11022e-16 0.438484 0.438484
169 201 0.438484 1.11022e-16 0.438484 0.438484
170 214 0.438484 1.11022e-16 0.438484 0.438484
171 217 0.438484 1.11022e-16 0.438484 0.438484
172 215 0.437759 0.0125237 0.221204 0.438484
173 205 0.438484 1.11022e-16 0.438484 0.438484
174 214 0.438484 1.11022e-16 0.438484 0.438484
175 215 0.438484 1.11022e-16 0.438484 0.438484
176 212 0.438484 1.11022e-16 0.438484 0.438484
177 205 0.438484 1.11022e-16 0.438484 0.438484
178 226 0.438484 1.11022e-16 0.438484 0.438484
179 211 0.438484 1.11022e-16 0.438484 0.438484
180 201 0.438484 1.11022e-16 0.438484 0.438484
181 210 0.438484 1.11022e-16 0.438484 0.438484
182 210 0.438484 1.11022e-16 0.438484 0.438484
183 215 0.438484 1.11022e-16 0.438484 0.438484
184 204 0.438484 1.11022e-16 0.438484 0.438484
185 209 0.438484 1.11022e-16 0.438484 0.438484
186 202 0.438484 1.11022e-16 0.438484 0.438484
187 207 0.438484 1.11022e-16 0.438484 0.438484
188 207 0.438484 1.11022e-16 0.438484 0.438484
189 201 0.438484 1.11022e-16 0.438484 0.438484
190 222 0.438484 1.11022e-16 0.438484 0.438484
191 225 0.438484 1.11022e-16 0.438484 0.438484
192 193 0.438484 1.11022e-16 0.438484 0.438484
193 220 0.438484 1.11022e-16 0.438484 0.438484
194 196 0.438484 1.11022e-16 0.438484 0.438484
195 228 0.438484 1.11022e-16 0.438484 0.438484
196 211 0.438484 1.11022e-16 0.438484 0.438484
197 212 0.438484 1.11022e-16 0.438484 0.438484
198 193 0.438484 1.11022e-16 0.438484 0.438484
199 220 0.438484 1.11022e-16 0.438484 0.438484
200 223 0.438484 1.11022e-16 0.438484 0.438484
201 215 0.438484 1.11022e-16 0.438484 0.438484
202 211 0.438484 1.11022e-16 0.438484 0.438484
203 207 0.438484 1.11022e-16 0.438484 0.438484
204 208 0.438484 1.11022e-16 0.438484 0.438484
205 214 0.438484 1.11022e-16 0.438484 0.438484
206 200 0.438484 1.11022e-16 0.438484 0.438484
207 192 0.438484 1.11022e-16 0.438484 0.438484
208 209 0.438484 1.11022e-16 0.438484 0.438484
209 213 0.438484 1.11022e-16 0.438484 0.438484
210 213 0.438484 1.11022e-16 0.438484 0.438484
211 207 0.438484 1.11022e-16 0.438484 0.438484
212 227 0.438484 1.11022e-16 0.438484 0.438484
213 219 0.438484 1.11022e-16 0.438484 0.438484
214 208 0.438484 1.11022e-16 0.438484 0.438484
215 208 0.438476 0.000128027 0.436262 0.438484
216 224 0.438484 1.11022e-16 0.438484 0.438484
217 206 0.438484 1.11022e-16 0.438484 0.438484
218 208 0.438484 1.11022e-16 0.438484 0.438484
219 207 0.438484 1.11022e-16 0.438484 0.438484
220 216 0.438484 1.11022e-16 0.438484 0.438484
221 210 0.438484 1.11022e-16 0.438484 0.438484
222 209 0.438484 1.11022e-16 0.438484 0.438484
223 203 0.438484 1.11022e-16 0.438484 0.438484
224 211 0.438484 1.11022e-16 0.438484 0.438484
225 203 0.438484 1.11022e-16 0.438484 0.438484
226 219 0.438484 1.11022e-16 0.438484 0.438484
227 206 0.438484 1.11022e-16 0.438484 0.438484
228 213 0.438484 1.11022e-16 0.438484 0.438484
229 210 0.438484 1.11022e-16 0.438484 0.438484
230 211 0.438484 1.11022e-16 0.438484 0.438484
231 202 0.438484 1.11022e-16 0.438484 0.438484
232 201 0.438484 1.11022e-16 0.438484 0.438484
233 220 0.438484 1.11022e-16 0.438484 0.438484
234 209 0.438484 1.11022e-16 0.438484 0.438484
235 219 0.438484 1.11022e-16 0.438484 0.438484
236 210 0.438484 1.11022e-16 0.438484 0.438484
237 201 0.438484 1.11022e-16 0.438484 0.438484
238 216 0.438484 1.11022e-16 0.438484 0.438484
239 220 0.438484 1.11022e-16 0.438484 0.438484
240 205 0.438484 1.11022e-16 0.438484 0.438484
241 200 0.438484 1.11022e-16 0.438484 0.438484
242 218 0.438484 1.11022e-16 0.438484 0.438484
243 216 0.438484 1.11022e-16 0.438484 0.438484
244 213 0.438484 1.11022e-16 0.438484 0.438484
245 220 0.438484 1.11022e-16 0.438484 0.438484
246 214 0.438484 1.11022e-16 0.438484 0.438484
247 207 0.438484 1.11022e-16 0.438484 0.438484
248 231 0.43848 6.39194e-05 0.437375 0.438484
249 201 0.438484 1.11022e-16 0.438484 0.438484
250 215 0.43823 0.00437692 0.362546 0.438484
251 206 0.438484 1.11022e-16 0.438484 0.438484
252 217 0.438484 1.11022e-16 0.438484 0.438484
253 218 0.438484 1.11022e-16 0.438484 0.438484
254 210 0.438484 1.11022e-16 0.438484 0.438484
255 205 0.438484 1.11022e-16 0.438484 0.438484
256 210 0.438484 1.11022e-16 0.438484 0.438484
257 218 0.438484 1.11022e-16 0.438484 0.438484
258 223 0.438484 1.11022e-16 0.438484 0.438484
259 208 0.438484 1.11022e-16 0.438484 0.438484
260 211 0.438484 1.11022e-16 0.438484 0.438484
261 216 0.438484 1.11022e-16 0.438484 0.438484
262 201 0.438468 0.000264851 0.433889 0.438484
263 212 0.438484 1.11022e-16 0.438484 0.438484
264 206 0.438484 1.11022e-16 0.438484 0.438484
265 213 0.438484 1.11022e-16 0.438484 0.438484
266 216 0.438484 1.11022e-16 0.438484 0.438484
267 220 0.438484 1.11022e-16 0.438484 0.438484
268 205 0.438484 1.11022e-16 0.438484 0.438484
269 216 0.438484 1.11022e-16 0.438484 0.438484
270 211 0.438484 1.11022e-16 0.438484 0.438484
271 212 0.438484 1.11022e-16 0.438484 0.438484
272 214 0.438484 1.11022e-16 0.438484 0.438484
273 209 0.438484 1.11022e-16 0.438484 0.438484
274 199 0.438484 1.11022e-16 0.438484 0.438484
275 205 0.438484 1.11022e-16 0.438484 0.438484
276 211 0.438222 0.00444812 0.361316 0.438484
277 219 0.438478 8.75521e-05 0.436965 0.438484
278 202 0.438484 1.11022e-16 0.438484 0.438484
279 204 0.438484 1.11022e-16 0.438484 0.438484
280 200 0.438484 1.11022e-16 0.438484 0.438484
281 222 0.438484 1.11022e-16 0.438484 0.438484
282 215 0.438484 1.11022e-16 0.438484 0.438484
283 211 0.438484 1.11022e-16 0.438484 0.438484
284 216 0.438484 1.11022e-16 0.438484 0.438484
285 202 0.438484 1.11022e-16 0.438484 0.438484
286 213 0.438484 1.11022e-16 0.438484 0.438484
287 204 0.438484 1.11022e-16 0.438484 0.438484
288 203 0.438469 0.000254013 0.434077 0.438484
289 209 0.438484 1.11022e-16 0.438484 0.438484
290 210 0.438484 1.11022e-16 0.438484 0.438484
291 212 0.438474 0.000161229 0.435686 0.438484
292 216 0.438484 1.11022e-16 0.438484 0.438484
293 202 0.438484 1.11022e-16 0.438484 0.438484
294 217 0.438484 1.11022e-16 0.438484 0.438484
295 213 0.438484 1.11022e-16 0.438484 0.438484
296 211 0.438484 1.11022e-16 0.438484 0.438484
297 207 0.438484 1.11022e-16 0.438484 0.438484
298 215 0.438484 1.11022e-16 0.438484 0.438484
299 210 0.438484 1.11022e-16 0.438484 0.438484
300 208 0.438484 1.11022e-16 0.438484 0.438484
301 202 0.438484 1.11022e-16 0.438484 0.438484
302 214 0.43823 0.00437692 0.362546 0.438484
303 204 0.438484 1.11022e-16 0.438484 0.438484
304 216 0.438483 7.70767e-06 0.43835 0.438484
305 221 0.438482 3.52272e-05 0.437872 0.438484
306 208 0.438484 1.11022e-16 0.438484 0.438484
307 203 0.438481 3.98341e-05 0.437792 0.438484
308 199 0.438484 1.11022e-16 0.438484 0.438484
309 213 0.438484 1.11022e-16 0.438484 0.438484
310 209 0.438484 1.11022e-16 0.438484 0.438484
311 203 0.438484 1.11022e-16 0.438484 0.438484
312 210 0.438484 1.11022e-16 0.438484 0.438484
313 217 0.437673 0.0140128 0.19537 0.438484
314 214 0.438484 1.11022e-16 0.438484 0.438484
315 202 0.438484 1.11022e-16 0.438484 0.438484
316 215 0.438484 1.11022e-16 0.438484 0.438484
317 218 0.438484 1.11022e-16 0.438484 0.438484
318 211 0.438484 1.11022e-16 0.438484 0.438484
319 208 0.438484 1.11022e-16 0.438484 0.438484
320 214 0.438484 1.11022e-16 0.438484 0.438484
321 214 0.438465 0.000313031 0.433053 0.438484
322 197 0.438484 1.11022e-16 0.438484 0.438484
323 214 0.438484 1.11022e-16 0.438484 0.438484
324 195 0.438484 1.11022e-16 0.438484 0.438484
325 218 0.438484 1.11022e-16 0.438484 0.438484
326 221 0.438484 1.11022e-16 0.438484 0.438484
327 210 0.438484 1.11022e-16 0.438484 0.438484
328 204 0.438484 1.11022e-16 0.438484 0.438484
329 197 0.438484 1.11022e-16 0.438484 0.438484
330 209 0.438484 1.11022e-16 0.438484 0.438484
331 215 0.438471 0.000214997 0.434753 0.438484
332 195 0.438484 1.11022e-16 0.438484 0.438484
333 207 0.438484 1.11022e-16 0.438484 0.438484
334 212 0.438484 1.11022e-16 0.438484 0.438484
335 219 0.438484 1.11022e-16 0.438484 0.438484
336 204 0.438484 1.11022e-16 0.438484 0.438484
337 215 0.438452 0.000390339 0.433344 0.438484
338 216 0.438484 1.11022e-16 0.438484 0.438484
339 214 0.438484 1.11022e-16 0.438484 0.438484
340 208 0.438484 1.11022e-16 0.438484 0.438484
341 215 0.438484 1.11022e-16 0.438484 0.438484
342 221 0.438484 1.11022e-16 0.438484 0.438484
343 224 0.438484 1.11022e-16 0.438484 0.438484
344 233 0.438484 1.11022e-16 0.438484 0.438484
345 207 0.438484 1.11022e-16 0.438484 0.438484
346 199 0.438484 1.11022e-16 0.438484 0.438484
347 212 0.438484 1.11022e-16 0.438484 0.438484
348 210 0.438484 1.11022e-16 0.438484 0.438484
349 224 0.438484 1.11022e-16 0.438484 0.438484
350 202 0.438484 1.11022e-16 0.438484 0.438484
351 210 0.438484 1.11022e-16 0.438484 0.438484
352 203 0.438484 1.11022e-16 0.438484 0.438484
353 224 0.438484 1.11022e-16 0.438484 0.438484
354 223 0.438448 0.000440377 0.432827 0.438484
355 195 0.438484 1.11022e-16 0.438484 0.438484
356 209 0.438484 1.11022e-16 0.438484 0.438484
357 216 0.438484 1.11022e-16 0.438484 0.438484
358 216 0.438484 1.11022e-16 0.438484 0.438484
359 205 0.438484 1.11022e-16 0.438484 0.438484
360 214 0.438484 1.11022e-16 0.438484 0.438484
361 219 0.438484 1.11022e-16 0.438484 0.438484
362 219 0.438479 7.64833e-05 0.437157 0.438484
363 218 0.438484 1.11022e-16 0.438484 0.438484
364 207 0.438484 1.11022e-16 0.438484 0.438484
365 214 0.437883 0.0103761 0.258465 0.438484
366 197 0.438484 1.11022e-16 0.438484 0.438484
367 225 0.438484 1.11022e-16 0.438484 0.438484
368 221 0.438484 1.11022e-16 0.438484 0.438484
369 200 0.438484 1.11022e-16 0.438484 0.438484
370 203 0.438484 1.11022e-16 0.438484 0.438484
371 216 0.438484 1.11022e-16 0.438484 0.438484
372 208 0.438484 1.11022e-16 0.438484 0.438484
373 220 0.438484 1.11022e-16 0.438484 0.438484
374 218 0.438484 1.11022e-16 0.438484 0.438484
375 209 0.438484 1.11022e-16 0.438484 0.438484
376 208 0.438484 1.11022e-16 0.438484 0.438484
377 216 0.438484 1.11022e-16 0.438484 0.438484
378 203 0.438484 1.11022e-16 0.438484 0.438484
379 219 0.438484 1.11022e-16 0.438484 0.438484
380 207 0.438484 1.11022e-16 0.438484 0.438484
381 200 0.438484 1.11022e-16 0.438484 0.438484
382 213 0.438484 1.11022e-16 0.438484 0.438484
383 215 0.438484 1.11022e-16 0.438484 0.438484
384 212 0.438484 1.11022e-16 0.438484 0.438484
385 205 0.438484 1.11022e-16 0.438484 0.438484
386 225 0.438484 1.11022e-16 0.438484 0.438484
387 214 0.438484 1.11022e-16 0.438484 0.438484
388 215 0.438484 1.11022e-16 0.438484 0.438484
389 213 0.438484 1.11022e-16 0.438484 0.438484
390 216 0.438484 1.11022e-16 0.438484 0.438484
391 196 0.438484 1.11022e-16 0.438484 0.438484
392 224 0.438484 1.11022e-16 0.438484 0.438484
393 209 0.438484 1.11022e-16 0.438484 0.438484
394 223 0.438484 1.11022e-16 0.438484 0.438484
395 207 0.438484 1.11022e-16 0.438484 0.438484
396 204 0.438484 1.11022e-16 0.438484 0.438484
397 211 0.438484 1.11022e-16 0.438484 0.438484
398 213 0.438484 1.11022e-16 0.438484 0.438484
399 205 0.438484 1.11022e-16 0.438484 0.438484
400 215 0.438484 1.11022e-16 0.438484 0.438484
401 227 0.438478 8.75521e-05 0.436965 0.438484
402 208 0.438484 1.11022e-16 0.438484 0.438484
403 206 0.438484 1.11022e-16 0.438484 0.438484
404 218 0.438484 1.11022e-16 0.438484 0.438484
405 218 0.438484 1.11022e-16 0.438484 0.438484
406 210 0.438484 1.11022e-16 0.438484 0.438484
407 205 0.438484 1.11022e-16 0.438484 0.438484
408 212 0.438484 1.11022e-16 0.438484 0.438484
409 211 0.438484 1.11022e-16 0.438484 0.438484
410 207 0.438484 1.11022e-16 0.438484 0.438484
411 205 0.438484 1.11022e-16 0.438484 0.438484
412 210 0.438484 1.11022e-16 0.438484 0.438484
413 208 0.438484 1.11022e-16 0.438484 0.438484
414 203 0.438484 1.11022e-16 0.438484 0.438484
415 209 0.438484 1.11022e-16 0.438484 0.438484
416 219 0.43846 0.000399336 0.431555 0.438484
417 202 0.438484 1.11022e-16 0.438484 0.438484
418 202 0.438484 1.11022e-16 0.438484 0.438484
419 216 0.438465 0.000302482 0.433254 0.438484
420 225 0.438484 1.11022e-16 0.438484 0.438484
421 220 0.438484 1.11022e-16 0.438484 0.438484
422 213 0.438484 1.11022e-16 0.438484 0.438484
423 213 0.438484 1.11022e-16 0.438484 0.438484
424 220 0.438432 0.000895331 0.42295 0.438484
425 207 0.438484 1.11022e-16 0.438484 0.438484
426 212 0.438484 1.11022e-16 0.438484 0.438484
427 208 0.438484 1.11022e-16 0.438484 0.438484
428 214 0.438484 1.11022e-16 0.438484 0.438484
429 204 0.438484 1.11022e-16 0.438484 0.438484
430 219 0.438484 1.11022e-16 0.438484 0.438484
431 207 0.438478 8.75521e-05 0.436965 0.438484
432 223 0.438484 1.11022e-16 0.438484 0.438484
433 204 0.438484 1.11022e-16 0.438484 0.438484
434 193 0.438484 1.11022e-16 0.438484 0.438484
435 204 0.438484 1.11022e-16 0.438484 0.438484
436 208 0.438484 1.11022e-16 0.438484 0.438484
437 200 0.438469 0.000254013 0.434077 0.438484
438 221 0.438484 1.11022e-16 0.438484 0.438484
439 219 0.438484 1.11022e-16 0.438484 0.438484
440 212 0.438484 1.11022e-16 0.438484 0.438484
441 199 0.438484 1.11022e-16 0.438484 0.438484
442 188 0.438484 1.11022e-16 0.438484 0.438484
443 221 0.438484 1.11022e-16 0.438484 0.438484
444 222 0.438484 1.11022e-16 0.438484 0.438484
445 203 0.438484 1.11022e-16 0.438484 0.438484
446 215 0.438484 1.11022e-16 0.438484 0.438484
447 216 0.438484 1.11022e-16 0.438484 0.438484
448 215 0.438484 1.11022e-16 0.438484 0.438484
449 211 0.438484 1.11022e-16 0.438484 0.438484
450 208 0.438484 1.11022e-16 0.438484 0.438484
451 210 0.438484 1.11022e-16 0.438484 0.438484
452 207 0.438484 1.11022e-16 0.438484 0.438484
453 204 0.438484 1.11022e-16 0.438484 0.438484
454 220 0.438484 1.11022e-16 0.438484 0.438484
455 207 0.438484 1.11022e-16 0.438484 0.438484
456 209 0.438484 1.11022e-16 0.438484 0.438484
457 212 0.438484 1.11022e-16 0.438484 0.438484
458 220 0.438484 1.11022e-16 0.438484 0.438484
459 207 0.438484 1.11022e-16 0.438484 0.438484
460 205 0.438484 1.11022e-16 0.438484 0.438484
461 218 0.438484 1.11022e-16 0.438484 0.438484
462 208 0.438484 1.11022e-16 0.438484 0.438484
463 215 0.438484 1.11022e-16 0.438484 0.438484
464 219 0.438484 1.11022e-16 0.438484 0.438484
465 219 0.438484 1.11022e-16 0.438484 0.438484
466 204 0.438484 1.11022e-16 0.438484 0.438484
467 212 0.438484 1.11022e-16 0.438484 0.438484
468 192 0.438484 1.11022e-16 0.438484 0.438484
469 205 0.438484 1.11022e-16 0.438484 0.438484
470 203 0.438484 1.11022e-16 0.438484 0.438484
471 215 0.438484 1.11022e-16 0.438484 0.438484
472 224 0.438484 1.11022e-16 0.438484 0.438484
473 208 0.438457 0.000460666 0.430491 0.438484
474 211 0.438484 1.11022e-16 0.438484 0.438484
475 210 0.438484 1.11022e-16 0.438484 0.438484
476 196 0.438484 1.11022e-16 0.438484 0.438484
477 212 0.438484 1.11022e-16 0.438484 0.438484
478 224 0.438484 1.11022e-16 0.438484 0.438484
479 217 0.438484 1.11022e-16 0.438484 0.438484
480 208 0.438484 1.11022e-16 0.438484 0.438484
481 205 0.438484 1.11022e-16 0.438484 0.438484
482 213 0.438484 1.11022e-16 0.438484 0.438484
483 204 0.438484 1.11022e-16 0.438484 0.438484
484 192 0.438484 1.11022e-16 0.438484 0.438484
485 202 0.438484 1.11022e-16 0.438484 0.438484
486 204 0.438484 1.11022e-16 0.438484 0.438484
487 205 0.438484 1.11022e-16 0.438484 0.438484
488 213 0.438484 1.11022e-16 0.438484 0.438484
489 221 0.438484 1.11022e-16 0.438484 0.438484
490 216 0.438481 3.98341e-05 0.437792 0.438484
491 222 0.438484 1.11022e-16 0.438484 0.438484
492 208 0.438462 0.000364386 0.432162 0.438484
493 187 0.438484 1.11022e-16 0.438484 0.438484
494 225 0.438484 1.11022e-16 0.438484 0.438484
495 208 0.438484 1.11022e-16 0.438484 0.438484
496 217 0.438484 1.11022e-16 0.438484 0.438484
497 213 0.438484 1.11022e-16 0.438484 0.438484
498 191 0.438484 1.11022e-16 0.438484 0.438484
499 218 0.438484 1.11022e-16 0.438484 0.438484
500 214 0.438484 1.11022e-16 0.438484 0.438484
In [43]:
def runGA(pop):
    '''Evolve a (cloned) population with repeated mu+lambda GA runs,
    stopping early once the best fitness stops improving between runs.

    pop: list of deap individuals (left unmodified; a clone is evolved).
    Returns the final evolved population.
    '''
    hof = tools.HallOfFame(10)
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    # register summary statistics reported in the per-generation log
    for label, func in (("avg", np.mean), ("std", np.std),
                        ("min", np.min), ("max", np.max)):
        stats.register(label, func)
    prev_best = 0
    pop = toolbox.clone(pop)  # do not mutate the caller's population
    for _ in xrange(40):
        pop, log = algorithms.eaMuPlusLambda(
            pop, toolbox, mu=300, lambda_=300, cxpb=0.5, mutpb=0.2,
            ngen=25, stats=stats, halloffame=hof, verbose=True)
        current_max = log[-1]['max']
        # early stop: best fitness identical to the previous run's best
        if prev_best == current_max:
            break
        prev_best = current_max
    return pop
In [44]:
# Seed the GA with the evaluated models plus random individuals and evolve.
pop = runGA(initpop+randpop)
gen nevals avg std min max
0 3965 0.270707 0.140356 -0.113348 0.44694
1 204 0.378137 0.0601037 0.127002 0.44441
2 216 0.412261 0.020045 0.26819 0.44441
3 216 0.425565 0.0103653 0.390974 0.44441
4 224 0.432211 0.00647664 0.415433 0.44441
5 214 0.436541 0.0036709 0.42785 0.44441
6 205 0.44 0.00337743 0.435104 0.44694
7 202 0.442945 0.00239223 0.435104 0.44694
8 219 0.444452 0.00117933 0.435104 0.44694
9 232 0.445205 0.00105935 0.443207 0.44694
10 203 0.445986 0.00107466 0.44441 0.44694
11 209 0.44672 0.000621305 0.44441 0.447691
12 211 0.446966 0.000178694 0.445437 0.447691
13 230 0.447013 0.000221713 0.44694 0.447691
14 216 0.447115 0.000317336 0.44694 0.447691
15 218 0.447326 0.000375011 0.44694 0.447691
16 220 0.447598 0.000246709 0.44694 0.447691
17 203 0.447691 5.55112e-17 0.447691 0.447691
18 208 0.447691 5.55112e-17 0.447691 0.447691
19 211 0.447691 5.55112e-17 0.447691 0.447691
20 221 0.447691 5.55112e-17 0.447691 0.447691
21 208 0.447691 5.55112e-17 0.447691 0.447691
22 215 0.447691 5.55112e-17 0.447691 0.447691
23 204 0.447691 5.55112e-17 0.447691 0.447691
24 199 0.447691 5.55112e-17 0.447691 0.447691
25 210 0.447691 5.55112e-17 0.447691 0.447691
gen nevals avg std min max
0 0 0.447691 5.55112e-17 0.447691 0.447691
1 210 0.447691 5.55112e-17 0.447691 0.447691
2 208 0.447691 5.55112e-17 0.447691 0.447691
3 210 0.447691 5.55112e-17 0.447691 0.447691
4 207 0.447691 5.55112e-17 0.447691 0.447691
5 213 0.447691 5.55112e-17 0.447691 0.447691
6 212 0.447689 2.60439e-05 0.447239 0.447691
7 217 0.447691 5.55112e-17 0.447691 0.447691
8 214 0.447691 5.55112e-17 0.447691 0.447691
9 218 0.447691 5.55112e-17 0.447691 0.447691
10 229 0.447691 5.55112e-17 0.447691 0.447691
11 204 0.447691 5.55112e-17 0.447691 0.447691
12 194 0.447691 5.55112e-17 0.447691 0.447691
13 206 0.447691 5.55112e-17 0.447691 0.447691
14 208 0.447691 5.55112e-17 0.447691 0.447691
15 216 0.44754 0.00234921 0.40719 0.447691
16 208 0.447492 0.00344098 0.387992 0.447691
17 203 0.447691 5.55112e-17 0.447691 0.447691
18 198 0.447691 5.55112e-17 0.447691 0.447691
19 209 0.447691 5.55112e-17 0.447691 0.447691
20 217 0.447665 0.000450192 0.43988 0.447691
21 204 0.447691 5.55112e-17 0.447691 0.447691
22 213 0.447691 5.55112e-17 0.447691 0.447691
23 206 0.447446 0.00422419 0.374403 0.447691
24 217 0.447691 5.55112e-17 0.447691 0.447691
25 214 0.447691 5.55112e-17 0.447691 0.447691
In [109]:
# Inspect the fitness of every individual in the final evolved population.
[x.fitness for x in pop]
Out[109]:
[deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,)),
deap.creator.FitnessMax((0.4384835577313811,))]
In [ ]:
In [130]:
import deepdiff  # structural diffing of parameter dictionaries
import pprint
pp = pprint.PrettyPrinter()  # pretty-printer reused below for diff output
In [125]:
# Pull the single best evaluated model (highest Rtop = R*top product) from
# the database results and convert it into a DEAP individual.
best = data.sort_values(by='Rtop', ascending=False).to_dict('records')[0]
bestrop = best['Rtop']  # remember its score before stripping non-parameters
# remove non-parameter columns (metrics, ids, ...) so only hyperparameters remain
for name in notparams:
    del best[name]
best = deap.creator.Individual(cleanparams(best))
In [132]:
# Canonicalized parameter dict of the first evolved individual.
cleanparams(pop[0])
Out[132]:
{'balanced': 1,
'base_lr_exp': -1.875,
'conv1_func': 'ReLU',
'conv1_init': 'uniform',
'conv1_norm': 'none',
'conv1_size': 3,
'conv1_stride': 1,
'conv1_width': 32,
'conv2_func': 'ReLU',
'conv2_init': 'xavier',
'conv2_norm': 'none',
'conv2_size': 3,
'conv2_stride': 1,
'conv2_width': 64,
'conv3_func': 'ReLU',
'conv3_init': 'xavier',
'conv3_norm': 'none',
'conv3_size': 3,
'conv3_stride': 1,
'conv3_width': 128,
'conv4_func': 'ReLU',
'conv4_init': 'xavier',
'conv4_norm': 'none',
'conv4_size': 3,
'conv4_stride': 1,
'conv4_width': 0,
'conv5_func': 'ReLU',
'conv5_init': 'xavier',
'conv5_norm': 'none',
'conv5_size': 3,
'conv5_stride': 1,
'conv5_width': 0,
'fc_affinity_func': 'ReLU',
'fc_affinity_func2': 'ReLU',
'fc_affinity_hidden': 0,
'fc_affinity_hidden2': 0,
'fc_affinity_init': 'xavier',
'fc_pose_func': 'ReLU',
'fc_pose_func2': 'ReLU',
'fc_pose_hidden': 0,
'fc_pose_hidden2': 0,
'fc_pose_init': 'xavier',
'jitter': 0.0,
'loss_delta': 4.0,
'loss_gap': 0.0,
'loss_penalty': 3.75,
'loss_pseudohuber': 1,
'momentum': 0.9,
'pool1_size': 0,
'pool1_type': 'MAX',
'pool2_size': 0,
'pool2_type': 'MAX',
'pool3_size': 8,
'pool3_type': 'AVE',
'pool4_size': 0,
'pool4_type': 'MAX',
'pool5_size': 8,
'pool5_type': 'MAX',
'ranklossmult': 0.0,
'ranklossneg': 0,
'resolution': 0.5,
'solver': 'SGD',
'split': 2,
'stratify_affinity': 0,
'stratify_affinity_step': 1,
'stratify_receptor': 1,
'weight_decay_exp': -7.5}
In [133]:
# Show how each of the first ten evolved individuals differs from the best
# database-evaluated model (only changed values are reported).
for indiv in pop[:10]:
    diff = deepdiff.DeepDiff(best, cleanparams(indiv), verbose_level=0)
    pp.pprint(diff)
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0},
"root['weight_decay_exp']": {'new_value': -7.5,
'old_value': -3.0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_delta']": {'new_value': 3.0,
'old_value': 4.0},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 2, 'old_value': 0},
"root['weight_decay_exp']": {'new_value': -7.5,
'old_value': -3.0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0},
"root['weight_decay_exp']": {'new_value': -7.5,
'old_value': -3.0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 4, 'old_value': 0},
"root['weight_decay_exp']": {'new_value': -7.5,
'old_value': -3.0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_delta']": {'new_value': 3.0,
'old_value': 4.0},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0},
"root['weight_decay_exp']": {'new_value': -7.5,
'old_value': -3.0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['loss_delta']": {'new_value': 5.0,
'old_value': 4.0},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0}}}
{'values_changed': {"root['base_lr_exp']": {'new_value': -1.875,
'old_value': -2.0},
"root['conv1_init']": {'new_value': 'uniform',
'old_value': 'xavier'},
"root['conv2_width']": {'new_value': 512,
'old_value': 64},
"root['loss_delta']": {'new_value': 3.0,
'old_value': 4.0},
"root['loss_penalty']": {'new_value': 3.75,
'old_value': 0.0},
"root['pool5_size']": {'new_value': 8, 'old_value': 0}}}
In [57]:
# Fitness values of the hall-of-fame individuals from an earlier GA run.
[h.fitness for h in hof2]
Out[57]:
[deap.creator.FitnessMax((0.3880651730511736,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,)),
deap.creator.FitnessMax((0.3870333565446721,))]
In [134]:
# Raw feature importances from the fitted random-forest surrogate model.
rf.feature_importances_
Out[134]:
array([2.20780973e-04, 1.70722657e-01, 2.64407473e-05, 6.43765203e-04,
8.88708501e-03, 1.09740684e-05, 2.70650510e-05, 2.30343619e-06,
8.33993099e-06, 6.64863603e-04, 2.32089817e-06, 9.94115329e-05,
1.01259295e-05, 8.00239187e-04, 3.03788056e-05, 1.55247262e-04,
5.06185707e-04, 9.95754784e-04, 1.67438542e-03, 5.05102927e-03,
6.46959434e-06, 7.09764468e-04, 1.06361381e-05, 9.39010978e-06,
1.31304257e-05, 4.98109030e-06, 4.40222499e-06, 1.21523220e-05,
1.08683546e-05, 1.62358341e-05, 7.83145027e-06, 7.31458827e-04,
1.74404034e-05, 8.01202168e-04, 1.39043024e-03, 1.04404653e-02,
5.59743971e-04, 5.22371112e-03, 1.86453305e-05, 9.44093591e-04,
1.20216253e-04, 4.07793808e-05, 1.32525683e-04, 1.08343239e-05,
6.86509586e-06, 3.18454238e-04, 7.63510006e-06, 8.27222651e-05,
2.85092866e-06, 9.63261195e-04, 1.35404908e-04, 2.04892818e-04,
3.05024350e-04, 1.07332440e-03, 3.09806795e-03, 3.33169414e-03,
2.05853420e-04, 2.47214265e-07, 3.43451783e-06, 2.63649586e-04,
0.00000000e+00, 1.77801145e-05, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 3.81223391e-07, 0.00000000e+00, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00, 9.29825402e-05, 1.65118719e-03,
9.65979537e-05, 0.00000000e+00, 7.19184236e-04, 1.21012589e-06,
9.60251588e-05, 1.01078914e-04, 0.00000000e+00, 2.05218087e-05,
0.00000000e+00, 2.82525074e-07, 1.53079781e-06, 6.09808894e-05,
1.74779942e-06, 1.06689076e-01, 4.91696484e-08, 1.15112260e-04,
6.17192837e-05, 0.00000000e+00, 0.00000000e+00, 3.51808814e-06,
1.52463511e-04, 0.00000000e+00, 5.53405715e-03, 2.18552728e-04,
5.76745538e-07, 5.01857589e-06, 7.48492670e-04, 1.07112889e-05,
3.45111135e-06, 8.74479416e-08, 3.92001727e-06, 8.09653476e-07,
0.00000000e+00, 2.87553267e-04, 1.54131408e-04, 6.97596280e-06,
2.21297869e-03, 2.09410367e-05, 2.59391807e-05, 2.20690884e-03,
8.50675403e-04, 1.09026328e-03, 1.05300858e-02, 1.61243312e-03,
8.55096675e-04, 4.37436072e-04, 3.95923349e-02, 3.17385784e-01,
1.03121500e-03, 8.46063166e-04, 3.71680669e-02, 1.55128117e-02,
6.40901048e-03, 5.21928659e-03, 1.29276739e-02, 1.63844513e-02,
7.25445646e-04, 0.00000000e+00, 3.62548511e-06, 3.70791784e-03,
3.65984378e-06, 2.36985846e-04, 3.64525630e-02, 5.95075203e-04,
1.40904064e-04, 4.91389465e-03, 5.76465922e-03, 1.43911095e-02,
3.43830543e-04, 6.38971950e-04, 3.42553827e-04, 1.20781435e-01])
In [60]:
# Importances at the selected indices.
# NOTE(review): the recorded output is all zeros, so `importances`/`indices`
# here appear to be stale from an earlier kernel state — re-run to confirm.
importances[indices]
Out[60]:
array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])
In [82]:
# Plot the top random-forest feature importances as a horizontal bar chart,
# with error bars taken from the per-tree importance spread.
importances = rf.feature_importances_
# std of importances across the individual trees, used as xerr below
std = np.std([tree.feature_importances_ for tree in rf.estimators_],
             axis=0)
# indices of the 8 most important features, ascending by importance
indices = np.argsort(importances)[-8:]
# Print the feature ranking
print("Feature ranking:")
num = Xv.shape[1]  # total feature count (only used by the commented loop below)
#for f in range(num):
#    print("%s\t (%f)" % (dictvec.feature_names_[indices[f]], importances[indices[f]]))
# Plot the feature importances of the forest
plt.figure(figsize=(5.5,6))
plt.xlabel("Random Forest Feature Importances",fontsize=16)
plt.barh(range(len(indices)), importances[indices],
         color="crimson", xerr=std[indices], align="center")
# label each bar with its feature name from the DictVectorizer
plt.yticks(range(len(indices)), np.array(dictvec.feature_names_)[indices])
#plt.ylim(-.5,30.5)
#plt.tick_params(labelsize=14)
plt.tick_params(axis='y',labelsize=22)
# NOTE(review): 'improt.pdf' looks like a typo for 'import.pdf' — confirm
plt.savefig('improt.pdf',bbox_inches='tight')
Feature ranking:
In [ ]:
In [61]:
# Hashable snapshots of every initial individual, for fast membership checks.
seen = {frozendict.frozendict(individual) for individual in initpop}
In [62]:
# For each evolved individual, report whether it already appeared in the initial
# population, along with its fitness.  NOTE: the loop variable `p` deliberately
# leaks and is inspected by later cells — do not rename it.
for p in pop:
print frozendict.frozendict(p) in seen, p.fitness
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
True (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
False (0.435140260538471,)
In [63]:
# Fitness of the last individual from the loop above (a deap FitnessMax).
p.fitness
Out[63]:
deap.creator.FitnessMax((0.435140260538471,))
In [64]:
# First initial individual's fitness — empty tuple, i.e. not yet evaluated.
initpop[0].fitness
Out[64]:
deap.creator.FitnessMax(())
In [65]:
# Show the frozendict form of the first initial individual (full parameter set).
frozendict.frozendict(initpop[0])
Out[65]:
<frozendict {'conv4_norm': 'none', 'loss_pseudohuber': 1, 'jitter': 0.0, 'conv2_width': 64, 'conv5_norm': 'none', 'conv3_size': 3, 'balanced': 1, 'pool5_type': 'MAX', 'stratify_affinity_step': 1, 'conv5_width': 0, 'conv2_size': 3, 'loss_gap': 0.0, 'conv2_stride': 1, 'conv3_init': 'xavier', 'fc_affinity_func': 'ReLU', 'momentum': 0.9, 'conv3_width': 128, 'fc_affinity_init': 'xavier', 'conv5_size': 3, 'loss_delta': 4.0, 'conv5_func': 'ReLU', 'pool5_size': 0, 'solver': 'SGD', 'conv5_stride': 1, 'fc_pose_hidden2': 0, 'conv4_size': 3, 'stratify_affinity': 0, 'fc_affinity_hidden': 0, 'conv4_init': 'xavier', 'ranklossneg': 0, 'pool4_size': 0, 'conv1_stride': 1, 'split': 0, 'conv4_func': 'ReLU', 'pool4_type': 'MAX', 'conv1_norm': 'none', 'pool2_type': 'MAX', 'loss_penalty': 0.0, 'fc_pose_init': 'xavier', 'conv1_size': 3, 'conv3_norm': 'none', 'fc_pose_hidden': 0, 'conv4_width': 0, 'conv1_width': 32, 'conv2_init': 'xavier', 'pool1_type': 'MAX', 'pool2_size': 2, 'conv4_stride': 1, 'pool1_size': 2, 'pool3_size': 2, 'weight_decay_exp': -3.0, 'conv1_func': 'ReLU', 'conv2_norm': 'none', 'ranklossmult': 0.0, 'conv3_func': 'ReLU', 'pool3_type': 'MAX', 'conv5_init': 'xavier', 'fc_pose_func2': 'ReLU', 'fc_affinity_func2': 'ReLU', 'conv3_stride': 1, 'fc_affinity_hidden2': 0, 'base_lr_exp': -2.0, 'stratify_receptor': 1, 'conv1_init': 'xavier', 'fc_pose_func': 'ReLU', 'conv2_func': 'ReLU', 'resolution': 0.5}>
In [66]:
# Count evolved individuals that were NOT present in the initial population.
len([individual for individual in pop if frozendict.frozendict(individual) not in seen])
Out[66]:
282
In [67]:
# Dump the (parameter, value) pairs of the leaked loop variable `p`.
p.items()
Out[67]:
[('conv4_norm', 'none'),
('loss_pseudohuber', 1),
('jitter', 0.0),
('conv2_width', 64),
('pool4_size', 0),
('conv1_stride', 1),
('conv5_norm', 'none'),
('conv3_size', 3),
('pool4_type', 'MAX'),
('balanced', 1),
('ranklossneg', 0),
('stratify_affinity_step', 1),
('base_lr_exp', -2.0),
('conv1_norm', 'none'),
('conv5_width', 0),
('conv2_size', 3),
('conv3_stride', 1),
('pool2_type', 'AVE'),
('loss_penalty', 0.0),
('fc_pose_init', 'xavier'),
('conv1_size', 3),
('conv3_norm', 'none'),
('fc_pose_hidden', 0),
('loss_gap', 0.0),
('split', 2),
('conv2_init', 'xavier'),
('pool1_type', 'MAX'),
('conv4_width', 0),
('pool2_size', 0),
('conv2_stride', 1),
('conv3_init', 'xavier'),
('fc_affinity_func', 'ReLU'),
('momentum', 0.9),
('conv3_width', 128),
('fc_affinity_init', 'xavier'),
('conv5_size', 3),
('conv4_stride', 3),
('pool1_size', 0),
('loss_delta', 4.0),
('pool3_size', 8),
('weight_decay_exp', -3.0),
('conv1_func', 'ReLU'),
('conv2_norm', 'none'),
('ranklossmult', 0.0),
('conv5_func', 'ReLU'),
('conv3_func', 'ReLU'),
('pool3_type', 'MAX'),
('pool5_size', 0),
('conv5_init', 'xavier'),
('fc_pose_func2', 'ReLU'),
('fc_affinity_func2', 'ReLU'),
('conv4_func', 'ReLU'),
('fc_affinity_hidden2', 0),
('conv5_stride', 1),
('resolution', 0.5),
('solver', 'SGD'),
('fc_pose_hidden2', 0),
('conv1_width', 32),
('stratify_receptor', 1),
('conv4_size', 3),
('stratify_affinity', 0),
('conv1_init', 'xavier'),
('fc_pose_func', 'ReLU'),
('pool5_type', 'MAX'),
('conv2_func', 'ReLU'),
('fc_affinity_hidden', 0),
('conv4_init', 'xavier')]
In [68]:
# Re-check: the initial individual's fitness is still unevaluated at this point.
initpop[0].fitness
Out[68]:
deap.creator.FitnessMax(())
In [69]:
# Evaluate every initial individual in parallel via the worker pool;
# each result is a fitness tuple.
evals = pool.map(toolbox.evaluate, initpop)
In [70]:
# Best (largest) first-component fitness across all evaluations.
# Use max() directly instead of sorting the whole list just to take its head.
top = max(l[0] for l in evals)
In [71]:
# Display the best fitness value found above.
top
Out[71]:
0.435140260538471
In [72]:
# Shape of the vectorized parameter matrix (rows = evaluated models,
# cols = one-hot/numeric features from the DictVectorizer).
Xv.shape
Out[72]:
(3565, 144)
In [73]:
# List the DictVectorizer's expanded feature names (categoricals become name=value).
dictvec.feature_names_
Out[73]:
['balanced',
'base_lr_exp',
'conv1_func=ELU',
'conv1_func=ReLU',
'conv1_func=Sigmoid',
'conv1_func=TanH',
'conv1_func=leaky',
'conv1_init=gaussian',
'conv1_init=msra',
'conv1_init=positive_unitball',
'conv1_init=radial',
'conv1_init=radial.5',
'conv1_init=uniform',
'conv1_init=xavier',
'conv1_norm=BatchNorm',
'conv1_norm=LRN',
'conv1_norm=none',
'conv1_size',
'conv1_stride',
'conv1_width',
'conv2_func=ELU',
'conv2_func=ReLU',
'conv2_func=Sigmoid',
'conv2_func=TanH',
'conv2_func=leaky',
'conv2_init=gaussian',
'conv2_init=msra',
'conv2_init=positive_unitball',
'conv2_init=radial',
'conv2_init=radial.5',
'conv2_init=uniform',
'conv2_init=xavier',
'conv2_norm=BatchNorm',
'conv2_norm=LRN',
'conv2_norm=none',
'conv2_size',
'conv2_stride',
'conv2_width',
'conv3_func=ELU',
'conv3_func=ReLU',
'conv3_func=Sigmoid',
'conv3_func=TanH',
'conv3_func=leaky',
'conv3_init=gaussian',
'conv3_init=msra',
'conv3_init=positive_unitball',
'conv3_init=radial',
'conv3_init=radial.5',
'conv3_init=uniform',
'conv3_init=xavier',
'conv3_norm=BatchNorm',
'conv3_norm=LRN',
'conv3_norm=none',
'conv3_size',
'conv3_stride',
'conv3_width',
'conv4_func=ELU',
'conv4_func=ReLU',
'conv4_func=Sigmoid',
'conv4_func=TanH',
'conv4_func=leaky',
'conv4_init=gaussian',
'conv4_init=msra',
'conv4_init=positive_unitball',
'conv4_init=radial',
'conv4_init=xavier',
'conv4_norm=BatchNorm',
'conv4_norm=LRN',
'conv4_norm=none',
'conv4_size',
'conv4_stride',
'conv4_width',
'conv5_func=ReLU',
'conv5_func=Sigmoid',
'conv5_func=TanH',
'conv5_func=leaky',
'conv5_init=gaussian',
'conv5_init=msra',
'conv5_init=radial.5',
'conv5_init=xavier',
'conv5_norm=BatchNorm',
'conv5_norm=LRN',
'conv5_norm=none',
'conv5_size',
'conv5_stride',
'conv5_width',
'fc_affinity_func2=ELU',
'fc_affinity_func2=ReLU',
'fc_affinity_func2=TanH',
'fc_affinity_func2=leaky',
'fc_affinity_func=ELU',
'fc_affinity_func=ReLU',
'fc_affinity_func=TanH',
'fc_affinity_func=leaky',
'fc_affinity_hidden',
'fc_affinity_hidden2',
'fc_affinity_init=gaussian',
'fc_affinity_init=msra',
'fc_affinity_init=uniform',
'fc_affinity_init=xavier',
'fc_pose_func2=ELU',
'fc_pose_func2=ReLU',
'fc_pose_func2=Sigmoid',
'fc_pose_func2=TanH',
'fc_pose_func=ELU',
'fc_pose_func=ReLU',
'fc_pose_func=TanH',
'fc_pose_func=leaky',
'fc_pose_hidden',
'fc_pose_hidden2',
'fc_pose_init=msra',
'fc_pose_init=uniform',
'fc_pose_init=xavier',
'jitter',
'loss_delta',
'loss_gap',
'loss_penalty',
'loss_pseudohuber',
'momentum',
'pool1_size',
'pool1_type=AVE',
'pool1_type=MAX',
'pool2_size',
'pool2_type=AVE',
'pool2_type=MAX',
'pool3_size',
'pool3_type=AVE',
'pool3_type=MAX',
'pool4_size',
'pool4_type=AVE',
'pool4_type=MAX',
'pool5_size',
'pool5_type=AVE',
'pool5_type=MAX',
'ranklossmult',
'ranklossneg',
'resolution',
'solver=Adam',
'solver=SGD',
'split',
'stratify_affinity',
'stratify_affinity_step',
'stratify_receptor',
'weight_decay_exp']
In [74]:
# This raises TypeError (see traceback below): inverse_transform expects a
# numeric feature ARRAY/matrix, not a raw parameter dict — transform() is the
# direction that accepts dicts.
dictvec.inverse_transform(dict(pop[0]))
TypeErrorTraceback (most recent call last)
<ipython-input-74-245fdb36bfa4> in <module>()
----> 1 dictvec.inverse_transform(dict(pop[0]))
/usr/local/lib/python2.7/dist-packages/sklearn/feature_extraction/dict_vectorizer.pyc in inverse_transform(self, X, dict_type)
254 """
255 # COO matrix is not subscriptable
--> 256 X = check_array(X, accept_sparse=['csr', 'csc'])
257 n_samples = X.shape[0]
258
/usr/local/lib/python2.7/dist-packages/sklearn/utils/validation.pyc in check_array(array, accept_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd, ensure_min_samples, ensure_min_features, warn_on_dtype, estimator)
446 # make sure we actually converted to numeric:
447 if dtype_numeric and array.dtype.kind == "O":
--> 448 array = array.astype(np.float64)
449 if not allow_nd and array.ndim >= 3:
450 raise ValueError("Found array with dim %d. %s expected <= 2."
TypeError: float() argument must be a string or a number
In [ ]:
# Inspect the first evolved individual (never executed — no Out/count recorded).
pop[0]
In [ ]:
Content source: gnina/scripts
Similar notebooks: