In [97]:
import numpy as np
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import os
from datetime import datetime
import seaborn as sns
%matplotlib inline
from sklearn.base import BaseEstimator, TransformerMixin
class DataFrameSelector(BaseEstimator, TransformerMixin):
    """Pipeline step that extracts a fixed set of DataFrame columns as an ndarray."""

    def __init__(self, attribute_names):
        # Columns to pull out of the incoming DataFrame.
        self.attribute_names = attribute_names

    def fit(self, X, y=None):
        # Stateless transformer: nothing to learn.
        return self

    def transform(self, X):
        selected = X[self.attribute_names]
        return selected.values
    
plt.rcParams['figure.figsize'] = (10,6.180)    #golden ratio

In [98]:
def read_data(name):
    """Load simulation logs for one target `name` and merge them into one frame.

    Reads RMSD, RWplus score, bias and AWSEM energy-term files from the run
    directory and concatenates them column-wise (rows are assumed to be
    aligned frame-by-frame -- TODO confirm all files have equal length).

    Returns a DataFrame with a `name` column and the original row index
    exposed as `folder`.
    """
    name_list = ["Step", "Chain", "Shake", "Chi", "Rama", "Excluded", "DSSP",
                 "P_AP", "Water", "Burial", "Helix", "AMH_Go", "Frag_Mem",
                 "Vec_FM", "Membrane", "SSB", "VTotal"]

    # NOTE(review): hardcoded absolute local path -- consider a configurable DATA_DIR.
    location = f"/Users/weilu/Research/server_backup/sep_2018/02_week/{name}/"
    # Raw strings avoid Python's invalid-escape-sequence warning for "\s+".
    RMSD = pd.read_table(location + "rmsd-angstrom.xvg", names=["i", "RMSD"], sep=r"\s+")
    bias = pd.read_table(location + "bias.log", names=["i", "biasQ", "bias"], sep=r"\s+").drop("i", axis=1)
    awsem = pd.read_table(location + "awsem.log", names=name_list)
    rw = pd.read_table(location + "rwplusScore.txt", names=["i", "Rw"], sep=r"\s+").drop("i", axis=1)
    raw_data = pd.concat([RMSD, rw, bias, awsem], axis=1)
    return raw_data.assign(name=name).reset_index().rename(columns={"index": "folder"})

def choose_top(data, col="RMSD", n=5, ascending=True):
    """Return `data` with a boolean `chosen` column flagging the top-`n` rows
    by dense rank on `col` (`ascending=True` picks the smallest values)."""
    ranks = data[col].rank(ascending=ascending, method='dense')
    return data.assign(chosen=ranks <= n)

In [123]:
# Targets to analyse; earlier subsets retained below for reference.
folder_list = ["tr894", "tr882", "tr594", "tr898", "tr862", "tr877", "tr872", "tr885", "tr866", "tr868", "tr884", "tr895", "tr896", "tr870", "tr921", "tr922", "tr891", "tr948"]
# folder_list = [ "tr862", "tr877", "tr872", "tr885", "tr866", "tr868", "tr884", "tr895", "tr896", "tr870", "tr921", "tr922", "tr891", "tr948"]
# folder_list = ["tr862", "tr872", "tr885", "tr866", "tr868" , "tr895", "tr896", "tr870", "tr921", "tr891", "tr948"]
# previously excluded: "tr877", "tr884", "tr922", "tr869"
data_list = [read_data(name) for name in folder_list]
raw_data_all = pd.concat(data_list)
# Within each target, flag the n lowest-RMSD frames as `chosen`.
n = 20
raw_data_all = raw_data_all.reset_index(drop=True).groupby("name").apply(choose_top, n=n).reset_index(drop=True)

In [124]:
# Restrict to the targets used for training (alternatives kept for reference).
# train_name_list = ["tr872", "tr885", "tr948"]
# train_name_list = ["tr862", "tr872", "tr885", "tr866", "tr868" , "tr895", "tr896", "tr870", "tr921", "tr891", "tr948"]
train_name_list = ["tr866"]
# train_name_list = ["tr948"]
# The f-string interpolates the Python list literal into the query expression.
raw_data = raw_data_all.reset_index(drop=True).query(f'name in {train_name_list}')

In [132]:
len(folder_list)


Out[132]:
18

In [122]:
raw_data_all.query("AMH_Go != '0.000000' and AMH_Go != 0.0")


Out[122]:
folder i RMSD Rw biasQ bias Step Chain Shake Chi ... Burial Helix AMH_Go Frag_Mem Vec_FM Membrane SSB VTotal name chosen
16625 5102 65200.0 12.8680 -15242.476341 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
16808 5285 83500.0 11.8102 -16282.053802 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
16815 5292 84200.0 11.8117 -16073.869550 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
16890 5367 91700.0 11.6338 -16303.595507 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
16916 5393 94300.0 12.1357 -15768.170100 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
16936 5413 96300.0 12.0980 -15888.030090 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
16958 5435 98500.0 11.8734 -16357.034878 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17021 5498 104800.0 11.8503 -16328.889216 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17026 5503 105300.0 12.0528 -16215.346854 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17028 5505 105500.0 12.2931 -16058.282334 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17029 5506 105600.0 12.0870 -15766.890195 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17083 5560 111000.0 12.1826 -16051.862582 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17094 5571 112100.0 12.1521 -16012.043705 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17098 5575 112500.0 11.9693 -15785.368128 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17114 5591 114100.0 11.7680 -16187.036882 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17147 5624 117400.0 12.3725 -16090.826119 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17176 5653 120300.0 11.9791 -16251.003015 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17186 5663 121300.0 12.1100 -15840.572351 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17194 5671 122100.0 11.9792 -15694.224056 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17215 5692 124200.0 12.4058 -15978.788006 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17265 5742 129200.0 11.9123 -16149.437602 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17289 5766 131600.0 11.9091 -16073.630028 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17291 5768 131800.0 11.8340 -15914.534513 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17324 5801 135100.0 11.7515 -15814.298306 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17356 5833 138300.0 11.8239 -15920.355190 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17358 5835 138500.0 11.7193 -15924.405236 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17377 5854 140400.0 11.8329 -16062.984968 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17382 5859 140900.0 11.7462 -15861.989255 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17396 5873 142300.0 11.9641 -15821.703750 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17406 5883 143300.0 11.8414 -16044.379855 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17407 5884 143400.0 11.9347 -16030.918428 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17421 5898 144800.0 11.8655 -15873.771364 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17427 5904 145400.0 11.9891 -15868.620287 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17463 5940 600.0 12.2862 -16467.035752 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17464 5941 700.0 11.9575 -16562.128473 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17510 5987 5300.0 11.9177 -15966.785638 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17529 6006 7200.0 11.5769 -16229.908653 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17531 6008 7400.0 11.7393 -16134.613055 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17540 6017 8300.0 11.5674 -15979.106604 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17542 6019 8500.0 11.6831 -16178.549226 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17572 6049 11500.0 11.6786 -15908.243298 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17606 6083 14900.0 11.7530 -15840.707092 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17607 6084 15000.0 11.5533 -15745.591816 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17617 6094 16000.0 11.7703 -15443.490305 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17625 6102 16800.0 11.6768 -16264.621514 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17630 6107 17300.0 11.5524 -15808.176460 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17644 6121 18700.0 11.5276 -16193.851978 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17665 6142 20800.0 11.2334 -15929.855341 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17666 6143 20900.0 11.1371 -15555.023312 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17669 6146 21200.0 11.2703 -15627.059542 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17684 6161 22700.0 11.8033 -14901.856966 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17688 6165 23100.0 11.6650 -15638.309668 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17693 6170 23600.0 12.6146 -15198.903783 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17720 6197 26300.0 12.0535 -15373.267140 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17827 6304 37000.0 13.0106 -15056.567087 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
17862 6339 40500.0 12.3335 -15476.019493 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False
18910 7387 145300.0 11.3064 -16314.464107 0.480298 54.018002 Step Chain Shake Chi ... Burial Helix AMH-Go Frag_Mem Vec_FM Membrane SSB VTotal tr869 False

57 rows × 25 columns


In [113]:
raw_data_all.query("VTotal")


Out[113]:
folder i RMSD Rw biasQ bias Step Chain Shake Chi ... Burial Helix AMH_Go Frag_Mem Vec_FM Membrane SSB VTotal name chosen
4711 703 20200.0 5.02009 -13885.902573 0.740600 13.457624 0 0 0 0 ... 0 0 0 0 0 0 0 0 tr862 False
32151 728 22700.0 2.55317 -12089.853954 0.550322 40.442041 0 0 0 0 ... 0 0 0 0 0 0 0 0 tr882 False
44650 201 20100.0 6.49244 -6037.842326 0.387617 75.002595 0 0 0 0 ... 0 0 0 0 0 0 0 0 tr894 False

3 rows × 25 columns


In [101]:
raw_data.head()


Out[101]:
folder i RMSD Rw biasQ bias Step Chain Shake Chi ... Burial Helix AMH_Go Frag_Mem Vec_FM Membrane SSB VTotal name chosen
6513 0 0.0 3.23637 -18113.750445 0.611015 30.261837 0 10.7488 0 1.21955 ... -100.023 -5.7832 0 -722.417 0 0 0 -1382.94 tr866 False
6514 1 100.0 3.17862 -16908.884540 0.596365 32.584284 0 9.32173 0 0.792174 ... -100.846 -4.64428 0 -725.298 0 0 0 -1386.37 tr866 False
6515 2 200.0 3.28087 -17018.179532 0.578606 35.514664 0 11.2294 0 1.10207 ... -100.858 -4.55144 0 -719.994 0 0 0 -1374.77 tr866 False
6516 3 300.0 3.33421 -16951.810512 0.590004 33.619339 0 11.1201 0 1.17873 ... -99.9775 -4.11031 0 -725.642 0 0 0 -1388.17 tr866 False
6517 4 400.0 3.30271 -17276.455098 0.578927 35.460509 0 11.6704 0 0.938973 ... -99.248 -2.87373 0 -722.911 0 0 0 -1370.67 tr866 False

5 rows × 25 columns


In [102]:
raw_data.shape


Out[102]:
(2505, 25)

In [103]:
raw_data_all.columns


Out[103]:
Index(['folder', 'i', 'RMSD', 'Rw', 'biasQ', 'bias', 'Step', 'Chain', 'Shake',
       'Chi', 'Rama', 'Excluded', 'DSSP', 'P_AP', 'Water', 'Burial', 'Helix',
       'AMH_Go', 'Frag_Mem', 'Vec_FM', 'Membrane', 'SSB', 'VTotal', 'name',
       'chosen'],
      dtype='object')

In [125]:
raw_data.plot.scatter("bias", "RMSD")


Out[125]:
<matplotlib.axes._subplots.AxesSubplot at 0x1a32a8ccc0>

In [126]:
raw_data.plot.scatter("VTotal", "RMSD")


Out[126]:
<matplotlib.axes._subplots.AxesSubplot at 0x1a1ad2f4a8>

In [20]:
raw_data.plot.scatter("Rw", "RMSD")


Out[20]:
<matplotlib.axes._subplots.AxesSubplot at 0x1a1a81b7f0>

In [107]:
# seaborn is already imported at the top of the notebook; this re-import is redundant.
import seaborn as sns
# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
# RMSD along each trajectory, colored by target name.
fg = sns.FacetGrid(data=raw_data_all.reset_index(), hue='name', height=8, aspect=1.63)
fg.map(plt.scatter, 'index', 'RMSD').add_legend(fontsize=20)


Out[107]:
<seaborn.axisgrid.FacetGrid at 0x1a1be3a668>

In [23]:
# RMSD along each trajectory, colored by whether the frame was flagged `chosen`.
# seaborn's `size=` parameter was renamed to `height=` (see the UserWarning in
# the recorded output); using the new name silences it and matches the other cells.
import seaborn as sns
# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
fg = sns.FacetGrid(data=raw_data_all.reset_index(), hue='chosen', height=8, aspect=1.63)
fg.map(plt.scatter, 'index', 'RMSD').add_legend()


/Users/weilu/anaconda3/lib/python3.6/site-packages/seaborn/axisgrid.py:230: UserWarning: The `size` paramter has been renamed to `height`; please update your code.
  warnings.warn(msg, UserWarning)
Out[23]:
<seaborn.axisgrid.FacetGrid at 0x1a1a840b70>

In [136]:
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import FeatureUnion
from sklearn.preprocessing import PolynomialFeatures
from sklearn.metrics import confusion_matrix
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
def my_transform(data, label, degree, FEATURES):
    """Build the model matrix: scaled polynomial features plus the label column.

    The numeric columns in FEATURES are standardized and expanded into
    polynomial terms of the given degree (no bias column); the `label`
    column is passed through unchanged and appended as the LAST column of
    the returned ndarray -- downstream code relies on that ordering.
    """
    # LABEL = "Qw"
    LABEL = label
    PolynomialDegree = degree

    num_attribs = FEATURES
    cat_attribs = [LABEL]
    num_pipeline = Pipeline([
            ('selector', DataFrameSelector(num_attribs)),
            ('std_scaler', StandardScaler()),
            ('poly', PolynomialFeatures(degree=PolynomialDegree, include_bias=False))
        ])
    # Pass-through pipeline so the label rides along untouched.
    cat_pipeline = Pipeline([
            ('selector', DataFrameSelector(cat_attribs))
        ])

    # FeatureUnion concatenates columns: [poly features ... , label].
    full_pipeline = FeatureUnion(transformer_list=[
            ("num_pipeline", num_pipeline),
            ("cat_pipeline", cat_pipeline),
        ])
    return full_pipeline.fit_transform(data)


# Feature/label configuration. Commented alternatives record earlier experiments.
# FEATURES = ["eigenvalues", "entropy", "pca"]
# FEATURES = ["eigenvalues", "entropy", "diffRMSD"]
# FEATURES = ["eigenvalues", "entropy"]
FEATURES = ["biasQ",
    'Rw',
     'VTotal',
#     'RMSD', # test
#      'Burial',
#      'Water',
#      'Rama',
#      'DSSP',
#      'P_AP',
#      'Helix',
#      'Frag_Mem'
               ]
# FEATURES = ["eigenvalues"]
# LABEL = "diffRMSD"
# LABEL = "RMSD"
LABEL = "chosen"
# Degree of the polynomial feature expansion (1 = linear features only).
DEGREE = 1

def pred_from_raw(a):
    """Score every frame in group `a` with the fitted classifier.

    Relies on the module-level `clf`, `LABEL`, `DEGREE` and `FEATURES`;
    the classifier must already be fitted when this runs.
    """
    data = my_transform(a, label=LABEL, degree=DEGREE, FEATURES=FEATURES)
    test_y = data[:,-1]  # label is the last column by construction
    test_set = data[:,:-1]
    prob= clf.predict_proba(test_set)[:,1]  # probability of the positive ("chosen") class
    return a.assign(prob=prob)

# data = my_transform(raw_data, label=LABEL, degree=DEGREE, FEATURES=FEATURES)
# NOTE(review): groupby().apply() yields one array per name; `[0]` keeps only the
# first group -- fine while train_name_list has a single entry, but verify before
# training on multiple targets.
data = raw_data.groupby('name').apply(my_transform, label=LABEL, degree=DEGREE, FEATURES=FEATURES)[0]
train_y = data[:,-1]
train_set = data[:,:-1]
from sklearn import svm
p = 0.5
# clf = svm.SVC(probability=True)
# Fixed random_state makes the fit reproducible.
clf = LogisticRegression(random_state=1142)
clf.fit(train_set, train_y)
y_pred_svm = clf.predict(train_set)


# Score every frame of every target with the fitted classifier (adds `prob`).
raw_data_all = raw_data_all.reset_index(drop=True).groupby("name").apply(pred_from_raw).reset_index(drop=True)


# Compare, per target: the truly best frame (lowest RMSD) vs. the frame(s)
# the classifier would pick (highest predicted probability).
picked_n = 1
best = raw_data_all.groupby("name").apply(choose_top, col="RMSD"
                                            , n=1, ascending=True).reset_index(drop=True).query("chosen==True")
picked = raw_data_all.groupby("name").apply(choose_top, col="prob"
                                            , n=picked_n, ascending=False).reset_index(drop=True).query("chosen==True")
# Initial frame of each trajectory (step 0), kept for optional comparison.
init = raw_data_all.query("i == 0.0")
# all_results = pd.concat([best.assign(result='best'), 
#                          picked.assign(result='picked'), init.assign(result='init')])
all_results = pd.concat([best.assign(result='best'), 
                         picked.assign(result='picked')])
# picked.to_csv("/Users/weilu/Desktop/picked.csv")

# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
# One column of points per target; y-limit clipped to 0-15 A for readability.
fg = sns.FacetGrid(data=all_results.reset_index(), hue='result', height=8, aspect=1.63)
fg.map(plt.scatter, 'name', 'RMSD').add_legend(fontsize=20)
fg.set(ylim=(0, 15))


Out[136]:
<seaborn.axisgrid.FacetGrid at 0x105d72b00>

In [128]:


In [129]:


In [130]:


In [88]:
picked.to_csv("/Users/weilu/Desktop/picked.csv")

In [131]:
# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
# Same best-vs-picked plot without the y-limit clipping.
fg = sns.FacetGrid(data=all_results.reset_index(), hue='result', height=8, aspect=1.63)
fg.map(plt.scatter, 'name', 'RMSD').add_legend(fontsize=20)


Out[131]:
<seaborn.axisgrid.FacetGrid at 0x1a32a8c828>

In [73]:
# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
# Duplicate of the best-vs-picked plot (re-run from an earlier session).
fg = sns.FacetGrid(data=all_results.reset_index(), hue='result', height=8, aspect=1.63)
fg.map(plt.scatter, 'name', 'RMSD').add_legend(fontsize=20)


Out[73]:
<seaborn.axisgrid.FacetGrid at 0x1a1d098a58>

In [36]:
# Redundant re-import; duplicate of the best-vs-picked plot above.
import seaborn as sns
# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
fg = sns.FacetGrid(data=all_results.reset_index(), hue='result', height=8, aspect=1.63)
fg.map(plt.scatter, 'name', 'RMSD').add_legend(fontsize=20)


Out[36]:
<seaborn.axisgrid.FacetGrid at 0x1a239456a0>

In [27]:
# Redundant re-import; RMSD of the picked frames only, colored by target.
import seaborn as sns
# sns.set(rc={'figure.figsize':(20,30)})
# plt.figure(figsize=(15,8))
fg = sns.FacetGrid(data=picked.reset_index(), hue='name', height=8, aspect=1.63)
fg.map(plt.scatter, 'index', 'RMSD').add_legend(fontsize=20)


Out[27]:
<seaborn.axisgrid.FacetGrid at 0x1a25352860>

In [44]:
# Per-target confusion matrices: call the top_n highest-probability frames
# positive and compare against the `chosen` ground-truth label.
top_n = 20
pred_list = []  # NOTE(review): never populated; kept for compatibility -- remove if unused.
for target, raw in raw_data_all.reset_index(drop=True).groupby("name"):
    print(target)
    data = my_transform(raw, label=LABEL, degree=DEGREE, FEATURES=FEATURES)
    test_y = data[:,-1]
    test_set = data[:,:-1]
#     print("svm mean square error:", np.sum((test_y-y_pred_svm)**2)/len(test_y))

    prob= clf.predict_proba(test_set)[:,1]
    # Assign onto an explicit copy: writing into the groupby slice triggers
    # SettingWithCopyWarning (seen in the recorded output) and may not persist.
    raw = raw.copy()
    raw["prob"] = prob
    # Indices of the top_n highest-probability frames, best first.
    position_of_top_n = prob.argsort()[-top_n:][::-1]
    threshold = prob[position_of_top_n][-1]  # probability of the weakest picked frame
    predict_y = np.zeros(len(test_y),)
    predict_y[position_of_top_n] = 1
#     predict_y = clf.predict(test_set)
    print(confusion_matrix(test_y, predict_y))


tr862
[[2466   19]
 [  19    1]]
tr866
[[2466   19]
 [  19    1]]
tr868
[[2465   20]
 [  20    0]]
tr870
[[2461   20]
 [  24    0]]
tr872
[[4469   20]
 [  20    0]]
tr885
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
[[2465   20]
 [  20    0]]
tr891
[[2462   19]
 [  23    1]]
tr895
[[2465   20]
 [  20    0]]
tr896
[[3468   19]
 [  19    1]]
tr921
[[2465   20]
 [  20    0]]
tr948
[[2460   20]
 [  20    0]]
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()
/Users/weilu/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:11: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  # This is added back by InteractiveShellApp.init_path()

In [ ]:


In [43]:
# In-sample predictions vs labels, plus training MSE.
plt.plot(y_pred_svm, train_y, ".")
print("mean square error:", np.sum((train_y-y_pred_svm)**2)/len(y_pred_svm))


mean square error: 0.00798403193613

In [245]:
data.shape


Out[245]:
(4509, 4)

In [246]:
train_y.shape


Out[246]:
(4509,)

In [247]:
y_pred_svm.shape


Out[247]:
(4509,)

In [248]:
y_pred_svm.shape


Out[248]:
(4509,)

In [249]:
# Residuals (prediction - label) per sample, plus the same MSE.
plt.plot(y_pred_svm-train_y, ".")
print("mean square error:", np.sum((train_y-y_pred_svm)**2)/len(y_pred_svm))


mean square error: 1.2008104586

In [250]:
# Plain least-squares baseline fitted on the same training features.
from sklearn.linear_model import LinearRegression
lin_reg = LinearRegression()


lin_reg.fit(train_set, train_y)
y_pred = lin_reg.predict(train_set)

In [251]:
# Linear-regression predictions vs labels, plus in-sample MSE.
plt.plot(y_pred, train_y, ".")
print("mean square error:", np.sum((train_y-y_pred)**2)/len(y_pred))


mean square error: 1.24828941534

In [252]:
# Linear-regression residuals; MSE repeated from the previous cell.
plt.plot(y_pred-train_y, ".")
print("mean square error:", np.sum((train_y-y_pred)**2)/len(y_pred))


mean square error: 1.24828941534

In [255]:
# Per-target linear-regression MSE (the SVM prediction is computed but unused here).
# Near-duplicate of the two cells below -- consider a single parameterized function.
for name, raw in raw_data_all.groupby("name"):
    print(name)
    data = my_transform(raw, label=LABEL, degree=DEGREE, FEATURES=FEATURES)
    test_y = data[:,-1]
    test_set = data[:,:-1]
    y_pred_svm = clf.predict(test_set)
#     print("svm mean square error:", np.sum((test_y-y_pred_svm)**2)/len(test_y))
    y_pred = lin_reg.predict(test_set)
    print("linear mean square error:", np.sum((test_y-y_pred)**2)/len(test_y))


tr862
linear mean square error: 1.49723410055
tr866
linear mean square error: 5.98538431515
tr868
linear mean square error: 9.02355704396
tr870
linear mean square error: 11.8755754536
tr872
linear mean square error: 1.24828941534
tr885
linear mean square error: 5.61811245228
tr891
linear mean square error: 9.92526421
tr895
linear mean square error: 3.23645241068
tr896
linear mean square error: 24.410818636
tr921
linear mean square error: 4.28738563158
tr948
linear mean square error: 2.49373343177

In [254]:
# Per-target MSE for both models; near-duplicate of the neighbouring cells.
for name, raw in raw_data_all.groupby("name"):
    print(name)
    data = my_transform(raw, label=LABEL, degree=DEGREE, FEATURES=FEATURES)
    test_y = data[:,-1]
    test_set = data[:,:-1]
    y_pred_svm = clf.predict(test_set)
    print("svm mean square error:", np.sum((test_y-y_pred_svm)**2)/len(test_y))
    y_pred = lin_reg.predict(test_set)
    print("linear mean square error:", np.sum((test_y-y_pred)**2)/len(test_y))


tr862
svm mean square error: 1.72174477408
linear mean square error: 1.49723410055
tr866
svm mean square error: 6.20499917353
linear mean square error: 5.98538431515
tr868
svm mean square error: 8.5947418306
linear mean square error: 9.02355704396
tr870
svm mean square error: 12.8842316427
linear mean square error: 11.8755754536
tr872
svm mean square error: 1.2008104586
linear mean square error: 1.24828941534
tr885
svm mean square error: 5.62054361252
linear mean square error: 5.61811245228
tr891
svm mean square error: 9.49002608182
linear mean square error: 9.92526421
tr895
svm mean square error: 3.39272367709
linear mean square error: 3.23645241068
tr896
svm mean square error: 25.9000515954
linear mean square error: 24.410818636
tr921
svm mean square error: 4.88728819291
linear mean square error: 4.28738563158
tr948
svm mean square error: 2.86872645287
linear mean square error: 2.49373343177

In [215]:
# Same evaluation with degree hardcoded to 1 (DEGREE is also 1 here, so the
# recorded output is identical to the previous cell -- candidate for removal).
for name, raw in raw_data_all.groupby("name"):
    print(name)
    data = my_transform(raw, label=LABEL, degree=1, FEATURES=FEATURES)
    test_y = data[:,-1]
    test_set = data[:,:-1]
    y_pred_svm = clf.predict(test_set)
    print("svm mean square error:", np.sum((test_y-y_pred_svm)**2)/len(test_y))
    y_pred = lin_reg.predict(test_set)
    print("linear mean square error:", np.sum((test_y-y_pred)**2)/len(test_y))


tr862
svm mean square error: 1.72174477408
linear mean square error: 1.49723410055
tr866
svm mean square error: 6.20499917353
linear mean square error: 5.98538431515
tr868
svm mean square error: 8.5947418306
linear mean square error: 9.02355704396
tr870
svm mean square error: 12.8842316427
linear mean square error: 11.8755754536
tr872
svm mean square error: 1.2008104586
linear mean square error: 1.24828941534
tr885
svm mean square error: 5.62054361252
linear mean square error: 5.61811245228
tr891
svm mean square error: 9.49002608182
linear mean square error: 9.92526421
tr895
svm mean square error: 3.39272367709
linear mean square error: 3.23645241068
tr896
svm mean square error: 25.9000515954
linear mean square error: 24.410818636
tr921
svm mean square error: 4.88728819291
linear mean square error: 4.28738563158
tr948
svm mean square error: 2.86872645287
linear mean square error: 2.49373343177

In [ ]:


In [ ]:


In [96]:
# Flag the `cutoff` lowest-valued predictions and labels (rank ascending).
# NOTE(review): the column name "large" is misleading -- rank <= cutoff
# selects the SMALLEST values; confirm that is the intent.
cutoff = 10
a = pd.DataFrame(y_pred, columns=["pred"])
a["large"] = a["pred"].rank(method="first") <= cutoff

b = pd.DataFrame(train_y, columns=["train"])
b["gt"] = b["train"].rank(method="first") <= cutoff # ground truth

In [97]:
# Overlap between the predicted top-10 and the ground-truth top-10.
# NOTE(review): this rebinds `picked`, shadowing the DataFrame of picked frames
# from the classifier section above.
picked = pd.concat([a,b], axis=1).query("large == True and gt == True")
print("out of the top 10, picked out ", picked.shape[0])


out of the top 10, picked out  2

In [98]:
# Exact duplicate of the previous cell.
picked = pd.concat([a,b], axis=1).query("large == True and gt == True")
print("out of the top 10, picked out ", picked.shape[0])


out of the top 10, picked out  2

In [252]:
picked.sum()


Out[252]:
pred    -1.596755
large    8.000000
train   -1.629867
gt       8.000000
dtype: float64

In [ ]:


In [221]:
pd.concat([a,b], axis=1).sum()


Out[221]:
pred     -4.31886
large    10.00000
train    -4.31886
gt       10.00000
dtype: float64

In [194]:
pd.concat([a,b], axis=1).query("large == True")


Out[194]:
pred large train gt
0 -0.757080 True -0.814549 True
1 -0.399522 True -0.457784 True
2 -0.323967 True -0.095139 True
3 -0.237361 True -0.559770 True
4 -0.188904 True -0.168058 True
5 -0.174447 True -0.013000 False
6 -0.152443 True -0.028691 False
7 -0.141327 True -0.006070 False
8 -0.110571 True -0.096960 True
9 -0.097252 True -0.565082 True

In [195]:
picked.sum()


Out[195]:
pred    -2.114658
large    7.000000
train   -2.757341
gt       7.000000
dtype: float64

In [165]:
# Load refinement set {i}: eigenvalues, entropy, PCA std, and RMSD change.
# Raw strings avoid the invalid-escape-sequence warning for "\s+".
i = 2
diffRMSD = pd.read_table(f"/Users/weilu/Research/data/refinement_jul27/set{i}/diffRMSD.txt", names=["diffRMSD"], sep=r"\s+")
eigenvalues = pd.read_table(f"/Users/weilu/Research/data/refinement_jul27/set{i}/eigenvalues.txt", names=["eigenvalues"], sep=r"\s+")
entropy = pd.read_table(f"/Users/weilu/Research/data/refinement_jul27/set{i}/entropy.txt", names=["entropy"], sep=r"\s+")
pca = pd.read_table(f"/Users/weilu/Research/data/refinement_jul27/set{i}/std.PCA.txt", names=["pca"], sep=r"\s+")
raw_data_2 = pd.concat([eigenvalues, entropy, pca, diffRMSD], axis=1)

In [166]:
data_2 = my_transform(raw_data, label=LABEL, degree=1, FEATURES=FEATURES)

In [167]:
test_y = data_2[:,-1]
test_set = data_2[:,:-1]

# NOTE(review): predicts on `train_set`, not the `test_set` built just above --
# likely a typo; confirm before trusting downstream numbers.
y_pred = lin_reg.predict(train_set)

In [168]:
data_2


Out[168]:
array([[  6.74157155e+00,   1.67029682e-02,  -1.31131000e-01],
       [  6.24683413e+00,  -4.61862619e+00,  -7.36925000e-02],
       [  4.63557806e+00,  -3.54300231e-01,  -4.41102000e-02],
       [  4.28502428e+00,  -1.91024887e+00,  -4.68995000e-03],
       [  3.92727394e+00,   1.08402967e+00,  -3.46758000e-02],
       [  3.58305324e+00,   9.74195694e-01,  -2.50843000e-02],
       [  3.26352572e+00,   2.06149694e-01,  -1.01665000e-02],
       [  3.18244367e+00,   3.88927742e-01,  -6.82601000e-05],
       [  2.91077060e+00,   4.19328449e-01,  -9.05235000e-05],
       [  2.76889872e+00,   7.60120407e-02,  -5.62739000e-02],
       [  2.62258098e+00,   1.23791533e+00,  -1.53555000e-02],
       [  2.43144532e+00,  -9.26596297e-02,  -6.22972000e-04],
       [  2.34912812e+00,   1.44628764e+00,  -4.32242000e-02],
       [  2.08882973e+00,   2.72636079e-01,  -8.62511000e-02],
       [  2.07284963e+00,   5.23852719e-01,  -5.44797000e-04],
       [  2.03651007e+00,  -9.45072730e-02,  -4.29566000e-02],
       [  1.94714519e+00,  -2.33640544e-02,  -4.71649000e-03],
       [  1.92257582e+00,   2.01383420e-02,  -7.33374000e-02],
       [  1.86818930e+00,  -6.55999061e-02,  -2.36041000e-02],
       [  1.80240997e+00,  -2.40728320e-01,  -7.04469000e-03],
       [  1.67816871e+00,   1.94638276e-01,  -1.39979000e-03],
       [  1.61222363e+00,   4.50298545e-01,  -8.72481000e-03],
       [  1.53990975e+00,   4.05572359e-01,  -7.86921000e-03],
       [  1.46143075e+00,   3.78978043e-01,  -4.90715000e-02],
       [  1.42182788e+00,  -3.61165485e-01,  -7.81370000e-02],
       [  1.32038959e+00,   1.09116305e+00,  -3.08933000e-03],
       [  1.27678537e+00,   7.37054083e-01,  -1.50127000e-03],
       [  1.23183793e+00,   8.62865670e-01,  -1.45141000e-02],
       [  1.13187994e+00,   8.89337333e-01,  -8.65393000e-02],
       [  1.12354823e+00,   1.60001880e-01,  -1.68806000e-02],
       [  1.08904266e+00,   1.08542990e+00,  -7.58387000e-03],
       [  1.07635577e+00,   4.32038290e-01,  -6.07691000e-02],
       [  1.02425211e+00,   1.10223324e+00,  -8.81419000e-03],
       [  9.38925295e-01,   1.14661351e+00,  -6.69598000e-03],
       [  9.00334763e-01,   8.88459703e-01,  -7.90080000e-03],
       [  8.41772459e-01,   1.01794900e+00,  -1.42386000e-02],
       [  8.12959824e-01,   7.60060707e-01,  -4.48091000e-02],
       [  7.95073895e-01,  -1.19868797e-01,  -3.24309000e-02],
       [  7.42779420e-01,   8.04353360e-01,  -1.84897000e-02],
       [  7.24742508e-01,   6.10482204e-01,  -5.84193000e-03],
       [  6.91744415e-01,   3.87728463e-01,  -4.31564000e-02],
       [  6.84908757e-01,   9.41450002e-01,  -2.91446000e-03],
       [  6.72215665e-01,  -3.60422596e-02,  -8.82187000e-04],
       [  6.60527719e-01,   8.48460856e-01,  -1.67760000e-03],
       [  6.24698669e-01,   9.46852229e-01,  -3.44938000e-04],
       [  5.87196280e-01,   5.68123580e-01,  -7.48668000e-03],
       [  5.35999623e-01,   5.76976089e-01,  -1.21419000e-02],
       [  5.25508745e-01,  -4.68564757e-01,  -2.33775000e-07],
       [  5.09696750e-01,   4.73958812e-01,  -1.27951000e-05],
       [  4.91469280e-01,   5.07813327e-01,  -9.03684000e-02],
       [  4.86807054e-01,   6.29428849e-01,  -4.64136000e-03],
       [  4.66379231e-01,   1.01756782e+00,  -5.34634000e-02],
       [  4.40459460e-01,   1.73259778e-01,  -2.16277000e-02],
       [  4.03143162e-01,   9.31298422e-01,  -1.63303000e-02],
       [  3.85560564e-01,  -1.63581557e-02,  -2.14795000e-02],
       [  3.72178063e-01,   1.17082405e+00,  -5.60263000e-02],
       [  3.61417351e-01,   4.77230303e-01,  -2.46531000e-02],
       [  3.52380224e-01,   2.37310181e-01,  -1.79584000e-02],
       [  3.20211543e-01,   4.15279061e-01,  -1.42618000e-02],
       [  3.13167649e-01,   4.11434522e-01,  -6.10886000e-03],
       [  2.94194756e-01,   4.40127384e-01,  -8.59928000e-04],
       [  2.84025445e-01,   3.88454581e-01,  -3.14038000e-02],
       [  2.73161638e-01,   9.14106503e-01,  -2.58712000e-02],
       [  2.59317691e-01,   5.06936962e-01,  -2.51378000e-03],
       [  2.27593522e-01,   8.44737823e-01,  -3.18383000e-02],
       [  2.18902527e-01,   5.78047514e-01,  -1.48132000e-02],
       [  2.01898674e-01,   8.28477594e-01,  -2.45589000e-02],
       [  1.89905599e-01,  -5.42888800e-02,  -1.23120000e-04],
       [  1.71562566e-01,   2.84754712e-01,  -4.32493000e-03],
       [  1.62653532e-01,   8.52993231e-01,  -1.26518000e-02],
       [  1.51927620e-01,   4.58037322e-01,  -1.12483000e-02],
       [  1.39220323e-01,   9.10811187e-01,  -2.15334000e-02],
       [  1.29290885e-01,   9.32317355e-01,  -2.43820000e-03],
       [  1.23505034e-01,   4.26767172e-01,  -2.91779000e-02],
       [  1.11873846e-01,   1.00081972e+00,  -3.48448000e-02],
       [  1.06781126e-01,   8.52341895e-01,  -9.83258000e-04],
       [  9.93884951e-02,   8.38245166e-01,  -7.55218000e-02],
       [  8.96743032e-02,   7.76455773e-01,  -9.22948000e-03],
       [  8.12414171e-02,   8.94644408e-01,  -2.36153000e-03],
       [  7.62726349e-02,   7.76115041e-01,  -1.04692000e-01],
       [  5.98960369e-02,  -5.90221705e-01,  -1.19506000e-03],
       [  5.43014891e-02,   5.45415324e-01,  -6.30976000e-03],
       [  4.78704574e-02,   1.02508792e+00,  -1.48555000e-03],
       [  4.64291744e-02,   1.03128050e+00,  -9.59444000e-03],
       [  3.02433207e-02,   4.88215642e-01,  -7.03305000e-05],
       [  7.93547228e-03,   5.69887401e-01,  -3.17742000e-02],
       [  4.81539068e-03,   5.71668292e-01,  -5.30854000e-03],
       [ -1.07713086e-02,   7.71492204e-01,  -9.78444000e-03],
       [ -1.82767738e-02,   6.15106121e-01,  -1.45028000e-05],
       [ -2.37165550e-02,   9.67659391e-01,  -1.21810000e-02],
       [ -2.63194247e-02,   8.12773563e-01,  -4.01473000e-03],
       [ -3.58062742e-02,   7.01888107e-01,  -1.39628000e-06],
       [ -4.23686248e-02,   8.30659316e-01,  -1.87563000e-03],
       [ -5.03352261e-02,   6.26932277e-01,  -3.84569000e-02],
       [ -6.04036133e-02,   9.34469803e-01,  -1.50103000e-02],
       [ -6.55783374e-02,   7.23306901e-01,  -2.09709000e-02],
       [ -7.36193135e-02,   7.30203244e-01,  -9.26104000e-03],
       [ -8.16701526e-02,  -9.96688345e-01,  -1.30770000e-02],
       [ -8.67161009e-02,   2.33212393e-01,  -2.87665000e-03],
       [ -8.90808349e-02,   5.38873044e-01,  -8.61013000e-03],
       [ -1.02112056e-01,   5.66721923e-01,  -6.93631000e-04],
       [ -1.08284854e-01,   2.89465708e-01,  -1.03549000e-02],
       [ -1.11042924e-01,   2.91198339e-01,  -3.67934000e-03],
       [ -1.19126267e-01,   8.10353807e-01,  -4.83771000e-03],
       [ -1.21143877e-01,  -1.17813014e-02,  -5.30832000e-03],
       [ -1.29172758e-01,  -4.74464867e-01,  -1.79334000e-03],
       [ -1.39870322e-01,   7.93283667e-01,  -1.05409000e-04],
       [ -1.48831461e-01,   3.71343882e-01,  -2.83697000e-03],
       [ -1.57656009e-01,   1.58775661e-01,  -6.31264000e-02],
       [ -1.61643652e-01,   7.84438868e-01,  -1.15440000e-03],
       [ -1.63345588e-01,   4.87936750e-01,  -1.55709000e-02],
       [ -1.70319326e-01,   7.33609859e-02,  -1.08887000e-05],
       [ -1.72476319e-01,   4.35245412e-01,  -2.46875000e-04],
       [ -1.77965229e-01,   7.06583860e-01,  -2.50911000e-03],
       [ -1.83459349e-01,   5.73315951e-01,  -1.81348000e-02],
       [ -1.93281847e-01,   5.77504118e-01,  -1.10066000e-03],
       [ -1.94667798e-01,   7.04915993e-01,  -1.34197000e-04],
       [ -2.01254775e-01,   9.01191070e-01,  -1.15387000e-03],
       [ -2.02683900e-01,   1.00603328e+00,  -4.59827000e-03],
       [ -2.09524272e-01,   7.64426862e-01,  -2.19924000e-02],
       [ -2.16744830e-01,   5.95908175e-01,  -1.22218000e-03],
       [ -2.26332604e-01,   9.10303996e-01,  -2.18137000e-05],
       [ -2.31149659e-01,  -5.22449551e-02,  -5.16414000e-03],
       [ -2.36004863e-01,   4.82250719e-01,  -5.92800000e-03],
       [ -2.37595981e-01,   4.42122577e-01,  -1.10518000e-02],
       [ -2.39728782e-01,   3.76845825e-01,  -3.34106000e-02],
       [ -2.45324850e-01,   5.19256591e-01,  -2.45945000e-05],
       [ -2.47010658e-01,   3.69875004e-01,  -9.76754000e-04],
       [ -2.48309270e-01,   7.86047858e-01,  -1.62749000e-03],
       [ -2.52582130e-01,   9.01886058e-02,  -4.90756000e-04],
       [ -2.55046175e-01,   3.67274312e-01,  -7.78287000e-04],
       [ -2.59839814e-01,   6.80555430e-01,  -7.15732000e-03],
       [ -2.60837453e-01,   7.66702237e-01,  -2.08928000e-02],
       [ -2.62514701e-01,   1.27169139e-01,  -3.52615000e-04],
       [ -2.70285968e-01,   7.11385815e-01,  -1.09640000e-02],
       [ -2.73468080e-01,   3.89220182e-01,  -3.34225000e-05],
       [ -2.78059406e-01,   6.49654791e-01,  -3.79924000e-03],
       [ -2.81705973e-01,   7.03215516e-01,  -2.16499000e-02],
       [ -2.85682574e-01,  -7.39062395e-01,  -1.19337000e-03],
       [ -2.89661192e-01,   5.44645460e-01,  -2.66348000e-04],
       [ -2.93333998e-01,  -1.86240380e-01,  -5.18529000e-04],
       [ -2.94835543e-01,   6.54997295e-01,  -5.36594000e-04],
       [ -2.96985682e-01,   4.18970899e-01,  -1.60224000e-04],
       [ -2.97785878e-01,   7.11501051e-01,  -3.69685000e-04],
       [ -3.03260210e-01,   8.77836218e-01,  -1.84768000e-04],
       [ -3.08814810e-01,   9.58956832e-01,  -4.70676000e-05],
       [ -3.12793738e-01,   4.15504472e-01,  -7.25242000e-03],
       [ -3.19230880e-01,   9.34286635e-01,  -1.12059000e-02],
       [ -3.20204358e-01,   6.34820618e-01,  -2.84607000e-04],
       [ -3.23959386e-01,   7.42479351e-01,  -2.41014000e-03],
       [ -3.26195406e-01,   7.58072189e-01,  -3.87747000e-04],
       [ -3.27924729e-01,   7.46949483e-01,  -5.16914000e-03],
       [ -3.32584691e-01,   6.61080195e-01,  -2.92624000e-02],
       [ -3.37340088e-01,   7.40400341e-01,  -1.04926000e-02],
       [ -3.37872187e-01,   6.41269431e-01,  -7.22716000e-04],
       [ -3.39129797e-01,   3.87033461e-01,  -1.10574000e-03],
       [ -3.41140212e-01,   6.94968162e-01,  -6.49715000e-03],
       [ -3.43387677e-01,   3.82164583e-01,  -3.92842000e-04],
       [ -3.44926224e-01,   2.90639523e-01,  -1.04158000e-05],
       [ -3.46831155e-01,   7.15768848e-01,  -2.72629000e-03],
       [ -3.48720486e-01,   3.56577199e-01,  -1.28353000e-02],
       [ -3.49431110e-01,   1.07809817e+00,  -6.62791000e-03],
       [ -3.52415313e-01,   6.21676847e-01,  -1.63109000e-03],
       [ -3.54369776e-01,   7.89381801e-01,  -4.33726000e-03],
       [ -3.57354227e-01,   4.49347297e-01,  -8.61129000e-04],
       [ -3.58704543e-01,   5.77153214e-01,  -5.76915000e-03],
       [ -3.59488766e-01,   8.49315902e-01,  -1.10211000e-02],
       [ -3.61564716e-01,   6.13359090e-01,  -5.55436000e-03],
       [ -3.63861309e-01,   7.24255060e-01,  -8.23964000e-03],
       [ -3.65148508e-01,   6.48147691e-01,  -2.62600000e-03],
       [ -3.67804817e-01,   7.60537625e-01,  -2.31804000e-03],
       [ -3.69864639e-01,   1.93759218e-01,  -6.55455000e-04],
       [ -3.73350981e-01,   4.91648793e-01,  -1.36865000e-03],
       [ -3.75287362e-01,  -5.31064009e-01,  -1.48147000e-03],
       [ -3.76412040e-01,   5.12857743e-01,  -1.29272000e-04],
       [ -3.81911092e-01,   3.70878781e-01,  -1.31770000e-02],
       [ -3.85745457e-01,   1.06326980e+00,  -2.39284000e-03],
       [ -3.87615279e-01,  -5.65411937e-01,  -8.70783000e-05],
       [ -3.89194487e-01,   5.84342264e-01,  -5.22247000e-03],
       [ -3.91747205e-01,   6.35115075e-01,  -1.58735000e-02],
       [ -3.95141834e-01,   4.23985842e-01,  -8.37931000e-08],
       [ -3.98743305e-01,   4.55845754e-01,  -4.01700000e-04],
       [ -4.00464936e-01,   5.83774017e-01,  -6.19898000e-03],
       [ -4.05974812e-01,   9.32189856e-02,  -2.90878000e-03],
       [ -4.06993604e-01,   4.05642491e-01,  -4.08755000e-04],
       [ -4.08223052e-01,   1.56256703e-01,  -3.77723000e-03],
       [ -4.09664366e-01,   4.64341754e-01,  -3.25425000e-05],
       [ -4.11166346e-01,   2.89517234e-01,  -2.88696000e-04],
       [ -4.11829547e-01,  -2.92872646e-01,  -8.79217000e-04],
       [ -4.14228770e-01,   5.40568522e-01,  -3.62668000e-03],
       [ -4.15064137e-01,   4.81897891e-01,  -2.88621000e-03],
       [ -4.16919879e-01,   5.96024995e-01,  -2.44277000e-02],
       [ -4.18558233e-01,  -4.21014374e-01,  -2.39481000e-02],
       [ -4.21348932e-01,  -2.23590605e-01,  -9.45668000e-03],
       [ -4.22266800e-01,   1.74085106e-01,  -4.44974000e-04],
       [ -4.25349322e-01,   1.33053401e-01,  -2.32552000e-05],
       [ -4.29865311e-01,   1.06678008e-01,  -7.26632000e-06],
       [ -4.31068179e-01,  -6.09318686e-01,  -2.38587000e-03],
       [ -4.32179118e-01,   1.94830639e-01,  -1.82814000e-03],
       [ -4.35468698e-01,  -6.35064523e-01,  -2.13317000e-02],
       [ -4.40599194e-01,  -1.33146002e+00,  -5.97998000e-07],
       [ -4.41491568e-01,  -5.23687035e-01,  -5.37608000e-03],
       [ -4.43288597e-01,   8.71008494e-02,  -7.34290000e-03],
       [ -4.48654128e-01,   3.32863375e-01,  -1.00925000e-03],
       [ -4.49018155e-01,   2.31747953e-01,  -1.84189000e-05],
       [ -4.51323649e-01,  -4.27267515e-01,  -2.93433000e-02],
       [ -4.54410606e-01,  -8.78271905e-01,  -1.46583000e-03],
       [ -4.58453612e-01,  -8.83488549e-01,  -2.07591000e-05],
       [ -4.59029070e-01,  -8.06023297e-01,  -4.82710000e-03],
       [ -4.62083942e-01,  -8.93669646e-01,  -2.43700000e-04],
       [ -4.66194716e-01,  -2.11416543e+00,  -1.45189000e-05],
       [ -4.66998975e-01,  -7.22128302e-02,  -8.48042000e-03],
       [ -4.68812101e-01,  -8.02163531e-01,  -1.95450000e-06],
       [ -4.70824656e-01,  -1.30022174e+00,  -3.25523000e-05],
       [ -4.73446010e-01,   2.06309639e-02,  -1.48752000e-04],
       [ -4.74454722e-01,  -9.26757408e-01,  -5.90742000e-04],
       [ -4.77926068e-01,  -9.08455519e-01,  -2.94292000e-03],
       [ -4.80012083e-01,  -7.66076783e-01,  -1.29211000e-03],
       [ -4.81921155e-01,  -1.11491574e+00,  -8.52645000e-03],
       [ -4.86145150e-01,  -9.89824751e-02,  -1.39683000e-03],
       [ -4.90259847e-01,  -5.75047973e-01,  -3.16176000e-04],
       [ -4.92772478e-01,  -1.07407050e+00,  -5.54745000e-03],
       [ -4.95396314e-01,  -1.19585300e+00,  -6.32917000e-03],
       [ -5.03044419e-01,  -2.42351514e+00,  -4.92183000e-03],
       [ -5.06260322e-01,  -3.03121490e+00,  -3.78423000e-04],
       [ -5.11026450e-01,  -3.05722604e+00,  -4.06964000e-03],
       [ -5.12180345e-01,  -2.26951760e+00,  -9.46486000e-04],
       [ -5.20681387e-01,  -1.40213108e+00,  -2.14874000e-02],
       [ -5.23596938e-01,  -1.04707089e+00,  -2.12216000e-03],
       [ -5.30154854e-01,  -2.89892150e+00,  -3.76530000e-04],
       [ -5.35819000e-01,  -1.41247263e+00,  -1.60764000e-04],
       [ -5.37571351e-01,  -1.36906782e+00,  -7.40651000e-03],
       [ -5.40074910e-01,  -3.80277181e-02,  -7.80139000e-03],
       [ -5.41864526e-01,  -1.02083175e+00,  -5.63555000e-03],
       [ -5.46041146e-01,  -2.20920137e+00,  -2.97148000e-03],
       [ -5.50699898e-01,  -2.49668559e+00,  -2.34804000e-04],
       [ -5.58658048e-01,   1.81568015e-01,  -4.29547000e-03],
       [ -5.59925986e-01,  -1.33111042e+00,  -1.63544000e-03],
       [ -5.64530214e-01,   1.37682074e-01,  -5.02382000e-05],
       [ -5.64800978e-01,  -9.80807892e-01,  -1.96468000e-04],
       [ -5.65911452e-01,  -2.08537521e+00,  -1.86368000e-04],
       [ -5.69842336e-01,   3.25337670e-03,  -1.76263000e-03],
       [ -5.73358654e-01,  -2.72890551e-01,  -2.14553000e-05],
       [ -5.74420620e-01,  -3.87853248e-01,  -7.12336000e-03],
       [ -5.76383085e-01,   6.50125501e-03,  -1.27663000e-04],
       [ -5.78426112e-01,  -1.39008079e+00,  -1.36788000e-04],
       [ -5.79470042e-01,  -6.34983550e-01,  -1.10631000e-02],
       [ -5.79962550e-01,   2.33449193e-01,  -7.68856000e-03],
       [ -5.81819749e-01,  -5.01128417e-01,  -2.74083000e-04],
       [ -5.83225147e-01,  -6.42460902e-01,  -3.01401000e-03],
       [ -5.83681243e-01,  -4.59451819e-01,  -7.50557000e-05],
       [ -5.86324231e-01,   2.01770564e-02,  -5.52649000e-04],
       [ -5.87297772e-01,  -1.27807327e-01,  -3.46077000e-04],
       [ -5.88612171e-01,  -5.78077232e-01,  -1.82893000e-04],
       [ -5.90017539e-01,   6.26335266e-01,  -1.27560000e-04],
       [ -5.91984749e-01,   3.04443226e-02,  -1.13574000e-05],
       [ -5.92241029e-01,  -1.01894181e+00,  -2.25830000e-04],
       [ -5.93448774e-01,  -3.59396364e-01,  -7.35600000e-03],
       [ -5.93990721e-01,  -1.40639073e-01,  -2.12944000e-05],
       [ -5.95127999e-01,  -1.15229840e-01,  -2.59608000e-03],
       [ -5.96631592e-01,  -1.45518247e-01,  -2.61217000e-03],
       [ -5.98557319e-01,  -2.57993019e-01,  -8.59025000e-04],
       [ -5.99048633e-01,   1.68031699e-01,  -5.53298000e-04],
       [ -5.99439132e-01,   1.44729427e-01,  -6.24409000e-04],
       [ -6.01192313e-01,  -1.11477981e+00,  -4.06142000e-05],
       [ -6.01944272e-01,  -5.00647400e-02,  -2.43185000e-04],
       [ -6.02382705e-01,  -6.64650274e-01,  -6.60920000e-05],
       [ -6.03117443e-01,   3.89140929e-01,  -3.75741000e-06],
       [ -6.04646306e-01,  -2.39869569e-01,  -5.05019000e-04],
       [ -6.05522730e-01,  -2.68191468e-01,  -1.43738000e-05],
       [ -6.07118562e-01,   1.50170209e-01,  -3.01833000e-04],
       [ -6.07464089e-01,   3.85894095e-01,  -2.56684000e-05],
       [ -6.07826464e-01,   9.79786885e-02,  -9.21261000e-05],
       [ -6.08974435e-01,   1.37435668e-01,  -9.11934000e-04],
       [ -6.09566091e-01,  -2.75089754e-01,  -1.03443000e-03],
       [ -6.10271908e-01,  -1.01198375e+00,  -1.02871000e-03],
       [ -6.11349939e-01,   1.28521657e-02,  -3.38844000e-05],
       [ -6.12161114e-01,  -1.07801397e+00,  -4.20636000e-04],
       [ -6.12297528e-01,  -2.50533073e-01,  -2.97911000e-05],
       [ -6.12382781e-01,  -3.51100422e-01,  -4.11765000e-05],
       [ -6.13492595e-01,  -5.11950570e-01,  -2.56136000e-07],
       [ -6.14005434e-01,  -2.04942107e-01,  -6.27025000e-04],
       [ -6.14285851e-01,  -1.95009614e-01,  -1.81598000e-05],
       [ -6.14808320e-01,  -1.27428540e+00,  -2.95630000e-04],
       [ -6.15594094e-01,  -4.80534263e-01,  -7.26203000e-04],
       [ -6.16031635e-01,  -2.75176652e-01,  -9.06470000e-04],
       [ -6.16297057e-01,  -3.87697384e-01,  -1.77972000e-03],
       [ -6.16852015e-01,  -8.98168757e-01,  -6.27859000e-07],
       [ -6.16881347e-01,   2.01932602e-01,  -7.09714000e-05],
       [ -6.17816227e-01,  -9.13162085e-01,  -6.39543000e-05],
       [ -6.18887490e-01,   1.61904858e-01,  -1.47788000e-03],
       [ -6.19265520e-01,  -1.39362230e+00,  -2.90230000e-04],
       [ -6.19631586e-01,  -1.54720451e+00,  -2.72453000e-03],
       [ -6.20303976e-01,  -1.23734959e+00,  -5.06764000e-04],
       [ -6.20683146e-01,  -1.21896464e+00,  -1.99522000e-06],
       [ -6.21495469e-01,  -9.75553360e-01,  -1.94494000e-03],
       [ -6.22091158e-01,  -8.63103331e-01,  -1.18942000e-04],
       [ -6.23486111e-01,  -2.16722145e+00,  -5.87104000e-06],
       [ -6.25944853e-01,  -1.56203274e+00,  -1.14804000e-03],
       [ -6.26311718e-01,  -1.64279339e+00,  -2.53627000e-04],
       [ -6.26552219e-01,  -4.92192870e-01,  -2.50065000e-04],
       [ -6.27603375e-01,  -1.11297687e+00,  -4.72164000e-03],
       [ -6.29087886e-01,  -1.63234113e+00,  -1.01309000e-02],
       [ -6.29762415e-01,  -1.72649923e+00,  -7.01713000e-03],
       [ -6.30201585e-01,  -9.67542316e-01,  -7.00665000e-06],
       [ -6.31073232e-01,  -1.33805796e+00,  -1.40980000e-05],
       [ -6.33546365e-01,  -4.44608252e+00,  -3.24941000e-03],
       [ -6.34827198e-01,  -2.41559615e+00,  -6.12515000e-04],
       [ -6.36350307e-01,  -2.12849549e+00,  -1.55898000e-03],
       [ -6.37324499e-01,  -3.80719534e+00,  -2.99739000e-03],
       [ -6.37576584e-01,  -3.70280679e+00,  -9.54254000e-04],
       [ -6.39988531e-01,  -3.56133245e+00,  -5.49887000e-03],
       [ -7.18341435e-01,   1.41869497e+00,   7.31771000e-10],
       [ -7.18697713e-01,   1.86265989e+00,  -4.90841000e-10],
       [ -7.18790971e-01,   1.82967487e+00,   1.63373000e-10],
       [ -7.18898363e-01,   1.19419543e+00,   1.07616000e-10],
       [ -7.20146853e-01,   1.45077765e+00,  -4.68208000e-15],
       [ -7.20149643e-01,   1.23223991e+00,  -5.19378000e-15]])

In [ ]: