In [141]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sklearn as sk
from os import walk
from os import listdir
from os.path import isfile, join
from sklearn.model_selection import train_test_split
from tpot import TPOTClassifier

In [ ]:
#relax = pd.read_csv("../Muse Data/novonarelax.csv", names = np.arange(0,220))
basePath = "../Muse Data/"
allData = pd.DataFrame()

for (f) in listdir(basePath):
    if not isfile(join(basePath, f)): continue
    if f.startswith("."): continue
    #print(f)
    singleFileData = pd.read_csv(join(basePath, f), header=None)
    singleFileData = singleFileData.iloc[:,0:110] #cut it down like before
    singleFileData['originalRow'] = list(singleFileData.index)
    #print(singleFileData.shape)
    #print(singleFileData.head())
    singleFileData = pd.melt(singleFileData, var_name="originalColumn", id_vars = "originalRow")
    singleFileData['epoch'] = singleFileData["originalRow"].apply(lambda x: x//4)
    singleFileData['chan'] = singleFileData["originalRow"].apply(lambda x: x % 4 + 1)
    singleFileData['variableID'] = singleFileData["originalColumn"].astype(str) +".originalColumn."  +singleFileData["chan"].astype(str) +".chan"
    singleFileData = singleFileData.pivot(index='epoch', values = 'value', columns='variableID')
    singleFileData['source'] = f
    name = f.replace('.csv','')
    name = name.replace('focus','')
    name = name.replace('relax','')
    singleFileData['user'] = name
    if "relax" in f:
        singleFileData['class'] = "relax"
    elif "focus" in f:
        singleFileData['class'] = "focus"
    else:
        continue
    allData = pd.concat([allData, singleFileData])
allData.shape
print(allData.head())

In [137]:
allData['class'] = allData['class'].astype('category')
taskInstancesTarget = allData['class']
taskInstancesTarget = taskInstancesTarget.cat.codes #switch from categories to numbers

taskInstancesData = allData.drop('class', 1)
groups = taskInstancesData['user'] #loose information about subject, use it for cross validation
taskInstancesData = taskInstancesData.drop('source', 1)
taskInstancesData = taskInstancesData.drop('user', 1)
#taskInstancesData = pd.get_dummies(taskInstancesData) #transform catagorical to dummy vars
taskInstancesData


variableID  0.originalColumn.1.chan  0.originalColumn.2.chan  \
epoch                                                          
0                          3.347379                 3.922594   
1                          3.098009                 3.419794   
2                          3.813447                 0.630652   
3                          4.568050                 3.700144   
4                          4.249591                 2.235759   

variableID  0.originalColumn.3.chan  0.originalColumn.4.chan  \
epoch                                                          
0                          0.576974                 4.030018   
1                          4.943621                 2.093604   
2                          4.498111                 3.138631   
3                          3.112263                 3.405895   
4                          4.125033                 1.713787   

variableID  1.originalColumn.1.chan  1.originalColumn.2.chan  \
epoch                                                          
0                          3.931242                 4.075038   
1                          3.241444                 4.093690   
2                          5.820545                 3.431472   
3                          4.791244                 4.319584   
4                          4.812217                 4.111675   

variableID  1.originalColumn.3.chan  1.originalColumn.4.chan  \
epoch                                                          
0                          4.819483                 4.003981   
1                          5.205723                 4.121692   
2                          5.243981                 4.631442   
3                          4.669425                 4.950427   
4                          3.983703                 4.011845   

variableID  10.originalColumn.1.chan  10.originalColumn.2.chan  ...    \
epoch                                                           ...     
0                           4.289408                  2.527719  ...     
1                           4.172744                  3.132734  ...     
2                           4.202472                  3.114908  ...     
3                           4.046890                  3.594767  ...     
4                           4.947804                  4.088056  ...     

variableID  98.originalColumn.2.chan  98.originalColumn.3.chan  \
epoch                                                            
0                           3.304977                  2.142997   
1                           2.823127                  3.844751   
2                           3.536290                  3.344073   
3                           2.990566                  2.213034   
4                           2.299560                  3.586225   

variableID  98.originalColumn.4.chan  99.originalColumn.1.chan  \
epoch                                                            
0                           1.864090                  2.256091   
1                           3.080960                  3.992207   
2                           2.400871                  3.745031   
3                           3.525030                  3.665514   
4                           2.756181                  3.232633   

variableID  99.originalColumn.2.chan  99.originalColumn.3.chan  \
epoch                                                            
0                           3.460892                  3.135498   
1                           3.624723                  4.135198   
2                           3.918821                  3.892224   
3                           3.072042                  3.500677   
4                           3.529257                  3.874831   

variableID  99.originalColumn.4.chan          source   user  class  
epoch                                                               
0                           2.223744  chrisfocus.csv  chris  focus  
1                           3.547663  chrisfocus.csv  chris  focus  
2                           3.532747  chrisfocus.csv  chris  focus  
3                           3.365765  chrisfocus.csv  chris  focus  
4                           3.259507  chrisfocus.csv  chris  focus  

[5 rows x 443 columns]
Out[137]:
variableID 0.originalColumn.1.chan 0.originalColumn.2.chan 0.originalColumn.3.chan 0.originalColumn.4.chan 1.originalColumn.1.chan 1.originalColumn.2.chan 1.originalColumn.3.chan 1.originalColumn.4.chan 10.originalColumn.1.chan 10.originalColumn.2.chan ... 97.originalColumn.3.chan 97.originalColumn.4.chan 98.originalColumn.1.chan 98.originalColumn.2.chan 98.originalColumn.3.chan 98.originalColumn.4.chan 99.originalColumn.1.chan 99.originalColumn.2.chan 99.originalColumn.3.chan 99.originalColumn.4.chan
epoch
0 3.347379 3.922594 0.576974 4.030018 3.931242 4.075038 4.819483 4.003981 4.289408 2.527719 ... 2.735172 2.734484 3.082056 3.304977 2.142997 1.864090 2.256091 3.460892 3.135498 2.223744
1 3.098009 3.419794 4.943621 2.093604 3.241444 4.093690 5.205723 4.121692 4.172744 3.132734 ... 3.211310 3.003500 4.070686 2.823127 3.844751 3.080960 3.992207 3.624723 4.135198 3.547663
2 3.813447 0.630652 4.498111 3.138631 5.820545 3.431472 5.243981 4.631442 4.202472 3.114908 ... 2.869345 2.902601 3.600004 3.536290 3.344073 2.400871 3.745031 3.918821 3.892224 3.532747
3 4.568050 3.700144 3.112263 3.405895 4.791244 4.319584 4.669425 4.950427 4.046890 3.594767 ... 2.686930 2.962952 3.426544 2.990566 2.213034 3.525030 3.665514 3.072042 3.500677 3.365765
4 4.249591 2.235759 4.125033 1.713787 4.812217 4.111675 3.983703 4.011845 4.947804 4.088056 ... 2.838686 3.065413 3.067536 2.299560 3.586225 2.756181 3.232633 3.529257 3.874831 3.259507
5 2.921151 1.247593 4.882949 3.953637 4.088597 5.006454 5.208285 4.855020 4.772075 0.832822 ... 3.389917 2.257431 3.426572 3.502524 3.241918 2.592785 3.440996 3.130509 3.659813 3.141567
6 4.149429 4.015063 3.880120 3.589195 5.279610 3.881867 5.534196 4.245647 4.613681 3.519436 ... 1.720762 2.115118 0.848753 2.942815 2.091096 2.498783 2.903763 2.825080 2.790937 3.150382
7 4.791668 3.844846 2.560082 3.910423 4.914403 3.421144 2.595327 3.890560 4.403010 4.271159 ... 2.424754 3.358113 3.730447 2.735974 2.466114 2.913223 4.054744 3.390924 3.872206 2.487035
8 3.064607 3.125479 5.413046 2.840387 4.008005 4.262903 5.464248 4.146316 3.733918 3.766099 ... 2.685586 2.799285 2.985269 2.205963 2.357228 3.015734 1.862222 3.920350 3.495498 3.769358
9 0.734629 3.552746 3.956709 3.311105 4.070433 4.198953 4.788826 3.551729 4.394979 3.138979 ... 2.871439 3.283018 3.355047 3.015315 2.530201 2.195652 3.959013 3.315885 3.625882 3.480884
10 4.069254 3.929880 3.422276 2.816773 4.316570 4.102171 3.855225 3.774444 4.407372 3.242270 ... 2.724803 3.324908 2.797439 3.328250 3.289727 3.446897 3.695348 3.761917 3.877537 3.098915
11 4.201542 3.161880 4.332460 3.771248 4.705073 3.930838 4.569869 4.363993 4.801332 3.917685 ... 2.833843 3.453647 2.392560 3.650866 0.444743 3.889272 3.470286 4.146879 3.883804 4.196105
12 3.891622 3.959751 4.943475 -1.396367 4.927269 4.402476 5.742325 4.262489 5.058930 3.002453 ... 2.696241 3.248863 3.499220 2.033731 2.636093 2.846894 3.457376 3.654438 3.729653 3.201829
13 3.120879 3.300459 3.914877 2.502367 4.551734 4.133098 4.028651 2.542383 3.994427 3.432941 ... 2.981701 2.256510 1.461731 2.404430 1.969643 2.638910 2.485913 1.823576 2.601005 3.054855
14 2.254437 1.940516 3.693879 3.340573 3.599377 4.605322 4.097221 3.932888 4.694945 3.875309 ... 3.182842 3.480038 2.348114 2.749011 3.240837 3.118355 3.292516 3.678795 3.170890 3.218843
15 3.604621 3.699194 5.236961 3.493270 4.547742 4.565039 5.334054 4.303559 3.742077 3.877199 ... 1.671380 1.225315 1.413109 3.308732 2.892749 2.241621 3.651008 3.626552 3.676577 3.541767
16 5.851729 4.160017 3.479897 3.120531 6.613521 4.233246 5.108249 5.024500 3.020357 3.724953 ... 2.206809 3.427096 2.945178 3.652092 3.265846 3.028515 3.462890 3.692343 3.836778 3.605609
17 5.893486 3.639671 4.703190 4.468746 5.936065 4.323497 5.186954 4.865067 4.439698 4.021733 ... 2.447225 3.355378 2.912528 3.276767 2.955328 2.446172 3.636540 3.840569 3.948530 3.549892
18 2.645602 2.512104 5.453631 4.563753 4.400667 3.775893 6.042422 4.306783 3.303534 3.195990 ... 3.390496 3.162356 3.726516 3.582741 3.275550 2.691963 3.590400 3.643999 3.520477 3.104067
19 4.659342 3.846752 4.123329 4.358699 4.786035 4.496532 5.075993 4.229061 4.742643 3.284575 ... 3.001426 2.146505 3.288851 2.777328 2.752152 1.425346 3.450737 3.386366 3.703772 3.162932
20 2.697908 3.335723 5.424710 2.082104 3.588352 3.876893 5.833543 4.294408 3.425891 2.898351 ... 2.363150 2.410524 2.952884 3.612637 3.255531 2.993480 3.839298 3.909550 3.842676 3.487473
21 3.538564 1.752255 4.555118 3.322151 3.664648 3.838564 4.513907 3.528019 4.333111 2.350710 ... 2.729032 3.469420 3.329896 1.862167 1.660961 3.272306 2.665762 3.529093 3.099277 3.745000
22 4.632369 2.506587 5.130536 4.192742 4.343385 3.047183 5.425307 3.962344 3.782265 3.388013 ... 3.019507 3.202345 2.932201 3.456856 3.414189 3.396552 3.078795 3.033416 3.894821 2.779740
23 3.885101 4.271117 5.134657 3.708185 4.445012 4.633379 5.249836 2.856698 3.849333 3.713999 ... 3.055464 2.633825 2.039767 2.261611 3.245742 2.483886 3.020668 3.597381 3.565528 2.641674
24 4.092290 3.894280 3.735168 3.178737 4.247038 4.018584 4.464507 2.850566 4.075825 3.717475 ... 1.625549 1.656691 3.398637 2.906457 3.004416 2.551595 3.719601 3.640809 3.647307 2.446618
25 4.683437 4.631958 3.864363 1.933524 5.066507 4.664135 4.817661 4.740607 4.633970 2.666014 ... 2.723206 3.334739 2.459717 2.692150 2.726453 2.998611 3.664287 3.463570 3.534985 3.765643
26 1.692266 3.489568 4.937346 2.583811 3.788823 3.952483 5.080831 4.382755 3.946792 3.930902 ... 3.048974 3.032980 3.857580 0.947880 2.988059 2.708149 3.352103 1.953421 3.028731 3.151624
27 4.424360 2.223210 0.778978 4.364839 4.816126 3.521518 4.353939 4.473333 4.314168 3.869049 ... 2.762401 2.258963 1.182106 2.970341 2.526998 1.787706 2.883174 3.440585 3.072002 3.167458
28 2.976518 3.166772 3.304294 2.802796 3.783525 4.217820 2.928997 4.391334 4.249507 3.193848 ... 2.973767 1.059853 2.628984 3.289814 2.972823 3.493414 3.780196 3.627784 3.840794 3.931233
29 2.053590 3.572137 4.315622 3.564013 4.462465 4.107465 4.809213 4.432893 4.443831 2.829249 ... 2.474006 2.973758 3.459917 3.619674 3.177025 2.809332 3.011845 3.681581 3.448287 3.468361
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
10 5.607295 3.567706 5.014509 5.721536 6.486480 6.171319 6.107629 6.513922 5.436821 5.357609 ... 3.819092 4.090940 4.499219 3.460076 2.836531 4.724949 4.916944 4.038652 3.231912 5.075339
11 5.526713 5.089861 6.322809 4.736695 5.821517 5.594780 6.733763 5.645194 4.759136 4.350413 ... 2.985640 3.355795 3.620405 3.251433 3.404827 3.840848 4.811364 4.179463 2.707676 4.802420
12 5.805463 6.002592 5.905821 5.886561 6.375750 6.410046 6.384027 6.435581 4.019235 3.498737 ... 3.752924 4.479862 4.681543 3.893185 4.004712 4.552805 4.996125 4.323640 4.413336 5.113447
13 5.127918 4.589741 5.067250 5.212510 5.839280 5.542868 5.049180 6.026713 3.866822 4.613245 ... 3.681762 4.869128 4.882684 3.802938 3.473142 4.796897 5.048939 4.409972 3.699586 5.200444
14 4.814339 5.131501 5.184867 4.259249 5.356190 5.210868 5.811849 6.479200 4.863374 4.335520 ... 3.451844 3.543295 3.367481 3.951408 3.391101 4.425220 4.645353 4.231638 3.479093 4.968906
15 6.007476 4.926854 5.169822 6.088074 6.087428 5.184142 5.309464 6.191888 3.755298 4.659191 ... 3.476154 3.533474 3.878648 3.563201 3.530674 4.118099 4.767584 3.921320 3.838482 4.573218
16 5.306625 4.687372 5.399844 2.713542 6.140818 5.739044 6.013072 5.134364 4.421676 4.727808 ... 3.691187 3.254334 3.903665 2.764987 3.413388 4.086061 4.653125 3.356746 3.487194 4.875062
17 3.786949 1.357976 3.726758 1.982955 5.829047 5.303277 5.588874 5.343609 4.877752 4.240776 ... 3.812953 4.611335 3.441681 3.302543 3.033641 4.447593 4.340655 3.867188 3.892529 3.907470
18 5.059399 4.487735 5.214720 5.413317 5.571450 5.146249 4.543321 5.665423 4.886701 4.600407 ... 3.921280 3.998838 3.468264 3.695196 3.205283 3.900531 4.374712 3.894573 3.265062 4.962569
19 5.455513 3.527765 4.174697 5.843351 5.672028 6.593520 4.317458 6.034998 5.847869 4.618932 ... 3.356555 4.376316 4.161039 3.780821 3.094458 4.899926 4.703525 5.278107 3.848602 5.584153
20 5.430236 6.473985 4.953525 3.289477 6.021010 6.619530 4.086408 5.660364 4.634722 4.217476 ... 3.025307 4.317685 3.959237 4.206417 2.289493 4.430648 4.697751 4.424098 3.819080 4.666237
21 4.372993 5.151210 5.636453 5.943089 5.568368 5.560648 6.139599 5.503583 5.065893 4.813980 ... 3.492929 5.120545 4.199851 3.825603 3.309298 2.157835 4.382940 5.022696 3.531271 4.855925
22 6.372010 5.857866 5.302560 6.301160 6.310809 6.911256 5.587409 6.140643 3.658794 3.888509 ... 3.009566 4.097179 4.196860 4.897473 3.143923 5.005931 4.328150 4.975262 3.603869 5.302584
23 5.999632 5.757645 5.365221 5.868504 5.627507 6.014991 5.532793 5.908661 5.147679 4.557087 ... 1.706287 4.082982 4.059813 4.281488 3.556235 3.755402 4.435043 4.676115 4.090917 4.727311
24 5.809400 3.966558 4.769970 4.874529 6.273107 4.926049 5.526899 6.125106 5.669473 4.220743 ... 4.271838 4.281120 3.997983 3.892680 3.498066 4.107957 4.219202 4.625889 3.227052 4.326955
25 2.813381 3.973847 5.000486 5.362628 6.025172 4.574582 5.575362 5.276353 5.591877 4.115054 ... 3.371799 4.920393 3.241106 4.694307 3.071132 4.947823 3.989398 5.282124 3.588496 5.253612
26 5.279986 3.884931 2.894732 5.235798 5.107825 5.468906 5.841897 5.235743 5.657126 5.137996 ... 1.378042 3.043347 4.228358 4.461551 3.491073 3.912321 4.239959 4.489266 3.969810 4.274055
27 5.619839 5.913186 5.233015 4.955209 5.600658 6.620265 4.712773 5.621174 5.529987 4.906236 ... 2.543406 3.983471 4.588048 4.475174 3.186102 4.331064 4.912041 4.916760 3.967446 4.070698
28 5.111303 5.108199 3.816684 5.080682 6.580273 6.256079 5.606414 6.329681 6.038283 5.041672 ... 3.804686 3.089181 3.877622 4.405421 4.164347 3.740614 4.604124 4.927151 4.206158 4.457131
29 4.758999 5.057867 3.088382 5.351760 6.239748 5.382525 5.431493 6.513286 5.122676 4.416362 ... 3.749465 4.235079 3.015360 4.262442 4.156645 4.512072 3.812291 5.142352 4.210889 4.936300
30 4.931864 4.548870 3.553542 5.135565 6.481079 5.576549 5.601718 6.566112 6.055384 4.326374 ... 3.914212 4.481454 4.264532 3.929032 3.898189 3.930299 4.605902 4.703644 3.520392 4.204051
31 6.360191 1.470537 5.532834 6.229746 6.514647 5.007563 6.031443 6.309885 5.622291 4.189989 ... 3.490948 2.777921 3.541679 4.122182 2.953278 3.614130 4.205146 4.773656 4.325754 3.965067
32 3.662398 4.492980 5.804762 4.854507 4.227894 4.025107 6.492764 4.126954 5.549627 4.925130 ... 3.980258 3.951587 4.297670 4.407453 3.503835 3.874236 4.359057 4.477769 3.262217 4.532589
33 4.475780 5.527996 5.864883 5.744544 5.729197 5.498094 6.054205 5.814300 5.370716 4.664112 ... 3.434479 3.711394 3.827005 4.127634 4.052745 3.228881 3.867979 4.791114 4.090326 4.375690
34 3.995213 3.402875 4.583006 5.020864 4.928695 4.745383 5.569933 4.887435 5.038396 3.899942 ... 3.882227 4.498115 4.075468 4.630077 3.567558 4.247223 4.371348 4.869270 3.170104 4.712457
35 5.920833 5.209316 4.030833 5.904095 5.973547 5.416812 5.142854 5.933479 5.694769 4.577059 ... 3.330231 3.996627 4.173782 3.507642 3.476227 3.566311 3.763514 4.929296 4.228534 4.056913
36 4.953954 3.953080 5.108241 5.022202 5.614534 4.702110 5.784135 5.581157 6.043006 5.046572 ... 4.511589 3.973211 3.886744 4.143581 3.442380 3.736792 4.770883 4.998651 3.803632 4.092388
37 5.186008 4.751489 3.586117 5.240365 5.877414 5.262638 5.770140 5.899083 6.010110 4.826298 ... 4.079463 4.152309 4.155369 4.605407 2.330116 4.202804 3.641217 5.095412 3.376215 4.652654
38 4.859955 5.027041 5.904876 5.085668 5.077137 5.217168 6.518223 5.358655 3.983219 4.975109 ... 4.353100 4.160216 4.372827 4.752237 3.878716 3.808957 4.730868 5.055835 3.329111 4.713250
39 4.414922 4.773204 2.909451 4.385105 5.218198 6.221910 6.140058 5.927251 5.378706 4.009006 ... 3.677192 4.232053 4.299733 4.849951 3.962618 4.826359 4.570068 5.309988 4.442783 5.144439

270 rows × 440 columns


In [143]:
#should be splitting on 'user' - this mixes users
X_train, X_test, y_train, y_test = train_test_split(taskInstancesData, taskInstancesTarget,train_size=0.75, test_size=0.25)

tpot = TPOTClassifier(generations=10, population_size=30, verbosity=2)
tpot.fit(X_train, y_train)
print(tpot.score(X_test, y_test))


Optimization Progress:   0%|          | 0/330 [00:00<?, ?pipeline/s]
Version 0.6.8 of tpot is outdated. Version 0.7.0 was released Wednesday March 22, 2017.
Optimization Progress:  10%|█         | 33/330 [00:34<01:37,  3.05pipeline/s]
Generation 1 - Current best internal CV score: 0.5094101885768553
Optimization Progress:  19%|█▉        | 63/330 [00:50<02:08,  2.07pipeline/s]
Generation 2 - Current best internal CV score: 0.5094101885768553
Optimization Progress:  30%|██▉       | 98/330 [01:04<00:36,  6.29pipeline/s]
Generation 3 - Current best internal CV score: 0.5094101885768553
Optimization Progress:  35%|███▍      | 115/330 [01:11<01:11,  3.02pipeline/s]
Generation 4 - Current best internal CV score: 0.5094101885768553
Optimization Progress:  47%|████▋     | 156/330 [01:25<00:35,  4.97pipeline/s]
Generation 5 - Current best internal CV score: 0.5094101885768553
Optimization Progress:  55%|█████▍    | 180/330 [01:26<00:09, 15.69pipeline/s]
Generation 6 - Current best internal CV score: 0.5094101885768553
Optimization Progress:  65%|██████▌   | 216/330 [01:44<01:18,  1.44pipeline/s]
Generation 7 - Current best internal CV score: 0.5096306471306471
Optimization Progress:  73%|███████▎  | 242/330 [01:44<00:15,  5.57pipeline/s]
Generation 8 - Current best internal CV score: 0.5096306471306471
Optimization Progress:  85%|████████▍ | 279/330 [01:45<00:01, 26.17pipeline/s]
Generation 9 - Current best internal CV score: 0.5155830280830281
Optimization Progress:  91%|█████████ | 300/330 [01:45<00:00, 41.57pipeline/s]
Generation 10 - Current best internal CV score: 0.5155830280830281
                                                                              
Best pipeline: BernoulliNB(input_matrix, 0.83000000000000007, 5.0)
0.415019762846


In [ ]:


In [ ]:


In [ ]: