In [2]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cross_decomposition import PLSRegression
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from scipy.stats import randint as sp_randint
from sklearn.model_selection import RandomizedSearchCV
get_ipython().magic('matplotlib inline')

In [3]:
zspectra = pd.read_csv('centered_cest.csv', header = None).values
t2_signal = pd.read_csv('t2_signal.csv', header = None).values
t2 = pd.read_csv('t2_par.csv', header = None).values
conc = pd.read_csv('concentration.csv', header = None).values
pH = pd.read_csv('pH.csv', header = None).values
concs = pd.read_csv('concs.csv', header = None).values
pHs = pd.read_csv('pHs.csv', header = None).values

In [4]:
def mymetric(yexp, ypred):
    d = np.sum((yexp - ypred)**2 )
    d = d / ypred.shape[0]
    d = np.sqrt(d)
    d = d / np.mean(yexp)
    d = 100 * d
    return d

In [5]:
Y = conc
Ys = np.sort(concs)

In [6]:
Ys


Out[6]:
array([[  27.87,   34.84,   43.55,   54.44,   68.05,   85.07,  106.33,
         132.92,  166.15]])

In [13]:
X = zspectra
num_components = 30


Error = np.zeros((num_components -1,1))

for idx,K in enumerate(np.arange(1,num_components)):
    X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
    y_test=y_test/y_train.max()
    y_train=y_train/y_train.max()
    pls = PLSRegression(n_components = K, scale = False)
    pls.fit(X_train, y_train)
    y_hat = pls.predict(X_test)
    Error[idx] = mymetric(y_test , y_hat)

plt.plot( np.arange(1,num_components), Error ,'o-')
print('Min = ', Error.min(),'%')


Min =  27.273099367 %

In [14]:
y_hat


Out[14]:
array([[ 0.18360978],
       [ 0.12169511],
       [ 0.80538581],
       [ 0.85703244],
       [ 0.18646708],
       [ 0.20486126],
       [ 0.19993669],
       [ 0.87285847],
       [ 0.62178391],
       [ 0.2043197 ],
       [ 0.62897609],
       [ 0.54654137],
       [ 0.35877768],
       [ 0.63187567],
       [ 0.59631839],
       [ 0.40839208],
       [ 0.49789556],
       [ 0.1800769 ],
       [ 0.33593338],
       [ 0.2280763 ],
       [ 0.76465577],
       [ 0.87370473],
       [ 0.28489761],
       [ 0.83235266],
       [ 0.09721287],
       [ 0.80395082],
       [ 0.31281651],
       [ 0.34458717],
       [ 0.58334963],
       [ 0.27126421],
       [ 0.70671346],
       [ 0.50899566],
       [ 0.20347865],
       [ 0.30396689],
       [ 0.37709222],
       [ 0.45057312],
       [ 0.35823331],
       [ 0.68136641],
       [ 0.28507558],
       [ 0.476824  ],
       [ 0.32637133],
       [ 0.31712089],
       [ 0.79516889],
       [ 0.67017012],
       [ 0.3817333 ],
       [ 0.8154255 ],
       [ 0.13745626],
       [ 0.58473984],
       [ 0.69595839],
       [ 0.60229664],
       [ 0.53249325],
       [ 0.25709166],
       [ 0.24641723],
       [ 0.76217631],
       [ 0.4032917 ],
       [ 0.31431702],
       [ 0.83108064],
       [ 0.20801202],
       [ 0.55356955],
       [ 0.83615699],
       [ 0.77239049],
       [ 0.52155496],
       [ 0.84592697],
       [ 0.19028156],
       [ 0.23278411],
       [ 0.79031937],
       [ 0.64734702],
       [ 0.66934244],
       [ 0.24496046],
       [ 0.53693139],
       [ 0.83428008],
       [ 0.6646745 ],
       [ 0.76240471],
       [ 0.4786849 ],
       [ 0.37798628],
       [ 0.40091629],
       [ 0.2518612 ],
       [ 0.7173849 ],
       [ 0.42101079],
       [ 0.53484838],
       [ 0.49181251],
       [ 0.3298854 ],
       [ 0.28134488],
       [ 0.65075106],
       [ 0.17809431],
       [ 0.56745591],
       [ 0.25046215],
       [ 0.18351536],
       [ 0.71266355],
       [ 0.85121139],
       [ 0.62181197],
       [ 0.53109486],
       [ 0.92457272],
       [ 0.76819746],
       [ 0.86179771],
       [ 0.40293715],
       [ 0.8201454 ],
       [ 0.21355372],
       [ 0.86631355],
       [ 0.0674075 ],
       [ 0.81686885],
       [ 0.37799996],
       [ 0.41633229],
       [ 0.7581274 ],
       [ 0.77672078],
       [ 0.231322  ],
       [ 0.51608725],
       [ 0.02547765],
       [ 0.32555313],
       [ 0.77133233],
       [ 0.13428355],
       [ 0.54330951],
       [ 0.8340292 ],
       [ 0.45092726],
       [ 0.7897032 ],
       [ 0.63196143],
       [ 0.34913307],
       [ 0.33101478],
       [ 0.77518766],
       [ 0.88783767],
       [ 0.23613508],
       [ 0.40954859],
       [ 0.20075882],
       [ 0.50013543],
       [ 0.25819414],
       [ 0.27527676],
       [ 0.40614905],
       [ 0.19166438],
       [ 0.63935222],
       [ 0.35535691],
       [ 0.87838497],
       [ 0.45607338],
       [ 0.31214878],
       [ 0.60685832],
       [ 0.46543271],
       [ 0.54311404],
       [ 0.28332419],
       [ 0.66682414],
       [ 0.38364831],
       [ 0.57854045],
       [ 0.23839099],
       [ 0.19726298],
       [ 0.50589258],
       [ 0.26547101],
       [ 0.39699166],
       [ 0.97167061],
       [ 0.22569761],
       [ 0.59375068],
       [ 0.64535411],
       [ 0.27161189],
       [ 0.15302948],
       [ 0.62328354],
       [ 0.59498066],
       [ 0.84534921],
       [ 0.50948325],
       [ 0.66361125],
       [ 0.17200194],
       [-0.01312525],
       [ 0.38604072],
       [ 0.296066  ],
       [ 0.32272533],
       [ 0.18297506],
       [ 0.75402636],
       [ 0.18787283],
       [ 0.7831343 ],
       [ 0.75151889],
       [ 0.3733748 ],
       [ 0.81325286],
       [ 0.04210974],
       [ 0.7820016 ],
       [ 0.10372106],
       [ 0.80822182],
       [ 0.5491467 ],
       [ 0.24403475],
       [ 0.19567272],
       [ 0.23307928],
       [ 0.62776482],
       [ 0.8962192 ],
       [ 0.40440062],
       [ 0.53044575],
       [ 0.29655796],
       [ 0.45897265],
       [ 0.85371898],
       [ 0.7381796 ],
       [ 0.35925379],
       [ 0.72444806],
       [ 0.46100887],
       [ 0.68268554],
       [ 0.2124191 ],
       [ 0.85769961],
       [ 0.37760401],
       [ 0.14839852],
       [ 0.23489804],
       [ 0.28152613],
       [ 0.78590697],
       [ 0.20521451],
       [ 0.321308  ],
       [ 1.03708741],
       [ 0.66520561],
       [ 0.85052552],
       [ 0.58709443],
       [ 0.7474336 ],
       [ 0.75015874],
       [ 0.76396741],
       [ 0.72250276],
       [ 0.56606944],
       [ 0.44679854],
       [ 0.27053961],
       [ 0.41135438],
       [ 0.06328016],
       [ 0.82863638],
       [ 0.65698412],
       [ 0.87198299],
       [ 0.09186839],
       [ 0.35635621],
       [ 0.15577902],
       [ 0.4060758 ],
       [ 0.69997439],
       [ 0.56705375],
       [ 0.44991619],
       [ 0.74973592],
       [ 0.45367396],
       [ 0.51477891],
       [ 0.11045299],
       [ 0.64172434],
       [ 0.67960593],
       [ 0.56534247],
       [ 0.39106039],
       [ 0.31848838],
       [ 0.36505591],
       [ 0.62741251],
       [ 0.489242  ],
       [ 0.69933936],
       [ 0.10638434],
       [ 0.7446976 ],
       [ 0.39600164],
       [ 0.52298857],
       [ 0.26515704],
       [ 0.57850572],
       [ 0.20500921],
       [ 0.50021086],
       [ 0.71453999],
       [ 0.51315719],
       [ 0.06191875],
       [ 0.30422691],
       [ 0.12710428],
       [ 0.61045351],
       [ 0.86004556],
       [ 0.29169098],
       [ 0.59784415],
       [ 0.71016772],
       [ 0.6152291 ],
       [ 0.50975719],
       [ 0.63638523],
       [ 0.29882668],
       [ 0.77484562],
       [ 0.53262592],
       [ 0.91345055],
       [ 0.10078221],
       [ 0.77990924],
       [ 0.49357179],
       [ 0.18955272],
       [ 0.60778151],
       [ 0.01619287],
       [ 0.22413011],
       [ 0.36987008],
       [ 0.86953138],
       [-0.02469732],
       [ 0.24794704],
       [ 0.49476721],
       [ 0.754114  ],
       [ 0.41259422],
       [ 0.78216132],
       [ 0.00499834],
       [ 0.58323696],
       [ 0.5116906 ],
       [ 0.70828422],
       [ 0.73664425],
       [ 0.77952272],
       [ 0.45698918],
       [ 0.30183348],
       [ 0.91104749],
       [ 0.17300323],
       [ 0.31603912],
       [ 0.28165663],
       [ 0.46209681],
       [ 0.05175485],
       [ 0.49047903],
       [ 0.76515615],
       [-0.00814035],
       [ 0.75791159],
       [ 0.16068779],
       [ 0.70559013],
       [ 1.03581616],
       [ 0.55289655],
       [ 0.58074035],
       [ 0.39397319],
       [ 0.11914408],
       [ 0.34805594],
       [ 0.82014266],
       [ 0.85128568],
       [ 0.66185097],
       [ 0.62905933],
       [ 0.64475232],
       [ 0.52974762],
       [ 0.82111189],
       [ 0.22197391],
       [ 0.68694034],
       [ 0.44922012],
       [ 0.52231367],
       [ 0.71460213],
       [ 0.52875899],
       [ 0.85269509],
       [ 0.81216657],
       [ 0.32140853],
       [ 0.65962184],
       [ 0.71837545],
       [ 0.98059227],
       [ 0.4645057 ],
       [ 0.81171934],
       [ 0.82323739],
       [ 0.58403383],
       [ 0.81329764],
       [ 0.00184   ],
       [ 0.22910971],
       [ 0.40415825],
       [ 0.18460279],
       [ 0.63211568],
       [ 0.40927665],
       [ 0.34629592],
       [ 0.39397884],
       [ 0.11236051],
       [ 0.59007894],
       [ 0.21142245],
       [ 0.41766755],
       [ 0.51332085],
       [ 0.7897191 ],
       [ 0.20511907],
       [ 0.58195824],
       [ 0.37011706],
       [ 0.35736087],
       [ 0.83579023],
       [ 0.61177403],
       [ 0.43493197],
       [ 0.47278981],
       [ 0.32901652],
       [ 0.42667976],
       [ 0.23933433],
       [ 0.3863625 ],
       [ 0.85769143],
       [ 0.6372298 ],
       [ 0.64465796],
       [ 0.29538681],
       [ 0.83935783],
       [ 0.766442  ],
       [ 0.44579309],
       [ 0.90097648],
       [ 0.6790141 ],
       [ 0.59177612],
       [ 0.55506301],
       [ 0.17059   ],
       [ 0.59248142],
       [ 0.90690626],
       [ 0.23727049],
       [ 0.15931979],
       [ 0.91599695],
       [ 0.93640565],
       [ 0.32528844],
       [ 0.67032661],
       [ 0.43009002],
       [ 0.61908666],
       [ 0.47269411],
       [ 0.35851904],
       [ 0.19200061],
       [ 0.51846076],
       [ 0.54095917],
       [ 0.73735569],
       [ 0.6259394 ],
       [ 0.45156376],
       [ 0.5847199 ],
       [ 0.32808222],
       [ 0.80063768],
       [ 0.07281623],
       [ 0.13116502],
       [ 0.49481655],
       [ 0.86863838],
       [ 0.67665637],
       [ 0.20648905],
       [ 0.88971352],
       [ 0.86512199],
       [ 0.88291592],
       [ 0.74248748],
       [ 0.77184677],
       [ 0.3493882 ],
       [ 0.12447243],
       [ 0.20088295],
       [ 0.80728266],
       [ 0.66976837],
       [ 0.79732002],
       [ 0.07599184],
       [ 0.6669839 ],
       [ 0.70368488],
       [ 0.7621028 ],
       [ 0.79897788],
       [ 0.27415083],
       [ 0.72430263],
       [ 0.64377068],
       [ 0.7672813 ],
       [ 0.12078792],
       [ 0.54642175],
       [ 0.52196785],
       [ 0.26904492],
       [ 0.49394936],
       [ 0.66025705],
       [ 0.49820252],
       [ 0.63181581],
       [ 0.12530974],
       [ 0.46301728],
       [ 0.1970502 ],
       [ 0.28199945],
       [ 0.7863125 ],
       [ 0.59514301],
       [ 0.51047246],
       [ 0.82619705],
       [ 0.14375909],
       [ 0.12024228],
       [ 0.41338743],
       [ 0.28299201],
       [ 0.81097153],
       [ 0.80938087],
       [ 0.41694188],
       [ 0.63229923],
       [ 0.2874394 ],
       [ 0.61199399],
       [ 0.34786639],
       [ 0.74862086],
       [ 0.08569967],
       [ 0.59913359],
       [ 0.176815  ],
       [-0.00535672],
       [ 0.11355987],
       [ 0.31290339],
       [ 0.34839918],
       [ 0.38462837],
       [ 0.63038824],
       [ 0.36904418],
       [ 0.86544204],
       [ 0.12234444],
       [ 0.68401331],
       [ 0.89130341],
       [ 0.1171481 ],
       [ 0.14010272],
       [ 0.23061896],
       [ 0.31516402],
       [ 0.36277747],
       [ 0.68449714],
       [ 0.65927232],
       [ 0.41808339],
       [ 0.71932202],
       [ 0.14921565],
       [ 0.55723788],
       [ 0.72124161],
       [ 0.3999923 ],
       [ 0.57528759],
       [ 0.2553331 ],
       [ 0.48583352],
       [ 0.69639406],
       [ 0.31896597],
       [ 0.19293306],
       [ 0.41849065],
       [ 0.36877383],
       [ 0.36521376],
       [ 0.22198736],
       [ 0.22684487],
       [ 0.89683255],
       [ 0.13961925],
       [ 0.30946539],
       [ 0.89552296],
       [ 0.57119122],
       [ 0.22605887],
       [ 0.47508467],
       [ 0.80480744],
       [-0.02980536],
       [ 0.97986096],
       [ 0.47371588],
       [ 0.37213331],
       [ 0.63906314],
       [ 0.55296504],
       [ 0.34352974],
       [ 0.19981214],
       [ 0.7890953 ],
       [ 0.40002994],
       [ 0.6153956 ],
       [ 0.37997995],
       [ 0.41568906],
       [ 0.1382163 ],
       [ 0.34854177],
       [ 0.80933944],
       [ 0.53142405],
       [ 0.851965  ],
       [ 0.42350128],
       [ 0.34967941],
       [ 0.56075196],
       [ 0.44965983],
       [ 0.48594153],
       [ 0.35488436],
       [ 0.71968036],
       [ 0.68430854],
       [ 0.5522705 ],
       [ 0.68407233],
       [ 0.72726461],
       [ 0.43222702],
       [ 0.83244573],
       [ 0.17730258],
       [ 0.82177606],
       [ 0.37791717],
       [ 0.48946563],
       [ 0.49741185],
       [ 0.63220987],
       [ 0.10074919],
       [ 0.84677042],
       [ 0.28202735],
       [ 0.34525822],
       [ 0.41678691],
       [ 0.67863141],
       [ 0.38821012],
       [ 0.92533812],
       [ 0.71592957],
       [ 0.58920908],
       [ 0.63260026],
       [ 0.15323496],
       [ 0.25129967],
       [ 0.34962926],
       [ 0.17851712],
       [ 0.55380858],
       [ 0.23318623],
       [ 0.22951485],
       [ 0.13672239],
       [ 0.15125008],
       [ 0.7623244 ],
       [ 0.26818447],
       [ 0.60047871],
       [ 0.37273462],
       [ 0.21309822],
       [ 0.69431259],
       [ 0.03055713],
       [ 0.7357137 ],
       [ 0.60878637],
       [ 0.6417165 ],
       [ 0.23310359],
       [ 0.47533704],
       [ 0.39452717],
       [ 0.50271122],
       [ 0.52587104],
       [ 0.80192977],
       [ 0.73390585],
       [ 0.19025619],
       [ 0.3098771 ],
       [ 0.47631825],
       [ 0.27754076],
       [ 0.38618536],
       [ 0.46638813],
       [ 0.2211511 ],
       [ 0.16415417],
       [ 0.37732003],
       [ 0.67571222],
       [ 0.22535996],
       [ 0.45863092],
       [ 0.54486021],
       [ 0.76241638],
       [ 0.6310934 ],
       [ 0.17446096],
       [ 0.37613368],
       [ 0.18570369],
       [ 0.68983049],
       [ 0.39585293],
       [ 0.22340049],
       [ 0.28010794],
       [ 0.6322611 ],
       [ 0.51699759],
       [ 0.72629503],
       [ 0.48952244],
       [ 0.57953036],
       [ 0.65363964],
       [ 0.89292119],
       [ 0.20659269],
       [ 0.29078238],
       [ 0.10754869],
       [ 0.54403043],
       [ 0.2557285 ],
       [ 0.17732083],
       [ 0.68280692],
       [ 0.63881249],
       [ 0.16058405],
       [ 0.38459177],
       [ 0.43743898],
       [ 0.63790623],
       [ 0.24997821],
       [ 0.24576746],
       [ 0.48890218],
       [ 0.75200806],
       [ 0.79261945],
       [ 0.77606264],
       [ 0.96079623],
       [ 0.2968025 ],
       [ 0.24490039],
       [ 0.98914872],
       [ 0.58121006],
       [ 0.28318245],
       [ 0.26253719],
       [ 0.7730892 ],
       [ 0.24305284],
       [ 0.82502353],
       [ 0.27163867],
       [ 0.61998896],
       [ 0.86892882],
       [ 0.37061828],
       [ 0.21980904],
       [ 0.40271286],
       [ 0.74487567],
       [ 0.77329366],
       [ 0.91208647],
       [ 0.55728698],
       [ 0.78218121],
       [ 0.78300732],
       [ 0.64975689],
       [ 0.44599551],
       [ 0.19667399],
       [ 0.20480526],
       [ 0.22734994],
       [ 0.60047236],
       [ 0.12706324],
       [ 0.67963035],
       [ 0.39982165],
       [ 0.69383339],
       [ 0.69285889],
       [ 0.31712758],
       [ 0.91724072],
       [ 0.65734607],
       [ 0.67544798],
       [ 0.10047201],
       [ 0.78177465],
       [ 0.6084214 ],
       [ 0.63348854],
       [ 0.41496881],
       [ 0.69939029],
       [ 0.37796978],
       [ 0.33341024],
       [ 0.65198963],
       [ 0.2968773 ],
       [ 0.26896156],
       [ 0.27012342],
       [ 0.73181294],
       [ 0.53597991],
       [ 0.39548224],
       [ 0.55485011],
       [ 0.46719397],
       [ 0.31291468],
       [ 0.67104436],
       [ 0.67087466],
       [ 0.87152363],
       [ 0.52230531],
       [ 0.25119723],
       [ 0.27843139],
       [ 0.8240009 ],
       [ 0.63966691],
       [ 0.6904299 ],
       [ 0.23563384],
       [ 0.68928023],
       [ 0.79874373],
       [ 0.11583471],
       [ 0.56741653],
       [ 0.35291015],
       [ 0.20012454],
       [ 0.84231196],
       [ 0.73698894],
       [ 0.31419257],
       [ 0.48580473],
       [ 0.23926392],
       [ 0.2748457 ],
       [ 0.56360699],
       [ 0.28843543],
       [ 0.56273115],
       [ 0.60657529],
       [ 0.38324386],
       [ 0.70825047],
       [ 0.19176433],
       [ 0.60350746],
       [ 0.48992678],
       [ 0.63326587],
       [ 0.66149877],
       [ 0.60359782],
       [ 0.18634089],
       [ 0.7553256 ],
       [ 0.40360694],
       [ 0.54008116],
       [ 0.34353911],
       [ 0.7621404 ],
       [ 0.65596613],
       [ 0.75172703],
       [ 0.30573251],
       [ 0.89250308],
       [ 0.35147767],
       [ 0.03199721],
       [ 0.86217803],
       [ 0.42618805],
       [ 0.28710936],
       [ 0.32298733],
       [ 0.76248613],
       [ 0.59287141],
       [ 0.6634477 ],
       [ 0.43654572],
       [ 0.03682589],
       [ 0.60478358],
       [ 0.1503095 ],
       [ 0.69599002],
       [ 0.64579516],
       [ 0.51029081],
       [ 0.74605025],
       [ 0.66647112],
       [ 0.6476632 ],
       [ 0.50574542],
       [ 0.3552941 ]])

In [8]:
K=Error.argmin()+1
X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
pls = PLSRegression(n_components = K, scale = False)
pls.fit(X_train, y_train)
y_hat = pls.predict(X_test)

Error_dum = np.zeros(Ys.shape)
for idk,K in enumerate(np.arange(0,Ys.shape[1])):
    Error_dum[0,K] = mymetric(y_test[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))], y_hat[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))])
    
plt.plot(Ys,Error_dum,'o-')


Out[8]:
[<matplotlib.lines.Line2D at 0x1ff07b3f908>,
 <matplotlib.lines.Line2D at 0x1ff07b3fcc0>,
 <matplotlib.lines.Line2D at 0x1ff07af51d0>,
 <matplotlib.lines.Line2D at 0x1ff07af58d0>,
 <matplotlib.lines.Line2D at 0x1ff07af52b0>,
 <matplotlib.lines.Line2D at 0x1ff07af5780>,
 <matplotlib.lines.Line2D at 0x1ff07af5748>,
 <matplotlib.lines.Line2D at 0x1ff07af5f28>,
 <matplotlib.lines.Line2D at 0x1ff071bddd8>]

In [9]:
X = np.concatenate((zspectra,t2_signal),axis = 1)
num_components = X.shape[1]


Error = np.zeros((num_components -1,1))

for idx,K in enumerate(np.arange(1,num_components)):
    X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
    pls = PLSRegression(n_components = K, scale = False)
    pls.fit(X_train, y_train)
    y_hat = pls.predict(X_test)
    Error[idx] = mymetric(y_test , y_hat)

plt.plot( np.arange(1,num_components), Error ,'o-')
print('Min = ', Error.min(),'%')


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-9-7cbf0fceee8d> in <module>()
      8     X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
      9     pls = PLSRegression(n_components = K, scale = False)
---> 10     pls.fit(X_train, y_train)
     11     y_hat = pls.predict(X_test)
     12     Error[idx] = mymetric(y_test , y_hat)

F:\Users\Luis\Anaconda3\lib\site-packages\sklearn\cross_decomposition\pls_.py in fit(self, X, Y)
    329             x_loadings = np.dot(Xk.T, x_scores) / np.dot(x_scores.T, x_scores)
    330             # - subtract rank-one approximations to obtain remainder matrix
--> 331             Xk -= np.dot(x_scores, x_loadings.T)
    332             if self.deflation_mode == "canonical":
    333                 # - regress Yk's on y_score, then subtract rank-one approx.

KeyboardInterrupt: 

In [ ]:
K=Error.argmin()+1
X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
pls = PLSRegression(n_components = K, scale = False)
pls.fit(X_train, y_train)
y_hat = pls.predict(X_test)

Error_dum = np.zeros(Ys.shape)
for idk,K in enumerate(np.arange(0,Ys.shape[1])):
    Error_dum[0,K] = mymetric(y_test[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))], y_hat[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))])
    
plt.plot(Ys,Error_dum,'o-')

In [ ]:
X = np.concatenate((zspectra,t2),axis = 1)
num_components = X.shape[1]


Error = np.zeros((num_components -1,1))

for idx,K in enumerate(np.arange(1,num_components)):
    X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
    pls = PLSRegression(n_components = K, scale = False)
    pls.fit(X_train, y_train)
    y_hat = pls.predict(X_test)
    Error[idx] = mymetric(y_test , y_hat)

plt.plot( np.arange(1,num_components), Error ,'o-')
print('Min = ', Error.min(),'%')

In [ ]:
K=Error.argmin()+1
X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
pls = PLSRegression(n_components = K, scale = False)
pls.fit(X_train, y_train)
y_hat = pls.predict(X_test)

Error_dum = np.zeros(Ys.shape)
for idk,K in enumerate(np.arange(0,Ys.shape[1])):
    Error_dum[0,K] = mymetric(y_test[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))], y_hat[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))])
    
plt.plot(Ys,Error_dum,'o-')

In [ ]:
X = t2_signal
num_components = X.shape[1]


Error = np.zeros((num_components -1,1))

for idx,K in enumerate(np.arange(1,num_components)):
    X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
    pls = PLSRegression(n_components = K, scale = False)
    pls.fit(X_train, y_train)
    y_hat = pls.predict(X_test)
    Error[idx] = mymetric(y_test , y_hat)

plt.plot( np.arange(1,num_components), Error ,'o-')
print('Min = ', Error.min(),'%')

In [ ]:
K=Error.argmin()+1
X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
pls = PLSRegression(n_components = K, scale = False)
pls.fit(X_train, y_train)
y_hat = pls.predict(X_test)

Error_dum = np.zeros(Ys.shape)
for idk,K in enumerate(np.arange(0,Ys.shape[1])):
    Error_dum[0,K] = mymetric(y_test[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))], y_hat[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))])
    
plt.plot(Ys,Error_dum,'o-')

In [ ]:
X = t2
num_components = X.shape[1]


Error = np.zeros((num_components -1,1))

X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.05, random_state=42)
pls = PLSRegression(n_components = num_components, scale = False)
pls.fit(X_train, y_train)
y_hat = pls.predict(X_test)
Error = mymetric(y_test , y_hat)
print('Min = ', Error.min(),'%')

In [ ]:
Error_dum = np.zeros(Ys.shape)
for idk,K in enumerate(np.arange(0,Ys.shape[1])):
    Error_dum[0,K] = mymetric(y_test[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))], y_hat[np.where(y_test==(np.ones(y_test.shape)*Ys[0,K]))])
    
plt.plot(Ys,Error_dum,'o-')

In [ ]:
'''
steps = [1,4,8]
labels = list()

for step in steps:
    X = zspectra[:, 0:101:step]
    labels.append(int(X.shape[1]))
    Y = pH
    num_components = 10
    Error = np.zeros((num_components -1,1))

    for idx,K in enumerate(np.arange(1,num_components)):
        X_train, X_test, y_train, y_test = train_test_split( X, Y, test_size=0.50, random_state=42)
        pls = PLSRegression(n_components = K, scale = False)
        pls.fit(X_train, y_train)
        y_hat = pls.predict(X_test)
        Error[idx] = mymetric(y_test , y_hat)

    plt.plot( np.arange(1,num_components), Error ,'o-')
    plt.legend(labels)
'''