In [1]:
import matplotlib.pyplot as plt
import neurolab as nl
import numpy as np
import pandas as pd

# Load the daily personal-stats log and keep only the six tracked metrics.
data_frame = pd.read_csv("personal_stats2.csv")
data_frame = data_frame[["Happiness", "Motivation", "Flexibility", "Strength", "Endurance", "Relationships"]]

# Chronological split point: first 80% of days for training, rest for testing.
index_at_80_percent = int(len(data_frame) * .8)

# Each input row is one day's stats; the target row is the FOLLOWING day's
# stats, so the network learns to predict tomorrow from today.
training_input = data_frame[:index_at_80_percent]
training_target = data_frame[1:index_at_80_percent + 1]

# Same day -> next-day pairing for the held-out 20%.
# Start at index_at_80_percent (not +1) so the first held-out day is not
# silently skipped; the last row is dropped from the inputs because it has
# no following day to serve as a target.
test_input = data_frame[index_at_80_percent: len(data_frame) - 1]
test_target = data_frame[index_at_80_percent + 1:]

# Stats are recorded on a 0-10 scale; rescale to [0, 1] to match the input
# ranges declared for the network below.
training_input = training_input / 10
training_target = training_target / 10
test_input = test_input / 10
test_target = test_target / 10

# Feed-forward network: 6 inputs (one per metric, each in [0, 1]),
# one hidden layer of 20 neurons, and 6 outputs (next day's metrics).
net = nl.net.newff(
    [[0, 1] for _ in range(6)],
    [20, 6]
)

# Train with neurolab's default algorithm, printing progress every 15 epochs.
# `err` is the per-epoch training-error history (used in the next cell).
err = net.train(training_input, training_target, show=15)
Epoch: 15; Error: 242.557996813;
Epoch: 30; Error: 242.190718705;
Epoch: 45; Error: 242.116517795;
Epoch: 60; Error: 242.03822422;
Epoch: 75; Error: 241.98242112;
Epoch: 90; Error: 241.932338124;
Epoch: 105; Error: 241.908022755;
Epoch: 120; Error: 241.878166608;
Epoch: 135; Error: 241.837613904;
Epoch: 150; Error: 241.790865763;
Epoch: 165; Error: 241.738311027;
Epoch: 180; Error: 241.703514042;
Epoch: 195; Error: 241.664317636;
Epoch: 210; Error: 241.558339799;
Epoch: 225; Error: 241.457291036;
Epoch: 240; Error: 241.411372777;
Epoch: 255; Error: 241.361399908;
Epoch: 270; Error: 241.307299692;
Epoch: 285; Error: 241.240336237;
Epoch: 300; Error: 241.200408787;
Epoch: 315; Error: 241.149505564;
Epoch: 330; Error: 241.108030322;
Epoch: 345; Error: 241.078166623;
Epoch: 360; Error: 241.040420659;
Epoch: 375; Error: 241.002079942;
Epoch: 390; Error: 240.961492861;
Epoch: 405; Error: 240.93501421;
Epoch: 420; Error: 235.048227956;
Epoch: 435; Error: 207.701949481;
Epoch: 450; Error: 195.412695677;
Epoch: 465; Error: 194.561277784;
Epoch: 480; Error: 194.154673127;
Epoch: 495; Error: 193.61796262;
The maximum number of train epochs is reached
In [2]:
# Display the per-epoch training-error history returned by net.train above.
err
Out[2]:
[773.90572581269601,
264.68313227764224,
254.81084279477307,
254.74296961219127,
254.10072564390899,
250.50441932951438,
249.67751263494759,
248.62323773915369,
246.93193646747613,
244.88349730357527,
243.07904272720035,
242.89299807070603,
242.76961766591688,
242.62591643647761,
242.55799681325931,
242.44344222405351,
242.38096617551017,
242.31922110513244,
242.31025088282817,
242.2964996866616,
242.27360722278533,
242.24826341692122,
242.24427638004451,
242.23873633946945,
242.22916337732886,
242.21438466199771,
242.20408708105114,
242.19805992279043,
242.19212802813433,
242.19071870451606,
242.18803309834249,
242.1730496859517,
242.1678007595745,
242.16602271719341,
242.16392295074286,
242.16249726102055,
242.15987363220142,
242.15517124036273,
242.14755662126367,
242.14032240454031,
242.13732680312273,
242.13383005692194,
242.12978100172876,
242.12328482153617,
242.11651779538715,
242.11393496812349,
242.10970926574683,
242.10290049531451,
242.09281873321919,
242.08833354348326,
242.08269126723138,
242.07570260696116,
242.06573735825481,
242.06143815924213,
242.05485791767572,
242.04971651993708,
242.04702163790017,
242.04454399747962,
242.04209539523305,
242.0382242198431,
242.03353327377067,
242.03077881418506,
242.0270260767681,
242.02461146843311,
242.02246219211128,
242.02034513776647,
242.01814449715877,
242.01482492798806,
242.01078152489731,
242.00411371502136,
241.99330532494716,
241.98423239532832,
241.98416860036104,
241.98355984652397,
241.98242112004678,
241.98044216487204,
241.9778576601378,
241.97603431983305,
241.97352705150482,
241.97048716813808,
241.96717225187513,
241.9626843666135,
241.95883692726716,
241.95500804919038,
241.9509187393565,
241.94904788646824,
241.94619019069691,
241.94157225782658,
241.93575812750936,
241.93233812438757,
241.9303113003192,
241.92852390204777,
241.92615520002676,
241.92392642999243,
241.92308294706459,
241.92209090379035,
241.92049200901138,
241.91800592406099,
241.91555952476375,
241.91439355982538,
241.91322205392294,
241.91211294731067,
241.91079407280478,
241.90938076537958,
241.90802275514787,
241.90716290619565,
241.90646200632153,
241.90541157757497,
241.90387271944942,
241.90205322635114,
241.90029540128808,
241.89824523586208,
241.89572573761887,
241.89306904252743,
241.89193727887317,
241.8900982590587,
241.88754264508563,
241.8847055500369,
241.8819470390782,
241.87816660826795,
241.87429464907945,
241.86918332031343,
241.86375307820063,
241.86183967119854,
241.86035285403494,
241.85883978211734,
241.85695557009035,
241.8542755350071,
241.85142749343598,
241.85067403322378,
241.84951409793922,
241.84802888476747,
241.84579463243514,
241.8422709096111,
241.83761390408708,
241.83457218037137,
241.83002539417151,
241.82443133043523,
241.81956183750282,
241.8158621844494,
241.81282136873398,
241.81150853166278,
241.80970338304456,
241.80701366957229,
241.80593591232952,
241.80431826009539,
241.80190439976502,
241.79849294486797,
241.79492513762222,
241.79086576338304,
241.78607592360436,
241.78414789696131,
241.78096393227361,
241.77565118224805,
241.76772497497626,
241.75990971752125,
241.75795766828162,
241.75512554686671,
241.75233117005385,
241.75107579418608,
241.74994244155127,
241.74835833935356,
241.7460141160941,
241.7425464893326,
241.73831102685472,
241.73464654943845,
241.73323225760748,
241.73106512653635,
241.72898482568672,
241.72739696880103,
241.72531167884898,
241.72204113754813,
241.7185061129118,
241.71628184956282,
241.71307975507415,
241.7101994102122,
241.70903339363852,
241.70763748374659,
241.70562579940429,
241.70351404236314,
241.70090471239456,
241.69688388454111,
241.69207612126328,
241.69010578489102,
241.6881371285715,
241.68620570847844,
241.68344574359463,
241.68078054874843,
241.67760460549164,
241.67583430929776,
241.67442147835311,
241.67334642396384,
241.67165201737805,
241.66867679587628,
241.66431763585194,
241.65861403128545,
241.65382618607299,
241.64807509137651,
241.64228150649726,
241.63423546649796,
241.6259029932298,
241.61908762119185,
241.6122839635625,
241.60279976214142,
241.58997397916022,
241.58325246269828,
241.57403640619964,
241.56738041200978,
241.56354744194709,
241.55833979927326,
241.55056202924982,
241.54089775000435,
241.53227562114671,
241.52022318600018,
241.5157263237148,
241.50884744356588,
241.49967923208266,
241.49668386998536,
241.49122739926344,
241.48290422668546,
241.47671944321044,
241.47060235386635,
241.46392105728106,
241.46092379815195,
241.45729103573933,
241.45310087787314,
241.44917444283209,
241.44536793786375,
241.44157364638812,
241.43859412178955,
241.43570614187871,
241.43169510395634,
241.42776921149459,
241.42459964455145,
241.42138795742795,
241.42006866717364,
241.4184189982559,
241.4158749845929,
241.41273510314161,
241.41137277652535,
241.40924889797157,
241.40618725006198,
241.40366873480261,
241.40113012045634,
241.39721714276789,
241.39248161085939,
241.38907821659205,
241.3858305736,
241.38172037984168,
241.37651940721693,
241.37191799450397,
241.36788379257308,
241.36640943624411,
241.36434490476668,
241.36139990752872,
241.35822677573589,
241.3543988383754,
241.35042737791323,
241.3472458174725,
241.34422833040566,
241.34090813497264,
241.33582202747652,
241.33003729499006,
241.32712417674219,
241.32349519519511,
241.32133791087693,
241.31855134173281,
241.31497301213454,
241.31139542485502,
241.30729969186692,
241.30238010691988,
241.29545080234607,
241.28891279612301,
241.28556246725145,
241.28189015164253,
241.27877485793292,
241.27496534488552,
241.27034219092656,
241.2649768980713,
241.25882144942182,
241.25397439652062,
241.24888667352187,
241.24509286312349,
241.24245304722001,
241.24033623682112,
241.23767459274612,
241.23424714450837,
241.23093587460832,
241.22789331559255,
241.22445410069531,
241.22227863034868,
241.22091461627261,
241.21959915496058,
241.21851456639484,
241.21682224392674,
241.21425705352002,
241.21151891473528,
241.2092448086845,
241.20560314851576,
241.20040878702309,
241.19811730014987,
241.19431613665373,
241.18952513816583,
241.18415831660326,
241.18048970917158,
241.17838904087543,
241.17497855803859,
241.16958540708606,
241.16321019266212,
241.16176464535965,
241.15961395630626,
241.15713333548857,
241.15494543825272,
241.15266158185972,
241.14950556424736,
241.14596052676464,
241.14245528219223,
241.13946525855255,
241.13603906727735,
241.1319485773663,
241.12670645372833,
241.12306985846439,
241.11925850351383,
241.11786211809905,
241.11647035823853,
241.11539644968721,
241.11445949373388,
241.11300430252558,
241.11079436102511,
241.10803032163244,
241.10582928448883,
241.10361203440394,
241.10188001992637,
241.0998834761169,
241.09751345034803,
241.09423216646769,
241.09221474815541,
241.09057555354343,
241.08839662204608,
241.0854675879973,
241.08336729793609,
241.08161796075876,
241.0805635258846,
241.07922896732916,
241.07816662275354,
241.0765852054455,
241.0747949628921,
241.07286229714325,
241.07062321079755,
241.06794139007192,
241.06558379989343,
241.06386736774112,
241.06181575263338,
241.05976750509637,
241.05745900604003,
241.05404280840946,
241.04992401252079,
241.04581008625038,
241.04293488979533,
241.04042065858636,
241.03955673268578,
241.03818705442711,
241.03594956057896,
241.03264763125102,
241.02914359366929,
241.02593080843201,
241.0221924439235,
241.01908650163759,
241.01670575819523,
241.01477435017077,
241.01239649267612,
241.01045894949428,
241.0081582971946,
241.00535002456172,
241.00207994153362,
240.9985785880084,
240.99390370468461,
240.98957127856175,
240.98735055678881,
240.98620042748394,
240.98461476783115,
240.98238480317889,
240.98010994511176,
240.97791330903542,
240.97558880102434,
240.97314274700787,
240.96961622628831,
240.96609258000717,
240.96398661556253,
240.9614928607333,
240.95848152950973,
240.95574854397509,
240.95334267008121,
240.95117544235711,
240.9488001825431,
240.94595767945722,
240.94427138221113,
240.94300097843944,
240.94206214272936,
240.94151721884543,
240.94074433287918,
240.93953899961232,
240.93781222731042,
240.93627869187435,
240.93501420999704,
240.93392898520995,
240.93287768797592,
240.93165948197156,
240.93014816516001,
240.92792226698549,
240.92500764699935,
240.92107963076316,
238.15460127615813,
238.15458893008025,
237.1373708499475,
237.1360942107865,
236.96119301326624,
236.35085154467265,
235.06032568281012,
235.04822795567688,
234.61840453095232,
233.4378370536667,
233.09953340607061,
232.58047154358783,
230.29040228859006,
229.10243362516013,
226.87993544382158,
222.4740831869471,
220.44894006743255,
219.15794690561802,
217.08343336799075,
213.41533150406656,
209.8057296247807,
209.03472422658035,
207.7019494812854,
205.55928910657229,
203.5049354411945,
202.77565926102704,
201.56305275070039,
199.93154600506597,
199.01845913768057,
198.32753860364053,
197.56611049644826,
196.80758848672679,
196.65542678342979,
196.3945320701161,
196.02987607907005,
195.81098244597021,
195.54933400769801,
195.41269567692643,
195.32404536048597,
195.210685945808,
195.07458051027169,
195.01577338168926,
194.9442543706401,
194.91227067643948,
194.89470102855881,
194.86822462998168,
194.83844310800356,
194.80198415361392,
194.75135039562338,
194.69161137689099,
194.64420399699907,
194.5990371866846,
194.56127778352987,
194.53640354466364,
194.51050279558663,
194.48292469997546,
194.45368420853964,
194.4275117587726,
194.39668801168602,
194.37328250490205,
194.35397548007415,
194.3254248606753,
194.28414287628578,
194.24604594740168,
194.22587906789167,
194.20093662818999,
194.17794959584361,
194.15467312678055,
194.14008542472658,
194.11434434772207,
194.07413458903346,
194.01913711394462,
193.99571262930823,
193.95585315677894,
193.88839140386796,
193.8205851568008,
193.78934300796655,
193.76313896306937,
193.73521343126549,
193.69629112809145,
193.66871396216828,
193.65000153022396,
193.61796261976031,
193.572262280042,
193.54917357120252,
193.52784135891903,
193.50613212603088,
193.48825033622884]
In [ ]:
In [ ]:
Content source: dev-coop/machine-learning
Similar notebooks: