Linear Classifier Plot


In [2]:
import matplotlib.pyplot as plt
import pandas as pd

# Two linearly separable point sets: circles (class 1) and crosses (class 2).
circles = [15, 10, 15, 16, 15, 18, 20, 20]
crosses = [3, 0.5, 1.2, 3, 2.5, 6.2, 3, 8.3]

# Separating line y = m*x + d.
def line(m, x, d):
    return m * x + d

# Sample the line at integer x positions.
line1 = [line(2, i, 3.9) for i in range(10)]

print(line1)

# Circles plot above the line, crosses below, with the line in between.
plt.plot(range(len(circles)), circles, "bo")
plt.plot(range(len(crosses)), crosses, "rx")
plt.plot(range(len(line1)), line1, "b-")

plt.show()


[3.9, 5.9, 7.9, 9.9, 11.9, 13.9, 15.9, 17.9, 19.9, 21.9]
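
Since the two classes are linearly separable, the line itself can serve as a decision rule: a point counts as a circle if it lies above the line and as a cross if below. A minimal sketch, reusing the circles/crosses data from the cell above:

def classify(x, y, m=2, d=3.9):
    # Side test against y = m*x + d: above the line -> circle, below -> cross.
    return "circle" if y > m * x + d else "cross"

# Every circle sits above the line and every cross below it.
for i in range(len(circles)):
    print(i, classify(i, circles[i]), classify(i, crosses[i]))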

In [3]:
## Neural Network - MLPClassifier (Multi-Layer Perceptron)

In [4]:
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import classification_report
import matplotlib.pyplot as plt 

# Import the handwritten-digit dataset: 1797 8x8 images, flattened to 64 features.
digits = load_digits()
print(digits.data)
print(digits.target)

X = digits.data
Y = digits.target

# Deliberately tiny training set: only 5% of the samples are used for training.
X_train, X_test, y_train, y_test = train_test_split(X, Y, train_size=0.05)

print(len(X_train), "-", len(X_test))
# Create the neural network: a single hidden layer with 100 units.
neuralnetwork = MLPClassifier(hidden_layer_sizes=(100,),
                              max_iter=200,
                              activation='logistic',
                              learning_rate='adaptive',
                              verbose=True)
neuralnetwork.fit(X_train, y_train)

# Predict on the held-out test set and report per-class metrics.
y_prediction = neuralnetwork.predict(X_test)

print(classification_report(y_test, y_prediction))


"""
plt.gray() 
plt.matshow(digits.images[12]) 
plt.show() 
"""


[[  0.   0.   5. ...,   0.   0.   0.]
 [  0.   0.   0. ...,  10.   0.   0.]
 [  0.   0.   0. ...,  16.   9.   0.]
 ..., 
 [  0.   0.   1. ...,   6.   0.   0.]
 [  0.   0.   2. ...,  12.   0.   0.]
 [  0.   0.  10. ...,  12.   1.   0.]]
[0 1 2 ..., 8 9 8]
89 - 1708
Iteration 1, loss = 2.46768885
Iteration 2, loss = 2.38033061
Iteration 3, loss = 2.29925152
[... iterations 4 through 198 elided; the loss decreases steadily ...]
Iteration 199, loss = 0.05670972
Iteration 200, loss = 0.05612846
             precision    recall  f1-score   support

          0       0.99      0.98      0.99       170
          1       0.68      0.88      0.77       172
          2       0.97      0.76      0.85       172
          3       0.97      0.86      0.91       177
          4       0.98      0.72      0.83       176
          5       0.94      0.94      0.94       168
          6       0.96      0.97      0.97       170
          7       0.93      0.91      0.92       173
          8       0.76      0.74      0.75       164
          9       0.72      0.97      0.83       166

avg / total       0.89      0.87      0.88      1708

ConvergenceWarning: Stochastic Optimizer: Maximum iterations reached and the optimization hasn't converged yet.
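
Beyond the per-class report, a confusion matrix shows which digits the network mixes up. A minimal follow-up sketch (not part of the original run), reusing y_test and y_prediction from the cell above:

# Rows are true digits, columns are predicted digits.
from sklearn.metrics import confusion_matrix
print(confusion_matrix(y_test, y_prediction))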

In [6]:
"""
from yahoo_finance import Share
tesla = Share('TSLA')
print(dir(tesla))
print(tesla.get_historical('2016-09-22', '2017-08-22'))
"""

# Load historical TSLA prices from CSV and derive the daily change.
filename_csv = "../datasets/TSLA.csv"
csv_tesla_data = pd.read_csv(filename_csv)
csv_tesla_data['Change'] = csv_tesla_data['Close'] - csv_tesla_data['Open']
csv_tesla_data['Change_p'] = csv_tesla_data['Change'] / csv_tesla_data['Open'] * 100

print(csv_tesla_data.head())

# Top panel: closing price; bottom panel: daily change in percent.
f, axarr = plt.subplots(2, sharex=True, sharey=False)
axarr[0].plot(range(1, len(csv_tesla_data) + 1), csv_tesla_data['Close'], 'b-')
axarr[1].plot(range(1, len(csv_tesla_data) + 1), csv_tesla_data['Change_p'], 'r-')
plt.show()

# Features and target. The target column was never defined in the original
# notebook; as a placeholder (an assumption, not the author's choice), label
# each day by whether the NEXT day's change is positive.
csv_tesla_data['Target'] = (csv_tesla_data['Change'].shift(-1) > 0).astype(int)
csv_tesla_data = csv_tesla_data[:-1]  # drop the last row: it has no next-day label

classifier_x = csv_tesla_data[['Open', 'Close', 'Change_p']]
classifier_y = csv_tesla_data['Target']

# train_test_split
X_train, X_test, y_train, y_test = train_test_split(classifier_x,
                                                    classifier_y)

# plot the predictions (see the sketch below)

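The prediction step was left unfinished above. A minimal sketch of one way to complete it, under the placeholder target defined earlier; the hyperparameters mirror the digits example and are assumptions, not tuned choices:

from sklearn.neural_network import MLPClassifier

# Fit a small MLP on the train split and plot predicted vs. actual labels.
clf = MLPClassifier(hidden_layer_sizes=(100,), max_iter=500)
clf.fit(X_train, y_train)
predictions = clf.predict(X_test)

plt.plot(range(len(y_test)), y_test.values, "bo", label="actual")
plt.plot(range(len(predictions)), predictions, "rx", label="predicted")
plt.legend()
plt.show()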
