Linear Classifier Plot


In [2]:
import matplotlib.pyplot as plt
import pandas as pd

# Two point clouds: the "circles" lie above the "crosses" at every x position.
circles = [15, 10, 15, 16, 15, 18, 20, 20]
crosses = [3, 0.5, 1.2, 3, 2.5, 6.2, 3, 8.3]

# Straight line y = m*x + d used as the separating (decision) line.
def line(m, x, d):
    return m*x + d

line1 = []

for i in range(10):
    line1.append(line(2, i, 3.9))

print(line1)

# Circles in blue, crosses in red, and the separating line on top.
plt.plot(range(len(circles)), circles, "bo")
plt.plot(range(len(crosses)), crosses, "rx")

plt.plot(range(len(line1)), line1, "b-")

plt.show()


[3.9, 5.9, 7.9, 9.9, 11.9, 13.9, 15.9, 17.9, 19.9, 21.9]
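
Since the blue line separates the two point clouds, it can also act as a classifier by checking which side of the line a point falls on. The following is a minimal sketch of that idea (not part of the original cell), assuming the variables above are still defined:

for x in range(len(circles)):
    # every point above the line is labelled "circle", everything below it "cross"
    for y in (circles[x], crosses[x]):
        label = "circle" if y > line(2, x, 3.9) else "cross"
        print(x, y, "->", label)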

In [3]:
##Neural Network - MLPClassifier (Multi-Layer Perceptron)

In [4]:
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import classification_report, confusion_matrix
import matplotlib.pyplot as plt 

#Import Data
digits = load_digits()
print(digits.data)
print(digits.target)

X = digits.data 
Y = digits.target

# Deliberately small split: only 5 % of the digits are used for training.
X_train, X_test, y_train, y_test = train_test_split(X, Y, train_size=0.05)

print(len(X_train),"-",len(X_test))
#Create Neural Network
neuralnetwork = MLPClassifier(hidden_layer_sizes = (100, ),
                              max_iter = 200,
                              activation = 'logistic', 
                              learning_rate = 'adaptive',
                              verbose = True)
neuralnetwork.fit(X_train, y_train)

y_prediction = neuralnetwork.predict(X_test)

print(classification_report(y_test, y_prediction))


"""
plt.gray() 
plt.matshow(digits.images[12]) 
plt.show() 
"""


[[  0.   0.   5. ...,   0.   0.   0.]
 [  0.   0.   0. ...,  10.   0.   0.]
 [  0.   0.   0. ...,  16.   9.   0.]
 ..., 
 [  0.   0.   1. ...,   6.   0.   0.]
 [  0.   0.   2. ...,  12.   0.   0.]
 [  0.   0.  10. ...,  12.   1.   0.]]
[0 1 2 ..., 8 9 8]
89 - 1708
Iteration 1, loss = 2.46768885
Iteration 2, loss = 2.38033061
Iteration 3, loss = 2.29925152
Iteration 4, loss = 2.22463505
Iteration 5, loss = 2.15647782
Iteration 6, loss = 2.09454238
Iteration 7, loss = 2.03839964
Iteration 8, loss = 1.98747972
Iteration 9, loss = 1.94110395
Iteration 10, loss = 1.89852225
Iteration 11, loss = 1.85896302
Iteration 12, loss = 1.82168707
Iteration 13, loss = 1.78604975
Iteration 14, loss = 1.75155875
Iteration 15, loss = 1.71790101
Iteration 16, loss = 1.68492708
Iteration 17, loss = 1.65259757
Iteration 18, loss = 1.62091573
Iteration 19, loss = 1.58987432
Iteration 20, loss = 1.55942859
Iteration 21, loss = 1.52949835
Iteration 22, loss = 1.49998760
Iteration 23, loss = 1.47080368
Iteration 24, loss = 1.44186863
Iteration 25, loss = 1.41312518
Iteration 26, loss = 1.38454126
Iteration 27, loss = 1.35611230
Iteration 28, loss = 1.32786010
Iteration 29, loss = 1.29982788
Iteration 30, loss = 1.27207182
Iteration 31, loss = 1.24465132
Iteration 32, loss = 1.21762116
Iteration 33, loss = 1.19102777
Iteration 34, loss = 1.16490892
Iteration 35, loss = 1.13929308
Iteration 36, loss = 1.11419478
Iteration 37, loss = 1.08960623
Iteration 38, loss = 1.06549435
Iteration 39, loss = 1.04181462
Iteration 40, loss = 1.01853284
Iteration 41, loss = 0.99563432
Iteration 42, loss = 0.97312009
Iteration 43, loss = 0.95099612
Iteration 44, loss = 0.92926375
Iteration 45, loss = 0.90792036
Iteration 46, loss = 0.88696610
Iteration 47, loss = 0.86640611
Iteration 48, loss = 0.84624805
Iteration 49, loss = 0.82649804
Iteration 50, loss = 0.80715832
Iteration 51, loss = 0.78822799
Iteration 52, loss = 0.76970567
Iteration 53, loss = 0.75159177
Iteration 54, loss = 0.73388877
Iteration 55, loss = 0.71659903
Iteration 56, loss = 0.69972145
Iteration 57, loss = 0.68324885
Iteration 58, loss = 0.66716787
Iteration 59, loss = 0.65146133
Iteration 60, loss = 0.63611161
Iteration 61, loss = 0.62110117
Iteration 62, loss = 0.60640861
Iteration 63, loss = 0.59200500
Iteration 64, loss = 0.57786082
Iteration 65, loss = 0.56396033
Iteration 66, loss = 0.55031006
Iteration 67, loss = 0.53693942
Iteration 68, loss = 0.52389273
Iteration 69, loss = 0.51121012
Iteration 70, loss = 0.49890376
Iteration 71, loss = 0.48695067
Iteration 72, loss = 0.47531389
Iteration 73, loss = 0.46396803
Iteration 74, loss = 0.45290618
Iteration 75, loss = 0.44213369
Iteration 76, loss = 0.43165960
Iteration 77, loss = 0.42148932
Iteration 78, loss = 0.41161996
Iteration 79, loss = 0.40203944
Iteration 80, loss = 0.39272991
Iteration 81, loss = 0.38367336
Iteration 82, loss = 0.37485611
Iteration 83, loss = 0.36627029
Iteration 84, loss = 0.35791258
Iteration 85, loss = 0.34978175
Iteration 86, loss = 0.34187631
Iteration 87, loss = 0.33419296
Iteration 88, loss = 0.32672614
Iteration 89, loss = 0.31946839
Iteration 90, loss = 0.31241133
Iteration 91, loss = 0.30554669
Iteration 92, loss = 0.29886713
Iteration 93, loss = 0.29236656
Iteration 94, loss = 0.28603999
Iteration 95, loss = 0.27988317
Iteration 96, loss = 0.27389213
Iteration 97, loss = 0.26806282
Iteration 98, loss = 0.26239092
Iteration 99, loss = 0.25687185
Iteration 100, loss = 0.25150087
Iteration 101, loss = 0.24627321
Iteration 102, loss = 0.24118429
Iteration 103, loss = 0.23622975
Iteration 104, loss = 0.23140557
Iteration 105, loss = 0.22670804
Iteration 106, loss = 0.22213369
Iteration 107, loss = 0.21767928
Iteration 108, loss = 0.21334167
Iteration 109, loss = 0.20911782
Iteration 110, loss = 0.20500464
Iteration 111, loss = 0.20099910
Iteration 112, loss = 0.19709814
Iteration 113, loss = 0.19329876
Iteration 114, loss = 0.18959805
Iteration 115, loss = 0.18599320
Iteration 116, loss = 0.18248150
Iteration 117, loss = 0.17906033
Iteration 118, loss = 0.17572716
Iteration 119, loss = 0.17247946
Iteration 120, loss = 0.16931476
Iteration 121, loss = 0.16623060
Iteration 122, loss = 0.16322456
Iteration 123, loss = 0.16029429
Iteration 124, loss = 0.15743753
Iteration 125, loss = 0.15465213
Iteration 126, loss = 0.15193604
Iteration 127, loss = 0.14928734
Iteration 128, loss = 0.14670421
Iteration 129, loss = 0.14418492
Iteration 130, loss = 0.14172781
Iteration 131, loss = 0.13933125
Iteration 132, loss = 0.13699364
Iteration 133, loss = 0.13471337
Iteration 134, loss = 0.13248887
Iteration 135, loss = 0.13031856
Iteration 136, loss = 0.12820086
Iteration 137, loss = 0.12613427
Iteration 138, loss = 0.12411730
Iteration 139, loss = 0.12214851
Iteration 140, loss = 0.12022653
Iteration 141, loss = 0.11835002
Iteration 142, loss = 0.11651767
Iteration 143, loss = 0.11472821
Iteration 144, loss = 0.11298040
Iteration 145, loss = 0.11127304
Iteration 146, loss = 0.10960493
Iteration 147, loss = 0.10797492
Iteration 148, loss = 0.10638190
Iteration 149, loss = 0.10482478
Iteration 150, loss = 0.10330252
Iteration 151, loss = 0.10181412
Iteration 152, loss = 0.10035860
Iteration 153, loss = 0.09893503
Iteration 154, loss = 0.09754252
Iteration 155, loss = 0.09618018
Iteration 156, loss = 0.09484719
Iteration 157, loss = 0.09354270
Iteration 158, loss = 0.09226595
Iteration 159, loss = 0.09101614
Iteration 160, loss = 0.08979255
Iteration 161, loss = 0.08859444
Iteration 162, loss = 0.08742113
Iteration 163, loss = 0.08627194
Iteration 164, loss = 0.08514621
Iteration 165, loss = 0.08404332
Iteration 166, loss = 0.08296266
Iteration 167, loss = 0.08190364
Iteration 168, loss = 0.08086568
Iteration 169, loss = 0.07984823
Iteration 170, loss = 0.07885075
Iteration 171, loss = 0.07787273
Iteration 172, loss = 0.07691366
Iteration 173, loss = 0.07597306
Iteration 174, loss = 0.07505045
Iteration 175, loss = 0.07414538
Iteration 176, loss = 0.07325740
Iteration 177, loss = 0.07238609
Iteration 178, loss = 0.07153102
Iteration 179, loss = 0.07069180
Iteration 180, loss = 0.06986804
Iteration 181, loss = 0.06905936
Iteration 182, loss = 0.06826538
Iteration 183, loss = 0.06748575
Iteration 184, loss = 0.06672014
Iteration 185, loss = 0.06596819
Iteration 186, loss = 0.06522959
Iteration 187, loss = 0.06450402
Iteration 188, loss = 0.06379117
Iteration 189, loss = 0.06309074
Iteration 190, loss = 0.06240244
Iteration 191, loss = 0.06172600
Iteration 192, loss = 0.06106113
Iteration 193, loss = 0.06040757
Iteration 194, loss = 0.05976506
Iteration 195, loss = 0.05913335
Iteration 196, loss = 0.05851220
Iteration 197, loss = 0.05790136
Iteration 198, loss = 0.05730061
Iteration 199, loss = 0.05670972
Iteration 200, loss = 0.05612846
             precision    recall  f1-score   support

          0       0.99      0.98      0.99       170
          1       0.68      0.88      0.77       172
          2       0.97      0.76      0.85       172
          3       0.97      0.86      0.91       177
          4       0.98      0.72      0.83       176
          5       0.94      0.94      0.94       168
          6       0.96      0.97      0.97       170
          7       0.93      0.91      0.92       173
          8       0.76      0.74      0.75       164
          9       0.72      0.97      0.83       166

avg / total       0.89      0.87      0.88      1708

C:\Users\Windows\AppData\Local\Continuum\Anaconda3\lib\site-packages\sklearn\neural_network\multilayer_perceptron.py:563: ConvergenceWarning: Stochastic Optimizer: Maximum iterations reached and the optimization hasn't converged yet.
  % (), ConvergenceWarning)
Out[4]:
'\nplt.gray() \nplt.matshow(digits.images[12]) \nplt.show() \n'
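
The commented-out matshow lines above hint at visualizing the digit images. The following is a minimal sketch of that step (not part of the original notebook), assuming the cell above has been run so X_test, y_test and neuralnetwork exist:

# Show one test digit as an 8x8 image together with the network's prediction.
sample = X_test[0].reshape(8, 8)
prediction = neuralnetwork.predict(X_test[0].reshape(1, -1))[0]
plt.gray()
plt.matshow(sample)
plt.title("predicted: %d, actual: %d" % (prediction, y_test[0]))
plt.show()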

In [30]:
"""
from yahoo_finance import Share
tesla = Share('TSLA')
print(dir(tesla))
print(tesla.get_historical('2016-09-22', '2017-08-22'))
"""

def get_y(change):
    # Label: 1 if the stock closed above its opening price, 0 otherwise.
    if change > 0:
        return 1
    elif change <= 0:
        return 0

filename_csv = "../datasets/TSLA.csv"
csv_tesla_data = pd.read_csv(filename_csv)
csv_tesla_data['Change'] = csv_tesla_data['Close'] - csv_tesla_data['Open']
csv_tesla_data['Change_p'] = csv_tesla_data['Change'] / csv_tesla_data['Open'] * 100
csv_tesla_data['Target'] = 0
#print(get_y(csv_tesla_data['Change_p']))
#csv_tesla_data['Target'] = get_y(csv_tesla_data['Change_p'])

# Target of day i is the direction of the previous day's change.
# .loc avoids the chained-assignment SettingWithCopyWarning.
for i in range(1, len(csv_tesla_data)):
    csv_tesla_data.loc[i, 'Target'] = get_y(csv_tesla_data['Change_p'][i-1])

print(csv_tesla_data)
f, axarr = plt.subplots(2, sharex=True, sharey=False)
axarr[0].plot(range(1,len(csv_tesla_data)+1), csv_tesla_data['Close'], 'b-')
axarr[1].plot(range(1,len(csv_tesla_data)+1), csv_tesla_data['Change_p'], 'r-')
plt.show()

classifier_x = csv_tesla_data[['Open', 'High', 'Low', 'Close', 'Volume', 'Change', 'Change_p']]
classifier_y = csv_tesla_data['Target']

#train_test_split
X_train, X_test, y_train, y_test = train_test_split(classifier_x,
                                                     classifier_y)

#Fit the model
neuralnetwork = MLPClassifier(hidden_layer_sizes = (100, ),
                              max_iter = 200,
                              activation = 'logistic', 
                              learning_rate = 'adaptive',
                              verbose = True)
neuralnetwork.fit(X_train, y_train)

y_prediction = neuralnetwork.predict(X_test)
print(y_test, " - ", y_prediction)
print(classification_report(y_test, y_prediction))
print(confusion_matrix(y_test, y_prediction))

#plot the predictions


           Date        Open        High         Low       Close   Adj Close  \
0    2014-09-22  255.000000  256.019989  244.710007  250.029999  250.029999   
1    2014-09-23  245.220001  253.800003  245.000000  250.410004  250.410004   
2    2014-09-24  251.119995  252.839996  247.039993  252.139999  252.139999   
3    2014-09-25  252.520004  254.960007  246.100006  246.949997  246.949997   
4    2014-09-26  248.250000  249.729996  246.070007  246.600006  246.600006   
5    2014-09-29  244.000000  248.639999  241.380005  245.259995  245.259995   
6    2014-09-30  246.919998  247.649994  240.119995  242.679993  242.679993   
7    2014-10-01  242.199997  242.660004  235.649994  240.240005  240.240005   
8    2014-10-02  250.199997  252.789993  245.360001  251.419998  251.419998   
9    2014-10-03  253.059998  256.500000  251.029999  255.210007  255.210007   
10   2014-10-06  259.130005  262.489990  257.799988  260.619995  260.619995   
11   2014-10-07  258.529999  261.459991  255.729996  259.570007  259.570007   
12   2014-10-08  260.100006  262.880005  252.639999  259.279999  259.279999   
13   2014-10-09  262.250000  265.540009  254.399994  257.010010  257.010010   
14   2014-10-10  244.639999  245.889999  235.199997  236.910004  236.910004   
15   2014-10-13  238.570007  238.960007  221.000000  224.589996  224.589996   
16   2014-10-14  228.250000  232.470001  223.000000  227.059998  227.059998   
17   2014-10-15  220.000000  230.990005  217.320007  229.699997  229.699997   
18   2014-10-16  219.720001  229.919998  219.100006  226.350006  226.350006   
19   2014-10-17  233.380005  234.770004  226.550003  227.479996  227.479996   
20   2014-10-20  226.720001  232.399994  225.509995  230.470001  230.470001   
21   2014-10-21  234.270004  235.389999  230.800003  235.339996  235.339996   
22   2014-10-22  233.190002  237.389999  230.559998  231.100006  231.100006   
23   2014-10-23  234.660004  236.279999  232.000000  235.289993  235.289993   
24   2014-10-24  236.270004  237.800003  231.199997  235.240005  235.240005   
25   2014-10-27  234.250000  234.610001  220.309998  221.669998  221.669998   
26   2014-10-28  229.600006  244.600006  228.250000  242.770004  242.770004   
27   2014-10-29  241.130005  241.500000  235.639999  238.100006  238.100006   
28   2014-10-30  238.139999  240.500000  235.059998  238.660004  238.660004   
29   2014-10-31  242.509995  243.119995  238.750000  241.699997  241.699997   
..          ...         ...         ...         ...         ...         ...   
728  2017-08-11  356.970001  361.260010  353.619995  357.869995  357.869995   
729  2017-08-14  364.630005  367.660004  362.600006  363.799988  363.799988   
730  2017-08-15  365.200012  365.489990  359.369995  362.329987  362.329987   
731  2017-08-16  363.000000  366.500000  362.519989  362.910004  362.910004   
732  2017-08-17  361.209991  363.299988  351.589996  351.920013  351.920013   
733  2017-08-18  352.910004  354.000000  345.799988  347.459991  347.459991   
734  2017-08-21  345.820007  345.820007  331.850006  337.859985  337.859985   
735  2017-08-22  341.130005  342.239990  337.369995  341.350006  341.350006   
736  2017-08-23  338.989990  353.489990  338.299988  352.769989  352.769989   
737  2017-08-24  352.519989  356.660004  349.739990  352.929993  352.929993   
738  2017-08-25  354.239990  355.690002  347.299988  348.049988  348.049988   
739  2017-08-28  347.279999  347.350006  339.720001  345.660004  345.660004   
740  2017-08-29  339.480011  349.049988  338.750000  347.359985  347.359985   
741  2017-08-30  349.670013  353.470001  347.000000  353.179993  353.179993   
742  2017-08-31  353.549988  358.440002  352.820007  355.899994  355.899994   
743  2017-09-01  356.119995  357.589996  353.690002  355.399994  355.399994   
744  2017-09-05  353.799988  355.489990  345.890015  349.589996  349.589996   
745  2017-09-06  349.500000  350.980011  341.559998  344.529999  344.529999   
746  2017-09-07  345.980011  352.480011  343.450012  350.609985  350.609985   
747  2017-09-08  348.989990  349.779999  342.299988  343.399994  343.399994   
748  2017-09-11  351.350006  363.709991  350.000000  363.690002  363.690002   
749  2017-09-12  364.489990  368.760010  360.399994  362.750000  362.750000   
750  2017-09-13  363.820007  368.070007  359.589996  366.230011  366.230011   
751  2017-09-14  364.329987  377.959991  362.630005  377.640015  377.640015   
752  2017-09-15  374.510010  380.000000  372.700012  379.809998  379.809998   
753  2017-09-18  380.250000  389.609985  377.679993  385.000000  385.000000   
754  2017-09-19  380.000000  382.390015  373.570007  375.100006  375.100006   
755  2017-09-20  373.000000  378.250000  371.070007  373.910004  373.910004   
756  2017-09-21  374.899994  376.829987  364.510010  366.480011  366.480011   
757  2017-09-22  366.489990  369.899994  350.880005  351.089996  351.089996   

       Volume     Change  Change_p  Target  
0     8214100  -4.970001 -1.949020       0  
1     5658700   5.190003  2.116468       0  
2     3749500   1.020004  0.406182       1  
3     4834200  -5.570007 -2.205769       1  
4     3795400  -1.649994 -0.664650       0  
5     4852700   1.259995  0.516391       0  
6     4238300  -4.240005 -1.717157       1  
7     5941700  -1.959992 -0.809245       0  
8     8998200   1.220001  0.487610       0  
9     5406300   2.150009  0.849604       1  
10    7713300   1.489990  0.574997       1  
11    4485500   1.040008  0.402277       1  
12    5055100  -0.820007 -0.315266       1  
13    7361300  -5.239990 -1.998090       0  
14   12888300  -7.729995 -3.159743       0  
15   11268700 -13.980011 -5.859920       0  
16    7105300  -1.190002 -0.521359       0  
17    9147300   9.699997  4.409090       0  
18    5399300   6.630005  3.017479       1  
19   10549400  -5.900009 -2.528070       1  
20    3494400   3.750000  1.654023       0  
21    4130300   1.069992  0.456735       1  
22    4116600  -2.089996 -0.896263       1  
23    3492400   0.629989  0.268469       0  
24    3463300  -1.029999 -0.435942       1  
25    9553300 -12.580002 -5.370332       0  
26   10516300  13.169998  5.736062       0  
27    4962500  -3.029999 -1.256583       1  
28    3228400   0.520005  0.218361       0  
29    3775300  -0.809998 -0.334006       1  
..        ...        ...       ...     ...  
728   4365800   0.899994  0.252120       0  
729   4502700  -0.830017 -0.227633       1  
730   3085100  -2.870025 -0.785878       0  
731   3413800  -0.089996 -0.024792       0  
732   5027700  -9.289978 -2.571905       0  
733   5408200  -5.450013 -1.544307       0  
734   6495400  -7.960022 -2.301782       0  
735   4322000   0.220001  0.064492       0  
736   4954500  13.779999  4.065016       1  
737   4584700   0.410004  0.116307       1  
738   3484000  -6.190002 -1.747404       1  
739   3764000  -1.619995 -0.466481       0  
740   4073700   7.879974  2.321189       0  
741   3412900   3.509980  1.003798       1  
742   4072800   2.350006  0.664688       1  
743   3049500  -0.720001 -0.202179       1  
744   3835100  -4.209992 -1.189936       0  
745   4091400  -4.970001 -1.422032       0  
746   4239200   4.629974  1.338220       0  
747   3257600  -5.589996 -1.601764       1  
748   7667100  12.339996  3.512166       0  
749   5972900  -1.739990 -0.477377       1  
750   4185200   2.410004  0.662417       0  
751   7202500  13.310028  3.653289       1  
752   5420500   5.299988  1.415179       1  
753   7188000   4.750000  1.249178       1  
754   6451900  -4.899994 -1.289472       1  
755   4919100   0.910004  0.243969       0  
756   4618200  -8.419983 -2.245928       1  
757   8119800 -15.399994 -4.202023       0  

[758 rows x 10 columns]
Iteration 1, loss = 0.72367163
Iteration 2, loss = 0.70990822
Iteration 3, loss = 0.70110206
Iteration 4, loss = 0.69472723
Iteration 5, loss = 0.69242581
Iteration 6, loss = 0.69371888
Iteration 7, loss = 0.69448091
Iteration 8, loss = 0.69499897
Training loss did not improve more than tol=0.000100 for two consecutive epochs. Stopping.
1      0
425    0
326    0
242    1
439    0
659    0
564    0
728    0
523    0
681    1
487    0
614    1
293    0
269    0
706    1
539    1
516    0
621    0
650    0
654    0
627    0
324    0
359    1
605    1
118    1
64     0
675    1
677    1
256    0
710    0
      ..
415    0
668    0
138    1
685    0
718    0
181    0
259    0
207    1
241    0
723    1
460    1
159    1
184    1
4      0
204    1
453    0
87     1
753    1
343    1
338    0
251    1
125    1
699    0
746    0
709    1
620    0
46     1
602    1
459    1
532    0
Name: Target, Length: 190, dtype: int32  -  [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 1 1 1 1 1]
             precision    recall  f1-score   support

          0       0.00      0.00      0.00       103
          1       0.46      1.00      0.63        87

avg / total       0.21      0.46      0.29       190

C:\Users\Windows\AppData\Local\Continuum\Anaconda3\lib\site-packages\sklearn\metrics\classification.py:1113: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)
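
The "#plot the predictions" step above is left open in the notebook. A minimal sketch of one way to fill it in (an assumption, not the author's code), using the y_test and y_prediction variables from this cell:

# Actual targets in blue, predicted targets in red, over the test samples.
plt.plot(range(len(y_test)), y_test.values, "bo", label="actual")
plt.plot(range(len(y_prediction)), y_prediction, "rx", label="predicted")
plt.legend()
plt.show()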

In [ ]:


In [ ]: