In [5]:
#importing the scikit-learn datasets module and the Perceptron class
from sklearn import datasets
from sklearn.linear_model import Perceptron

iris = datasets.load_iris()

m = Perceptron()

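Perceptron() is constructed here with all defaults; the repr printed after fit below lists them. As a minimal sketch, the same model could be created with the hyperparameters spelled out (these values are simply the defaults shown in Out[8], not tuned settings):

m = Perceptron(penalty=None, alpha=0.0001, eta0=1.0, n_iter=5,
               shuffle=True, random_state=0)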
In [8]:
#fitting the linear model to the full iris data (SGD-style perceptron updates)
m.fit(iris.data,iris.target)


Out[8]:
Perceptron(alpha=0.0001, class_weight=None, eta0=1.0, fit_intercept=True,
      n_iter=5, n_jobs=1, penalty=None, random_state=0, shuffle=True,
      verbose=0, warm_start=False)

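After fit, the learned weights are stored in coef_ (one row per class for the multiclass iris problem) and the bias terms in intercept_. A quick sketch for inspecting them; the shapes assume the 3-class, 4-feature iris data:

m.coef_        # weight matrix, shape (3, 4)
m.intercept_   # bias vector, shape (3,)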
In [9]:
#decision_function returns per-class confidence scores (signed distances to each class's separating hyperplane)
m.decision_function(iris.data)


Out[9]:
array([[  11.59,  -56.6 , -112.75],
       [   9.24,  -44.47, -101.12],
       [  10.54,  -51.26, -103.58],
       [   8.73,  -48.12,  -93.61],
       [  11.92,  -59.74, -112.36],
       [  11.09,  -66.76, -108.97],
       [  10.36,  -57.87,  -98.46],
       [  10.42,  -53.97, -106.02],
       [   8.31,  -44.39,  -89.87],
       [   9.33,  -45.24, -102.47],
       [  12.11,  -59.82, -118.43],
       [   9.58,  -54.48,  -98.9 ],
       [   9.44,  -43.63, -102.22],
       [  10.86,  -47.71, -102.24],
       [  15.72,  -67.2 , -140.56],
       [  14.82,  -79.4 , -129.02],
       [  13.65,  -68.8 , -121.93],
       [  11.29,  -57.95, -109.71],
       [  11.26,  -61.25, -116.28],
       [  11.94,  -65.33, -111.12],
       [   9.54,  -50.91, -107.3 ],
       [  11.21,  -64.05, -106.53],
       [  14.08,  -63.82, -117.56],
       [   7.91,  -53.86,  -90.81],
       [   7.66,  -52.95,  -89.18],
       [   8.06,  -42.94,  -96.58],
       [   9.18,  -56.16,  -96.7 ],
       [  11.05,  -55.58, -111.45],
       [  11.26,  -53.46, -113.14],
       [   8.62,  -49.73,  -93.86],
       [   8.29,  -46.59,  -94.25],
       [  10.22,  -54.63, -107.7 ],
       [  13.93,  -70.01, -123.79],
       [  15.  ,  -72.97, -131.36],
       [   9.33,  -45.24, -102.47],
       [  11.48,  -50.24, -112.64],
       [  12.63,  -55.07, -123.75],
       [   9.33,  -45.24, -102.47],
       [   9.38,  -47.53,  -94.66],
       [  10.52,  -53.46, -107.96],
       [  11.83,  -58.97, -111.01],
       [   6.17,  -29.96,  -82.71],
       [  10.24,  -52.79,  -97.76],
       [   9.01,  -61.49,  -92.17],
       [   9.08,  -64.64,  -95.12],
       [   8.84,  -46.33,  -96.14],
       [  11.6 ,  -63.47, -110.92],
       [   9.8 ,  -51.26,  -98.4 ],
       [  12.01,  -60.33, -116.49],
       [  10.63,  -51.85, -107.71],
       [ -12.52,  -38.39,   -1.56],
       [ -12.14,  -43.82,    6.64],
       [ -14.63,  -36.6 ,   11.45],
       [ -13.11,  -24.59,   15.77],
       [ -14.4 ,  -32.28,   14.14],
       [ -13.96,  -34.17,   20.34],
       [ -13.39,  -47.29,   16.55],
       [  -7.9 ,  -29.8 ,   -5.94],
       [ -13.27,  -31.7 ,    4.57],
       [ -11.35,  -38.5 ,   15.19],
       [ -10.8 ,  -17.75,    4.8 ],
       [ -11.58,  -42.64,    9.72],
       [ -12.14,  -15.36,   -1.5 ],
       [ -14.71,  -35.09,   20.55],
       [  -7.87,  -41.9 ,   -8.43],
       [ -11.33,  -38.82,   -3.91],
       [ -13.8 ,  -42.64,   25.26],
       [ -10.83,  -29.02,   -2.13],
       [ -16.64,  -18.54,   26.02],
       [ -10.91,  -27.15,    1.41],
       [ -15.46,  -48.89,   35.18],
       [ -10.36,  -34.68,   -3.62],
       [ -17.81,  -23.88,   32.39],
       [ -14.54,  -29.76,   16.02],
       [ -11.55,  -34.25,   -1.27],
       [ -11.86,  -36.7 ,   -0.42],
       [ -15.08,  -28.38,   11.76],
       [ -16.5 ,  -37.18,   26.2 ],
       [ -13.83,  -37.97,   19.05],
       [  -7.52,  -29.96,  -18.08],
       [ -10.8 ,  -25.54,    1.66],
       [  -9.86,  -24.7 ,   -4.62],
       [ -10.15,  -32.74,   -2.53],
       [ -18.83,  -31.  ,   44.63],
       [ -14.  ,  -43.66,   29.14],
       [ -11.98,  -52.47,   14.34],
       [ -13.55,  -38.64,    8.85],
       [ -14.87,  -18.47,   13.21],
       [ -10.64,  -41.98,    6.22],
       [ -12.25,  -29.85,   12.67],
       [ -14.08,  -29.09,   21.04],
       [ -13.64,  -38.23,   15.76],
       [ -11.22,  -29.6 ,    2.26],
       [  -8.23,  -26.66,   -6.33],
       [ -12.57,  -33.58,   14.11],
       [ -10.88,  -39.61,    4.48],
       [ -11.61,  -38.33,    9.07],
       [ -11.75,  -35.27,    2.61],
       [  -5.65,  -34.29,  -18.05],
       [ -11.4 ,  -36.21,    7.38],
       [ -24.41,  -52.81,   86.03],
       [ -19.93,  -36.07,   57.63],
       [ -23.06,  -35.95,   59.76],
       [ -21.47,  -34.88,   57.99],
       [ -23.32,  -40.87,   71.2 ],
       [ -27.04,  -29.83,   72.74],
       [ -17.25,  -35.76,   52.67],
       [ -24.95,  -26.21,   61.27],
       [ -24.07,  -21.3 ,   62.91],
       [ -22.86,  -55.6 ,   67.16],
       [ -17.38,  -47.  ,   39.34],
       [ -20.61,  -31.99,   52.47],
       [ -20.8 ,  -39.52,   52.62],
       [ -20.55,  -33.18,   62.47],
       [ -21.  ,  -45.45,   71.28],
       [ -19.66,  -50.54,   56.88],
       [ -20.2 ,  -37.  ,   49.32],
       [ -24.44,  -51.2 ,   64.68],
       [ -31.18,  -19.97,   92.8 ],
       [ -20.04,  -17.01,   46.1 ],
       [ -21.72,  -45.95,   60.14],
       [ -18.72,  -42.09,   56.52],
       [ -28.14,  -22.2 ,   74.1 ],
       [ -17.85,  -33.19,   38.41],
       [ -20.89,  -46.9 ,   56.39],
       [ -21.84,  -36.14,   48.84],
       [ -16.88,  -36.84,   35.56],
       [ -16.76,  -42.1 ,   37.64],
       [ -22.7 ,  -35.79,   66.72],
       [ -20.82,  -29.2 ,   39.38],
       [ -24.3 ,  -25.44,   57.44],
       [ -21.72,  -49.01,   45.  ],
       [ -23.  ,  -37.14,   69.76],
       [ -17.8 ,  -30.75,   34.22],
       [ -21.76,  -22.61,   54.36],
       [ -24.34,  -34.57,   60.68],
       [ -21.12,  -56.13,   68.48],
       [ -19.87,  -40.14,   49.71],
       [ -16.22,  -43.12,   36.34],
       [ -19.63,  -42.15,   45.89],
       [ -22.01,  -46.2 ,   65.37],
       [ -18.31,  -46.38,   42.25],
       [ -19.93,  -36.07,   57.63],
       [ -23.1 ,  -45.44,   68.56],
       [ -22.09,  -52.3 ,   68.55],
       [ -19.58,  -44.26,   50.92],
       [ -19.65,  -28.77,   47.79],
       [ -18.88,  -41.23,   45.68],
       [ -19.64,  -56.31,   60.9 ],
       [ -18.24,  -42.1 ,   48.  ]])

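Each row above holds one score per class, and the predicted label is the class with the highest score. A small sketch (assuming numpy is imported as np) showing how the scores resolve to the labels returned by predict:

import numpy as np
scores = m.decision_function(iris.data)
labels = m.classes_[np.argmax(scores, axis=1)]   # matches m.predict(iris.data)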
In [10]:
#converting the coefficient matrix back to dense ndarray format (a no-op here, since it was never sparsified)
m.densify()


Out[10]:
Perceptron(alpha=0.0001, class_weight=None, eta0=1.0, fit_intercept=True,
      n_iter=5, n_jobs=1, penalty=None, random_state=0, shuffle=True,
      verbose=0, warm_start=False)

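densify() only matters after sparsify() has been called; otherwise coef_ is already a dense numpy array. A sketch for checking which representation is currently in use, relying on the standard scipy.sparse.issparse helper:

from scipy import sparse
sparse.issparse(m.coef_)   # False after densify(), True after sparsify()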
In [11]:
#refitting the model, then using its deprecated transform to keep only the features with the largest learned coefficients
m.fit_transform(iris.data,iris.target)


C:\Users\priyu\Anaconda3\lib\site-packages\sklearn\utils\deprecation.py:70: DeprecationWarning: Function transform is deprecated; Support to use estimators as feature selectors will be removed in version 0.19. Use SelectFromModel instead.
  warnings.warn(msg, category=DeprecationWarning)
Out[11]:
array([[ 3.5,  1.4,  0.2],
       [ 3. ,  1.4,  0.2],
       [ 3.2,  1.3,  0.2],
       [ 3.1,  1.5,  0.2],
       [ 3.6,  1.4,  0.2],
       [ 3.9,  1.7,  0.4],
       [ 3.4,  1.4,  0.3],
       [ 3.4,  1.5,  0.2],
       [ 2.9,  1.4,  0.2],
       [ 3.1,  1.5,  0.1],
       [ 3.7,  1.5,  0.2],
       [ 3.4,  1.6,  0.2],
       [ 3. ,  1.4,  0.1],
       [ 3. ,  1.1,  0.1],
       [ 4. ,  1.2,  0.2],
       [ 4.4,  1.5,  0.4],
       [ 3.9,  1.3,  0.4],
       [ 3.5,  1.4,  0.3],
       [ 3.8,  1.7,  0.3],
       [ 3.8,  1.5,  0.3],
       [ 3.4,  1.7,  0.2],
       [ 3.7,  1.5,  0.4],
       [ 3.6,  1. ,  0.2],
       [ 3.3,  1.7,  0.5],
       [ 3.4,  1.9,  0.2],
       [ 3. ,  1.6,  0.2],
       [ 3.4,  1.6,  0.4],
       [ 3.5,  1.5,  0.2],
       [ 3.4,  1.4,  0.2],
       [ 3.2,  1.6,  0.2],
       [ 3.1,  1.6,  0.2],
       [ 3.4,  1.5,  0.4],
       [ 4.1,  1.5,  0.1],
       [ 4.2,  1.4,  0.2],
       [ 3.1,  1.5,  0.1],
       [ 3.2,  1.2,  0.2],
       [ 3.5,  1.3,  0.2],
       [ 3.1,  1.5,  0.1],
       [ 3. ,  1.3,  0.2],
       [ 3.4,  1.5,  0.2],
       [ 3.5,  1.3,  0.3],
       [ 2.3,  1.3,  0.3],
       [ 3.2,  1.3,  0.2],
       [ 3.5,  1.6,  0.6],
       [ 3.8,  1.9,  0.4],
       [ 3. ,  1.4,  0.3],
       [ 3.8,  1.6,  0.2],
       [ 3.2,  1.4,  0.2],
       [ 3.7,  1.5,  0.2],
       [ 3.3,  1.4,  0.2],
       [ 3.2,  4.7,  1.4],
       [ 3.2,  4.5,  1.5],
       [ 3.1,  4.9,  1.5],
       [ 2.3,  4. ,  1.3],
       [ 2.8,  4.6,  1.5],
       [ 2.8,  4.5,  1.3],
       [ 3.3,  4.7,  1.6],
       [ 2.4,  3.3,  1. ],
       [ 2.9,  4.6,  1.3],
       [ 2.7,  3.9,  1.4],
       [ 2. ,  3.5,  1. ],
       [ 3. ,  4.2,  1.5],
       [ 2.2,  4. ,  1. ],
       [ 2.9,  4.7,  1.4],
       [ 2.9,  3.6,  1.3],
       [ 3.1,  4.4,  1.4],
       [ 3. ,  4.5,  1.5],
       [ 2.7,  4.1,  1. ],
       [ 2.2,  4.5,  1.5],
       [ 2.5,  3.9,  1.1],
       [ 3.2,  4.8,  1.8],
       [ 2.8,  4. ,  1.3],
       [ 2.5,  4.9,  1.5],
       [ 2.8,  4.7,  1.2],
       [ 2.9,  4.3,  1.3],
       [ 3. ,  4.4,  1.4],
       [ 2.8,  4.8,  1.4],
       [ 3. ,  5. ,  1.7],
       [ 2.9,  4.5,  1.5],
       [ 2.6,  3.5,  1. ],
       [ 2.4,  3.8,  1.1],
       [ 2.4,  3.7,  1. ],
       [ 2.7,  3.9,  1.2],
       [ 2.7,  5.1,  1.6],
       [ 3. ,  4.5,  1.5],
       [ 3.4,  4.5,  1.6],
       [ 3.1,  4.7,  1.5],
       [ 2.3,  4.4,  1.3],
       [ 3. ,  4.1,  1.3],
       [ 2.5,  4. ,  1.3],
       [ 2.6,  4.4,  1.2],
       [ 3. ,  4.6,  1.4],
       [ 2.6,  4. ,  1.2],
       [ 2.3,  3.3,  1. ],
       [ 2.7,  4.2,  1.3],
       [ 3. ,  4.2,  1.2],
       [ 2.9,  4.2,  1.3],
       [ 2.9,  4.3,  1.3],
       [ 2.5,  3. ,  1.1],
       [ 2.8,  4.1,  1.3],
       [ 3.3,  6. ,  2.5],
       [ 2.7,  5.1,  1.9],
       [ 3. ,  5.9,  2.1],
       [ 2.9,  5.6,  1.8],
       [ 3. ,  5.8,  2.2],
       [ 3. ,  6.6,  2.1],
       [ 2.5,  4.5,  1.7],
       [ 2.9,  6.3,  1.8],
       [ 2.5,  5.8,  1.8],
       [ 3.6,  6.1,  2.5],
       [ 3.2,  5.1,  2. ],
       [ 2.7,  5.3,  1.9],
       [ 3. ,  5.5,  2.1],
       [ 2.5,  5. ,  2. ],
       [ 2.8,  5.1,  2.4],
       [ 3.2,  5.3,  2.3],
       [ 3. ,  5.5,  1.8],
       [ 3.8,  6.7,  2.2],
       [ 2.6,  6.9,  2.3],
       [ 2.2,  5. ,  1.5],
       [ 3.2,  5.7,  2.3],
       [ 2.8,  4.9,  2. ],
       [ 2.8,  6.7,  2. ],
       [ 2.7,  4.9,  1.8],
       [ 3.3,  5.7,  2.1],
       [ 3.2,  6. ,  1.8],
       [ 2.8,  4.8,  1.8],
       [ 3. ,  4.9,  1.8],
       [ 2.8,  5.6,  2.1],
       [ 3. ,  5.8,  1.6],
       [ 2.8,  6.1,  1.9],
       [ 3.8,  6.4,  2. ],
       [ 2.8,  5.6,  2.2],
       [ 2.8,  5.1,  1.5],
       [ 2.6,  5.6,  1.4],
       [ 3. ,  6.1,  2.3],
       [ 3.4,  5.6,  2.4],
       [ 3.1,  5.5,  1.8],
       [ 3. ,  4.8,  1.8],
       [ 3.1,  5.4,  2.1],
       [ 3.1,  5.6,  2.4],
       [ 3.1,  5.1,  2.3],
       [ 2.7,  5.1,  1.9],
       [ 3.2,  5.9,  2.3],
       [ 3.3,  5.7,  2.5],
       [ 3. ,  5.2,  2.3],
       [ 2.5,  5. ,  1.9],
       [ 3. ,  5.2,  2. ],
       [ 3.4,  5.4,  2.3],
       [ 3. ,  5.1,  1.8]])

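The warning above notes that transform/fit_transform on estimators is deprecated and points to SelectFromModel. A hedged sketch of that replacement; note the default threshold may keep a different number of features than the deprecated call did:

from sklearn.feature_selection import SelectFromModel
selector = SelectFromModel(m, prefit=True)   # reuse the already-fitted perceptron
X_reduced = selector.transform(iris.data)    # keeps features with large coefficients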
In [12]:
#listing the classifier's hyperparameters and their current values
m.get_params()


Out[12]:
{'alpha': 0.0001,
 'class_weight': None,
 'eta0': 1.0,
 'fit_intercept': True,
 'n_iter': 5,
 'n_jobs': 1,
 'penalty': None,
 'random_state': 0,
 'shuffle': True,
 'verbose': 0,
 'warm_start': False}

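get_params has a counterpart, set_params, which updates hyperparameters in place (useful inside grid searches). A minimal sketch; the new values here are arbitrary illustrations, not tuned settings:

m.set_params(eta0=0.5, n_iter=10)   # returns the estimator itself
m.get_params()['eta0']              # 0.5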
In [13]:
#partial_fit performs a single incremental pass over the data (online learning)
m.partial_fit(iris.data,iris.target)


Out[13]:
Perceptron(alpha=0.0001, class_weight=None, eta0=1.0, fit_intercept=True,
      n_iter=5, n_jobs=1, penalty=None, random_state=0, shuffle=True,
      verbose=0, warm_start=False)

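partial_fit works here only because the model was already fitted; in the usual out-of-core setting it is called on a fresh estimator, and the first call must be given the full set of class labels. A sketch under that assumption (the 5-way batch split is arbitrary):

import numpy as np
clf = Perceptron()
classes = np.unique(iris.target)
for X_batch, y_batch in zip(np.array_split(iris.data, 5),
                            np.array_split(iris.target, 5)):
    clf.partial_fit(X_batch, y_batch, classes=classes)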
In [14]:
#predicting class labels for the samples
m.predict(iris.data)


Out[14]:
array([1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1,
       0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])

In [15]:
#mean accuracy of the classifier on its own training data
m.score(iris.data,iris.target)


Out[15]:
0.42666666666666669

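score returns the mean accuracy on the given data, so the 0.43 above reflects the mostly single-class predictions in Out[14]. An equivalent computation as a sketch (assuming numpy is imported as np):

import numpy as np
np.mean(m.predict(iris.data) == iris.target)   # same value as m.score(...)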
In [16]:
#converting the coefficient matrix to sparse (CSR) format
m.sparsify()


Out[16]:
Perceptron(alpha=0.0001, class_weight=None, eta0=1.0, fit_intercept=True,
      n_iter=5, n_jobs=1, penalty=None, random_state=0, shuffle=True,
      verbose=0, warm_start=False)
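
sparsify() stores coef_ as a SciPy CSR matrix, which only pays off when most coefficients are zero (for example after training with an L1 penalty). A quick check, mirroring the densify sketch earlier:

from scipy import sparse
sparse.issparse(m.coef_)   # True after sparsify()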