In [1]:
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt


Automatically created module for IPython interactive environment

In [2]:
from sklearn.datasets import make_multilabel_classification
from sklearn.multiclass import OneVsRestClassifier # One-vs-the-rest (OvR) multiclass/multilabel strategy
from sklearn.svm import SVC # C-Support Vector Classification
from sklearn.preprocessing import LabelBinarizer # Binarize labels in a one-vs-all fashion
from sklearn.decomposition import PCA # primary component analysis
from sklearn.cross_decomposition import CCA # canonical component analysis

In [3]:
def plot_hyperplane(clf, min_x, max_x, linestyle, label):
    # get the separating hyperplane
    w = clf.coef_[0]
    a = -w[0] / w[1]
    xx = np.linspace(min_x - 5, max_x + 5)  # make sure the line is long enough
    yy = a * xx - (clf.intercept_[0]) / w[1]
    plt.plot(xx, yy, linestyle, label=label)

In [4]:
def plot_subfigure(X, Y, subplot, title, transform):
    if transform == "pca":
        X = PCA(n_components=2).fit_transform(X)
    elif transform == "cca":
        X = CCA(n_components=2).fit(X, Y).transform(X)
    else:
        raise ValueError

    min_x = np.min(X[:, 0])
    max_x = np.max(X[:, 0])

    min_y = np.min(X[:, 1])
    max_y = np.max(X[:, 1])

    classif = OneVsRestClassifier(SVC(kernel='linear'))
    classif.fit(X, Y)

    plt.subplot(2, 2, subplot)
    plt.title(title)

    zero_class = np.where(Y[:, 0])
    one_class = np.where(Y[:, 1])
    plt.scatter(X[:, 0], X[:, 1], s=40, c='gray')
    plt.scatter(X[zero_class, 0], X[zero_class, 1], s=160, edgecolors='b',
               facecolors='none', linewidths=2, label='Class 1')
    plt.scatter(X[one_class, 0], X[one_class, 1], s=80, edgecolors='orange',
               facecolors='none', linewidths=2, label='Class 2')

    plot_hyperplane(classif.estimators_[0], min_x, max_x, 'k--',
                    'Boundary\nfor class 1')
    plot_hyperplane(classif.estimators_[1], min_x, max_x, 'k-.',
                    'Boundary\nfor class 2')
    plt.xticks(())
    plt.yticks(())

    plt.xlim(min_x - .5 * max_x, max_x + .5 * max_x)
    plt.ylim(min_y - .5 * max_y, max_y + .5 * max_y)
    if subplot == 2:
        plt.xlabel('First principal component')
        plt.ylabel('Second principal component')
        plt.legend(loc="upper left")

In [5]:
plt.figure(figsize=(8, 6))


Out[5]:
<matplotlib.figure.Figure at 0x16466fd0>

In [39]:
# X : array or sparse CSR matrix of shape [n_samples, n_features]
# The generated samples.
# Y : tuple of lists or array of shape [n_samples, n_classes]
# The label sets.
# Generate a random multilabel classification problem.
# For each sample, the generative process is:
# pick the number of labels: n ~ Poisson(n_labels)
# n times, choose a class c: c ~ Multinomial(theta)
# pick the document length: k ~ Poisson(length)
# k times, choose a word: w ~ Multinomial(theta_c)
# default: n_samples=100, n_features=20, n_classes=5, n_labels=2, length=50
X, Y = make_multilabel_classification(n_classes=2, n_labels=1,
                                      allow_unlabeled=True,
                                      return_indicator=True,
                                      random_state=1)

In [40]:
X.shape


Out[40]:
(100L, 20L)

In [41]:
Y.shape


Out[41]:
(100L, 2L)

In [42]:
X


Out[42]:
array([[ 5.,  3.,  2., ...,  2.,  2.,  3.],
       [ 4.,  2.,  3., ...,  5.,  2.,  1.],
       [ 0.,  0.,  3., ...,  2.,  4.,  3.],
       ..., 
       [ 1.,  2.,  2., ...,  6.,  2.,  4.],
       [ 0.,  0.,  1., ...,  2.,  2.,  2.],
       [ 2.,  2.,  4., ...,  1.,  2.,  2.]])

In [43]:
Y


Out[43]:
array([[0, 0],
       [0, 0],
       [0, 1],
       [0, 1],
       [1, 0],
       [0, 0],
       [1, 0],
       [0, 0],
       [0, 0],
       [1, 1],
       [1, 0],
       [1, 0],
       [0, 1],
       [1, 1],
       [0, 0],
       [0, 0],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 1],
       [0, 1],
       [1, 1],
       [0, 0],
       [1, 0],
       [1, 1],
       [0, 0],
       [0, 0],
       [1, 1],
       [0, 0],
       [1, 1],
       [1, 1],
       [0, 1],
       [0, 0],
       [1, 0],
       [0, 1],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 0],
       [0, 1],
       [0, 1],
       [1, 0],
       [0, 0],
       [0, 0],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 0],
       [1, 1],
       [1, 0],
       [1, 1],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 1],
       [0, 1],
       [0, 1],
       [1, 1],
       [0, 0],
       [1, 1],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 1],
       [0, 1],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 0],
       [0, 0],
       [0, 0],
       [1, 1],
       [0, 0],
       [0, 1],
       [0, 0],
       [0, 0],
       [0, 0],
       [0, 0],
       [1, 1],
       [0, 1],
       [0, 0],
       [1, 1],
       [0, 1],
       [1, 0],
       [0, 1],
       [0, 0],
       [0, 0],
       [0, 0],
       [0, 1],
       [0, 0],
       [1, 1],
       [0, 1],
       [1, 0],
       [1, 0],
       [0, 1],
       [0, 0],
       [1, 1],
       [0, 0]])

In [13]:
plot_subfigure(X, Y, 1, "With unlabeled samples + CCA", "cca")
plot_subfigure(X, Y, 2, "With unlabeled samples + PCA", "pca")

In [14]:
X, Y = make_multilabel_classification(n_classes=2, n_labels=1,
                                      allow_unlabeled=False,
                                      return_indicator=True,
                                      random_state=1)

In [15]:
plot_subfigure(X, Y, 3, "Without unlabeled samples + CCA", "cca")
plot_subfigure(X, Y, 4, "Without unlabeled samples + PCA", "pca")


C:\Anaconda\lib\site-packages\sklearn\cross_decomposition\pls_.py:298: UserWarning: X scores are null at iteration 1
  warnings.warn('X scores are null at iteration %s' % k)
C:\Anaconda\lib\site-packages\IPython\kernel\__main__.py:4: RuntimeWarning: divide by zero encountered in double_scalars
C:\Anaconda\lib\site-packages\IPython\kernel\__main__.py:6: RuntimeWarning: divide by zero encountered in double_scalars
C:\Anaconda\lib\site-packages\IPython\kernel\__main__.py:6: RuntimeWarning: invalid value encountered in subtract
C:\Anaconda\lib\site-packages\matplotlib\axes\_base.py:2809: UserWarning: Attempting to set identical bottom==top results
in singular transformations; automatically expanding.
bottom=0.0, top=0.0
  'bottom=%s, top=%s') % (bottom, top))

In [17]:
plt.subplots_adjust(.04, .02, .97, .94, .09, .2)

In [18]:
plt.show()

In [ ]: