In [1]:
# (p. 184) Ensemble Learning
# 1) generate data
%matplotlib inline
import matplotlib
import matplotlib.pyplot as plt

from sklearn.datasets import make_moons

X, y = make_moons(n_samples=5000, noise=0.2)

# generate test and training sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test= train_test_split(X, y, test_size=0.2)

# visualize data

plt.scatter(X[:,0], X[:,1], s=5, c=y)
plt.title("Moons Dataset")


Out[1]:
<matplotlib.text.Text at 0x7f5467d9e5f8>

In [2]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC

log_clf = LogisticRegression()
rnd_clf = RandomForestClassifier()
svm_clf = SVC()

voting_clf = VotingClassifier(
    estimators = [
        ('lr', log_clf),
        ('rf', rnd_clf),
        ('svc', svm_clf)
    ],
    voting = 'hard'
)
voting_clf.fit(X, y)

from sklearn.metrics import accuracy_score
for clf in (log_clf, rnd_clf, svm_clf, voting_clf):
    clf.fit(X_train, y_train)
    y_pred = clf.predict(X_test)
    acc = accuracy_score(y_test, y_pred)
    print(clf.__class__.__name__, acc)


LogisticRegression 0.855
RandomForestClassifier 0.954
SVC 0.958
VotingClassifier 0.956

In [3]:
# (p. 186) Bagging and pasting
from sklearn.ensemble import BaggingClassifier
from sklearn.tree import DecisionTreeClassifier

bag_clf = BaggingClassifier(
    DecisionTreeClassifier(), n_estimators=500,
    max_samples=100, bootstrap=True, n_jobs=-1,
    oob_score=True
)
bag_clf.fit(X, y)
bag_clf.oob_score_


Out[3]:
0.95640000000000003

In [4]:
y_pred = bag_clf.predict(X_test)
accuracy_score(y_test, y_pred)


Out[4]:
0.95199999999999996

In [5]:
# (p. 189) RandomForestClassifier
from sklearn.ensemble import RandomForestClassifier
rnd_clf = RandomForestClassifier(n_estimators=500, max_leaf_nodes=16, n_jobs=-1)
rnd_clf.fit(X, y)
y_pred_rf = rnd_clf.predict(X_test)
accuracy_score(y_test, y_pred_rf)


Out[5]:
0.95899999999999996

In [ ]: