In [2]:
import mglearn
import numpy as np
from sklearn import linear_model
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline
from sklearn.neighbors import KNeighborsClassifier

# Generate sample data: points on a cubic curve
X = np.sort(5 * np.random.rand(40, 1), axis=0)
y = (X ** 3).ravel()

# Perturb some of the inputs and targets with noise
X[::4] += 0.5 - np.random.rand(1)
y[::5] += 0.5 - np.random.rand(8)

# Split the (sorted) data: first 19 samples for training, the rest for testing
X_train, X_test = X[:19], X[19:]
y_train, y_test = y[:19], y[19:]

# Degree-3 polynomial regression: feature expansion followed by a linear model
model = Pipeline([('poly', PolynomialFeatures(degree=3)),
                  ('linear', LinearRegression(fit_intercept=False))])
model.fit(X_train, y_train)
pred = model.predict(X_test)


# The same expansion the pipeline applies internally, kept for inspection
poly = PolynomialFeatures(degree=3)
X_train_poly = poly.fit_transform(X_train)

# Plot the held-out points and the model's predictions on them
plt.scatter(X_test, y_test)
plt.plot(X_test, pred, color='green')
plt.show()

# R^2 of the polynomial fit on the held-out points
print(model.score(X_test, y_test))
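# Sketch (not in the original cell): predicting on a dense grid shows the full
# fitted cubic rather than only the scattered test points; the 0-5 range and
# the name `grid` are assumptions based on the data generation above.
grid = np.linspace(0, 5, 100).reshape(-1, 1)
plt.plot(grid, model.predict(grid), color='green')
plt.scatter(X_test, y_test)
plt.show()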

print ("---------K-Nearest Neighbors-------")
"""neighbors_settings=range(1,11)
for n_neighbors in neighbors_settings:
    clf=KNeighborsClassifier(n_neighbors=n_neighbors)
    clf.fit(X_train, y_train)
    training_accuracy.append(clf.score(X_train, y_train))
    test_accuracy.append(clf.score(X_test, y_test))


print (mglearn.plots.plot_knn_regression(n_neighbors=3))"""

from sklearn.neighbors import KNeighborsRegressor
from sklearn.model_selection import train_test_split

# k-nearest-neighbors regression on the wave dataset
X, y = mglearn.datasets.make_wave(n_samples=40)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

reg = KNeighborsRegressor(n_neighbors=3)
reg.fit(X_train, y_train)


0.943743942225
---------K-Nearest Neighbors-------
Out[2]:
KNeighborsRegressor(algorithm='auto', leaf_size=30, metric='minkowski',
          metric_params=None, n_jobs=1, n_neighbors=3, p=2,
          weights='uniform')
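
The fitted regressor is never evaluated above. A minimal follow-up sketch, assuming X_test and y_test are the held-out wave samples from the train_test_split in the previous cell:

In [ ]:
# Sketch: score the k-NN regressor on the held-out wave samples
print(reg.predict(X_test[:3]))    # predictions for the first three test points
print(reg.score(X_test, y_test))  # R^2 on the test set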

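The commented-out accuracy sweep above needs discrete class labels. A regression analogue, sketched here under the same wave-data split (the names training_r2 and test_r2 are new), tracks train and test R^2 as n_neighbors varies:

In [ ]:
# Sketch: R^2 on the training and test sets for k = 1..10
training_r2, test_r2 = [], []
for n_neighbors in range(1, 11):
    knn = KNeighborsRegressor(n_neighbors=n_neighbors).fit(X_train, y_train)
    training_r2.append(knn.score(X_train, y_train))
    test_r2.append(knn.score(X_test, y_test))

plt.plot(range(1, 11), training_r2, label='training R^2')
plt.plot(range(1, 11), test_r2, label='test R^2')
plt.xlabel('n_neighbors')
plt.ylabel('R^2')
plt.legend()
plt.show()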