In [1]:
import numpy as np
from sklearn import datasets
In [27]:
class KNNClassifier(object):
    def __init__(self):
        self.X_train = None
        self.y_train = None

    def euc_distance(self, a, b):
        # Euclidean distance between two feature vectors.
        return np.linalg.norm(a - b)

    def closest(self, row):
        """
        Return the label of the closest point in the training dataset.
        This is an example implementation of kNN with k=1.
        """
        dists = [self.euc_distance(row, item) for item in self.X_train]
        nei = dists.index(min(dists))
        return self.y_train[nei]

    def fit(self, training_data, training_labels):
        self.X_train = training_data
        self.y_train = training_labels

    def predict(self, to_classify):
        predictions = []
        for row in to_classify:
            label = self.closest(row)
            predictions.append(label)
        return predictions
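The closest method above only looks at the single nearest neighbour (k=1). One way it could be extended to k > 1 is a majority vote over the k smallest distances; the sketch below is not part of the original class and assumes a hypothetical closest_k method attached afterwards, with k=3 chosen only for illustration.
In [ ]:
from collections import Counter

def closest_k(self, row, k=3):
    """
    Sketch only: return the most common label among the k nearest
    training points (majority vote). Assumes X_train/y_train were
    already set by fit().
    """
    dists = [self.euc_distance(row, item) for item in self.X_train]
    k_idx = np.argsort(dists)[:k]          # indices of the k smallest distances
    k_labels = [self.y_train[i] for i in k_idx]
    return Counter(k_labels).most_common(1)[0][0]

# Attach the sketch as an extra method (hypothetical, not used by predict()).
KNNClassifier.closest_k = closest_k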
In [2]:
iris = datasets.load_iris()
In [3]:
print(iris.target_names)
In [4]:
X = iris.data
y = iris.target
In [5]:
print(X)
print(y)
print(X.size)  # total number of values: 150 samples x 4 features = 600
In [32]:
from sklearn.model_selection import train_test_split
In [33]:
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
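The split above is random on every run. If a reproducible, class-balanced split is preferred, train_test_split also accepts random_state and stratify; a minimal alternative sketch (the seed value is arbitrary):
In [ ]:
# Optional: same 70/30 split, but seeded and stratified by class label.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42, stratify=y)
print(X_train.shape, X_test.shape)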
In [34]:
knn = KNNClassifier()
In [35]:
knn.fit(X_train, y_train)
In [36]:
result = knn.predict(X_test)
In [37]:
print(result)
In [38]:
print(y_test)
In [39]:
from sklearn import metrics
In [41]:
score = metrics.accuracy_score(y_pred=result, y_true=y_test)
In [43]:
print('{0:f}'.format(score))
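Accuracy is a single number; to see which classes the hand-written classifier confuses, sklearn's metrics.confusion_matrix can be applied to the same predictions (a short optional check):
In [ ]:
# Rows are true classes, columns are predicted classes.
print(metrics.confusion_matrix(y_true=y_test, y_pred=result))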
In [44]:
from sklearn.neighbors import KNeighborsClassifier
In [45]:
knn1 = KNeighborsClassifier(n_neighbors=1)
In [46]:
knn1.fit(X_train, y_train)
In [47]:
result1 = knn1.predict(X_test)
In [49]:
score1 = metrics.accuracy_score(y_true=y_test, y_pred=result1)
In [51]:
print('{0:f}'.format(score1))
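With k fixed at 1 in both implementations, a quick way to see the effect of k is to refit KNeighborsClassifier for a few values on the same split; the sketch below uses arbitrary odd values of k purely for illustration:
In [ ]:
# Compare accuracy for a handful of k values (results depend on the split).
for k in (1, 3, 5, 7):
    clf = KNeighborsClassifier(n_neighbors=k)
    clf.fit(X_train, y_train)
    acc = metrics.accuracy_score(y_true=y_test, y_pred=clf.predict(X_test))
    print('k={0}: {1:f}'.format(k, acc))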