|
from collections import Counter

import numpy as np
# from scipy.spatial import distance
from sklearn import datasets
from sklearn import metrics
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
| 7 | + |
| 8 | + |
class KNNClassifier(object):
    """A from-scratch k-nearest-neighbors classifier.

    Mirrors the fit/predict interface of sklearn's KNeighborsClassifier.
    ``k`` defaults to 1, which reproduces the original single-nearest-neighbor
    behavior; larger ``k`` uses a majority vote over the k closest examples.
    """

    def __init__(self, k=1):
        # k=1 keeps backward compatibility with the original implementation.
        self.k = k
        self.X_train = None
        self.y_train = None

    def euc_distance(self, a, b):
        """Return the Euclidean distance between vectors ``a`` and ``b``."""
        return np.linalg.norm(a - b)
        # return distance.euclidean(a, b)

    def closest(self, row):
        """
        Return the predicted label for ``row``.

        For k=1 this is the label of the single closest training example;
        for k>1 it is the majority label among the k nearest examples
        (ties broken by whichever label is encountered first among them).
        :param row: feature vector to classify
        :return: predicted label
        """
        dist = [self.euc_distance(row, trainer) for trainer in self.X_train]
        if self.k == 1:
            # np.argmin scans once, unlike dist.index(min(dist)) which scans twice.
            return self.y_train[int(np.argmin(dist))]
        nearest = np.argsort(dist)[:self.k]
        labels = [self.y_train[i] for i in nearest]
        return Counter(labels).most_common(1)[0][0]

    def fit(self, training_data, training_labels):
        """Memorize the training set (lazy learner: no work happens here)."""
        self.X_train = training_data
        self.y_train = training_labels

    def predict(self, to_classify):
        """Return a list with one predicted label per row of ``to_classify``."""
        return [self.closest(row) for row in to_classify]
| 41 | + |
# Demo: train a classifier on the iris dataset and report held-out accuracy.
iris = datasets.load_iris()

features, labels = iris.data, iris.target

# Hold out a quarter of the samples for evaluation.
X_train, X_test, y_train, y_test = train_test_split(
    features, labels, test_size=0.25
)

# Swap in KNNClassifier() to exercise the scratch-built model instead.
classifier = KNeighborsClassifier()  # k=5 by default
classifier.fit(X_train, y_train)

results = classifier.predict(X_test)
score = metrics.accuracy_score(y_test, results)

print("Accuracy: {0:f}".format(score))