# KNN = (K) nearest neighbours

# (K) = K is the number of neighbouring points we consider; changing K
# changes the prediction of y

# With a very small K (e.g. K = 1) a KNN model overfits, tracking the
# training data points almost perfectly

# KNN uses Euclidean distance = the straight-line distance between points

# KNN MODEL EXAMPLE:

from sklearn.neighbors import KNeighborsRegressor

# Fit a K-nearest-neighbours regressor with K = 4 on the training split.
kmodel = KNeighborsRegressor(n_neighbors=4)
kmodel.fit(X_train, y_train)

# Predicted y values for the held-out test set.
predictions = kmodel.predict(X_test)
# score() returns the R^2 on the test set; capture it instead of
# silently discarding the return value.
test_score = kmodel.score(X_test, y_test)

# writing a for loop to run all the possibilities to discover the 
# perfect K for the Model

# Try every K from 2 to 19 and record the test-set R^2 for each, so we
# can pick the K that generalises best.
# NOTE(review): scoring on X_test here is model selection on the test
# set; a separate validation split (or cross-validation) would be more
# rigorous — confirm that is acceptable for this exercise.
scores = [
    KNeighborsRegressor(n_neighbors=k).fit(X_train, y_train).score(X_test, y_test)
    for k in range(2, 20)
]

# visualising in a plot our accuracy changes as we introduce more K points
# elbow point is around 10.0

import matplotlib.pyplot as plt

# Plot test-set R^2 against K; look for the "elbow" where adding more
# neighbours stops improving the score (around K = 10 here).
# Axis labels/title added so the elbow plot is readable on its own.
plt.figure(figsize=(5, 3))
plt.plot(range(2, 20), scores)
plt.xlabel("K (number of neighbours)")
plt.ylabel("R^2 score")
plt.title("KNN test score vs. K")
plt.show()