Python k-nearest neighbors (KNN) implementation with a matplotlib plot of the relationship between K and classification accuracy. Odd K values from 1 to 25 are tested; for each K, 100 accuracy rates (one per random train/test split) are recorded and plotted.
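For orientation, here is a condensed, self-contained sketch of the core experiment for a single K. The helper knn_accuracy is not part of the gist; the sketch assumes Python 3.8+ (for math.dist) and that knn.txt holds numeric CSV rows with the class label in the last column, as in the data file below. The full script with the K sweep, timing, and plotting follows.

import csv
import math
import random
from collections import Counter

def knn_accuracy(path='knn.txt', k=3, split=0.8):
    # Load the rows and randomly divide them into train and test sets.
    with open(path) as f:
        rows = [[float(v) for v in r] for r in csv.reader(f) if r]
    train, test = [], []
    for row in rows:
        (train if random.random() < split else test).append(row)
    # Classify each test row by majority vote among its k nearest training rows.
    hits = 0
    for t in test:
        nearest = sorted(train, key=lambda r: math.dist(r[:-1], t[:-1]))[:k]
        predicted = Counter(r[-1] for r in nearest).most_common(1)[0][0]
        hits += predicted == t[-1]
    return 100.0 * hits / len(test)

# Example usage: print(knn_accuracy(k=3))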
import csv
import random
import math
import operator
from time import time
def loadDataset(filename, divFactor, trainData, testData):
    """Read the CSV file and randomly split its rows into trainData/testData.

    Each row goes to the training set with probability divFactor,
    otherwise to the test set. The caller passes in both lists.
    """
    with open(filename, 'r') as csvfile:
        lines = csv.reader(csvfile)
        dataset = list(lines)
        for i in range(len(dataset)):
            if not dataset[i]:
                continue  # skip blank lines
            for j in range(len(dataset[0])):
                dataset[i][j] = float(dataset[i][j])
            if random.random() < divFactor:
                trainData.append(dataset[i])
            else:
                testData.append(dataset[i])
def Distance(instance1, instance2, length):
    """Euclidean distance over the first `length` attributes of two instances."""
    distance = 0
    for i in range(length):
        distance += pow(instance1[i] - instance2[i], 2)
    return math.sqrt(distance)
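# Illustrative example (not in the original gist): for two instances whose
# last field is the class label,
#   Distance([1.0, 2.0, 'a'], [4.0, 6.0, 'b'], 2) == 5.0
# since sqrt((1.0 - 4.0)**2 + (2.0 - 6.0)**2) = sqrt(25.0) = 5.0.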
def kNearestNeighbors(trainData, test, k):
    """Return the k training instances closest to `test` (last column is the class)."""
    distances = []
    length = len(test) - 1  # compare feature columns only, not the class label
    for i in range(len(trainData)):
        currentDistance = Distance(test, trainData[i], length)
        distances.append((trainData[i], currentDistance))
    distances.sort(key=operator.itemgetter(1))
    kNeighbors = []
    for i in range(k):
        kNeighbors.append(distances[i][0])
    return kNeighbors
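# Illustrative example (not in the original gist): with
#   trainData = [[0.0, 0.0, 1], [1.0, 1.0, 1], [5.0, 5.0, 2]]
#   kNearestNeighbors(trainData, [0.5, 0.5, 1], 2)
# returns the two class-1 rows, since both lie about 0.71 away from the test
# point while [5.0, 5.0, 2] lies about 6.36 away.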
def Classify(neighbors):
    """Majority vote over the class labels (last column) of the neighbours."""
    classVotesDict = {}
    for i in range(len(neighbors)):
        someClass = neighbors[i][-1]
        if someClass in classVotesDict:
            classVotesDict[someClass] += 1
        else:
            classVotesDict[someClass] = 1
    # sorted() is stable, so on a tie the class seen first among the neighbours wins
    sortedCVDict = sorted(classVotesDict.items(), key=operator.itemgetter(1), reverse=True)
    return sortedCVDict[0][0]
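# Illustrative example (not in the original gist):
#   Classify([[1.5, 13.6, 1], [1.5, 13.2, 1], [1.6, 12.9, 2]]) == 1
# because class 1 gets two of the three votes.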
def Accuracy(testData, predictions):
    """Percentage of test instances whose predicted class equals the real class."""
    hits = 0
    for i in range(len(testData)):
        if testData[i][-1] == predictions[i]:
            hits += 1
    return (hits / float(len(testData))) * 100.0
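# Illustrative example (not in the original gist):
#   Accuracy([[0.1, 1], [0.2, 2], [0.3, 1]], [1, 2, 2])  ->  66.66...
# because two of the three predictions match the real class labels.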
def feature_normalize(X):
    """Mean/range normalisation of every feature column (the class column is left alone)."""
    for col in range(0, len(X[0]) - 1):
        columnlist = [X[row][col] for row in range(len(X))]
        mean = sum(columnlist) / len(X)
        # the small constant keeps the divisor non-zero for constant columns
        std = 0.001 + max(columnlist) - min(columnlist)
        for row in range(0, len(X)):
            X[row][col] = (X[row][col] - mean) / std
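# Note (added): feature_normalize rescales each feature column x to
#   (x - mean(x)) / (max(x) - min(x) + 0.001),
# i.e. it centres the column and divides by its (slightly padded) range.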
def feature_normalize2(X):
    """Min-max normalisation of every feature column (the class column is left alone)."""
    for col in range(0, len(X[0]) - 1):
        columnlist = [X[row][col] for row in range(len(X))]
        # the small constant keeps the divisor non-zero for constant columns
        std = 0.0000001 + max(columnlist) - min(columnlist)
        for row in range(0, len(X)):
            X[row][col] = (X[row][col] - min(columnlist)) / std
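# Note (added): feature_normalize2 is the variant referenced by the commented-out
# calls below; it maps each feature column x to roughly [0, 1] via
#   (x - min(x)) / (max(x) - min(x) + 1e-7).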
############################################################################################################
# Experiment part: for each odd K from 1 to 25, run 100 random 80/20 splits
# and record the accuracy and running time of every run.
############################################################################################################
trainData = []
testData = []
K = range(1, 27, 2)    # odd K values: 1, 3, ..., 25
divFactor = 0.8        # fraction of rows that goes to the training set
averageAccuracy = 0
numberOfTests = 100    # random splits per K value
accuracies = []
predictions = []
times = []
KsForPlot = []
AccuraciesForPlot = []
for x in range(len(K)):
    for n in range(numberOfTests):
        loadDataset('knn.txt', divFactor, trainData, testData)
        #feature_normalize2(trainData)
        #feature_normalize2(testData)
        t0 = time()
        for i in range(len(testData)):
            kNeighbors = kNearestNeighbors(trainData, testData[i], K[x])
            predict = Classify(kNeighbors)
            predictions.append(predict)
        times.append(round(time() - t0, 3))
        accuracy = Accuracy(testData, predictions)
        accuracies.append(accuracy)
        KsForPlot.append(K[x])
        AccuraciesForPlot.append(accuracy)
        del trainData[:]
        del testData[:]
        del predictions[:]
    averageTime = sum(times) / float(len(times))
    averageAccuracy = sum(accuracies) / float(len(accuracies))
    print('Average Time for K=' + repr(K[x]) + " after " + repr(numberOfTests) + " tests with different data split: " + repr(round(averageTime, 6)) + ' sec')
    print('Average accuracy for K=' + repr(K[x]) + " after " + repr(numberOfTests) + " tests with different data split: " + repr(averageAccuracy) + ' %')
    print("")
    #print('Accuracies for each of the ' + repr(numberOfTests) + " tests with K = " + repr(K[x]) + ": " + repr(accuracies))
    del accuracies[:]
    del times[:]
print("") | |
print('Checking predictions with K = 3:') | |
K = 3 | |
loadDataset('knn.txt', divFactor, trainData, testData) | |
#feature_normalize2(trainData) | |
#feature_normalize2(testData) | |
for i in range(len(testData)): | |
kNeighbors = kNearestNeighbors(trainData, testData[i], K) | |
predict = Classify(kNeighbors) | |
predictions.append(predict) | |
print('Predicted class: ' + repr(predict) + ', Real class: ' + repr(testData[i][-1])) | |
accuracy = Accuracy(testData, predictions) | |
print('Accuracy: ' + repr(accuracy) + '%') | |
############################################################################################################
# Visualization part: scatter plot of all recorded accuracies against K
############################################################################################################
import matplotlib.pyplot as plt
import numpy as np

left, width = .35, 0.5
bottom, height = .27, .7
right = left + width
top = bottom + height

Ks = np.array(KsForPlot)
Accs = np.array(AccuraciesForPlot)

# One point per (K, accuracy) pair; 100 points for every K value
fig, ax = plt.subplots(figsize=(15, 10))
#ax.plot(Ks, Accs, 'b', label='Prediction')
ax.scatter(Ks, Accs, label='Accuracy per run', color='r')
ax.legend(loc=3)
ax.set_xlabel('K (from 1 to 25)')
ax.set_ylabel('Accuracy, %')
ax.set_title('Accuracy against K used. For each K, 100 accuracies are plotted')
#ax.text(right, top, 'Accuracy against K used. For each K, 100 accuracies are plotted', size=18, horizontalalignment='right', verticalalignment='top', transform=ax.transAxes)
plt.show()  # display the scatter plot
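# Optional (not in the original gist): to also keep a copy of the figure on
# disk, a matplotlib figure can be written out with savefig, e.g.
#   fig.savefig('knn_accuracy_vs_k.png', dpi=150)
# where the file name is only an example.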
knn.txt, the dataset loaded by the script above (the last column is the class label):
1,1.52101,13.64,4.49,1.10,71.78,0.06,8.75,0.00,0.00,1
2,1.51761,13.89,3.60,1.36,72.73,0.48,7.83,0.00,0.00,1
3,1.51618,13.53,3.55,1.54,72.99,0.39,7.78,0.00,0.00,1
4,1.51766,13.21,3.69,1.29,72.61,0.57,8.22,0.00,0.00,1
5,1.51742,13.27,3.62,1.24,73.08,0.55,8.07,0.00,0.00,1
6,1.51596,12.79,3.61,1.62,72.97,0.64,8.07,0.00,0.26,1
7,1.51743,13.30,3.60,1.14,73.09,0.58,8.17,0.00,0.00,1
8,1.51756,13.15,3.61,1.05,73.24,0.57,8.24,0.00,0.00,1
9,1.51918,14.04,3.58,1.37,72.08,0.56,8.30,0.00,0.00,1
10,1.51755,13.00,3.60,1.36,72.99,0.57,8.40,0.00,0.11,1
11,1.51571,12.72,3.46,1.56,73.20,0.67,8.09,0.00,0.24,1
12,1.51763,12.80,3.66,1.27,73.01,0.60,8.56,0.00,0.00,1
13,1.51589,12.88,3.43,1.40,73.28,0.69,8.05,0.00,0.24,1
14,1.51748,12.86,3.56,1.27,73.21,0.54,8.38,0.00,0.17,1
15,1.51763,12.61,3.59,1.31,73.29,0.58,8.50,0.00,0.00,1
16,1.51761,12.81,3.54,1.23,73.24,0.58,8.39,0.00,0.00,1
17,1.51784,12.68,3.67,1.16,73.11,0.61,8.70,0.00,0.00,1
18,1.52196,14.36,3.85,0.89,71.36,0.15,9.15,0.00,0.00,1
19,1.51911,13.90,3.73,1.18,72.12,0.06,8.89,0.00,0.00,1
20,1.51735,13.02,3.54,1.69,72.73,0.54,8.44,0.00,0.07,1
21,1.51750,12.82,3.55,1.49,72.75,0.54,8.52,0.00,0.19,1
22,1.51966,14.77,3.75,0.29,72.02,0.03,9.00,0.00,0.00,1
23,1.51736,12.78,3.62,1.29,72.79,0.59,8.70,0.00,0.00,1
24,1.51751,12.81,3.57,1.35,73.02,0.62,8.59,0.00,0.00,1
25,1.51720,13.38,3.50,1.15,72.85,0.50,8.43,0.00,0.00,1
26,1.51764,12.98,3.54,1.21,73.00,0.65,8.53,0.00,0.00,1
27,1.51793,13.21,3.48,1.41,72.64,0.59,8.43,0.00,0.00,1
28,1.51721,12.87,3.48,1.33,73.04,0.56,8.43,0.00,0.00,1
29,1.51768,12.56,3.52,1.43,73.15,0.57,8.54,0.00,0.00,1
30,1.51784,13.08,3.49,1.28,72.86,0.60,8.49,0.00,0.00,1
31,1.51768,12.65,3.56,1.30,73.08,0.61,8.69,0.00,0.14,1
32,1.51747,12.84,3.50,1.14,73.27,0.56,8.55,0.00,0.00,1
33,1.51775,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,1
34,1.51753,12.57,3.47,1.38,73.39,0.60,8.55,0.00,0.06,1
35,1.51783,12.69,3.54,1.34,72.95,0.57,8.75,0.00,0.00,1
36,1.51567,13.29,3.45,1.21,72.74,0.56,8.57,0.00,0.00,1
37,1.51909,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0.00,1
38,1.51797,12.74,3.48,1.35,72.96,0.64,8.68,0.00,0.00,1
39,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1
40,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1
41,1.51793,12.79,3.50,1.12,73.03,0.64,8.77,0.00,0.00,1
42,1.51755,12.71,3.42,1.20,73.20,0.59,8.64,0.00,0.00,1
43,1.51779,13.21,3.39,1.33,72.76,0.59,8.59,0.00,0.00,1
44,1.52210,13.73,3.84,0.72,71.76,0.17,9.74,0.00,0.00,1
45,1.51786,12.73,3.43,1.19,72.95,0.62,8.76,0.00,0.30,1
46,1.51900,13.49,3.48,1.35,71.95,0.55,9.00,0.00,0.00,1
47,1.51869,13.19,3.37,1.18,72.72,0.57,8.83,0.00,0.16,1
48,1.52667,13.99,3.70,0.71,71.57,0.02,9.82,0.00,0.10,1
49,1.52223,13.21,3.77,0.79,71.99,0.13,10.02,0.00,0.00,1
50,1.51898,13.58,3.35,1.23,72.08,0.59,8.91,0.00,0.00,1
51,1.52320,13.72,3.72,0.51,71.75,0.09,10.06,0.00,0.16,1
52,1.51926,13.20,3.33,1.28,72.36,0.60,9.14,0.00,0.11,1
53,1.51808,13.43,2.87,1.19,72.84,0.55,9.03,0.00,0.00,1
54,1.51837,13.14,2.84,1.28,72.85,0.55,9.07,0.00,0.00,1
55,1.51778,13.21,2.81,1.29,72.98,0.51,9.02,0.00,0.09,1
56,1.51769,12.45,2.71,1.29,73.70,0.56,9.06,0.00,0.24,1
57,1.51215,12.99,3.47,1.12,72.98,0.62,8.35,0.00,0.31,1
58,1.51824,12.87,3.48,1.29,72.95,0.60,8.43,0.00,0.00,1
59,1.51754,13.48,3.74,1.17,72.99,0.59,8.03,0.00,0.00,1
60,1.51754,13.39,3.66,1.19,72.79,0.57,8.27,0.00,0.11,1
61,1.51905,13.60,3.62,1.11,72.64,0.14,8.76,0.00,0.00,1
62,1.51977,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0.00,1
63,1.52172,13.51,3.86,0.88,71.79,0.23,9.54,0.00,0.11,1
64,1.52227,14.17,3.81,0.78,71.35,0.00,9.69,0.00,0.00,1
65,1.52172,13.48,3.74,0.90,72.01,0.18,9.61,0.00,0.07,1
66,1.52099,13.69,3.59,1.12,71.96,0.09,9.40,0.00,0.00,1
67,1.52152,13.05,3.65,0.87,72.22,0.19,9.85,0.00,0.17,1
68,1.52152,13.05,3.65,0.87,72.32,0.19,9.85,0.00,0.17,1
69,1.52152,13.12,3.58,0.90,72.20,0.23,9.82,0.00,0.16,1
70,1.52300,13.31,3.58,0.82,71.99,0.12,10.17,0.00,0.03,1
71,1.51574,14.86,3.67,1.74,71.87,0.16,7.36,0.00,0.12,2
72,1.51848,13.64,3.87,1.27,71.96,0.54,8.32,0.00,0.32,2
73,1.51593,13.09,3.59,1.52,73.10,0.67,7.83,0.00,0.00,2
74,1.51631,13.34,3.57,1.57,72.87,0.61,7.89,0.00,0.00,2
75,1.51596,13.02,3.56,1.54,73.11,0.72,7.90,0.00,0.00,2
76,1.51590,13.02,3.58,1.51,73.12,0.69,7.96,0.00,0.00,2
77,1.51645,13.44,3.61,1.54,72.39,0.66,8.03,0.00,0.00,2
78,1.51627,13.00,3.58,1.54,72.83,0.61,8.04,0.00,0.00,2
79,1.51613,13.92,3.52,1.25,72.88,0.37,7.94,0.00,0.14,2
80,1.51590,12.82,3.52,1.90,72.86,0.69,7.97,0.00,0.00,2
81,1.51592,12.86,3.52,2.12,72.66,0.69,7.97,0.00,0.00,2
82,1.51593,13.25,3.45,1.43,73.17,0.61,7.86,0.00,0.00,2
83,1.51646,13.41,3.55,1.25,72.81,0.68,8.10,0.00,0.00,2
84,1.51594,13.09,3.52,1.55,72.87,0.68,8.05,0.00,0.09,2
85,1.51409,14.25,3.09,2.08,72.28,1.10,7.08,0.00,0.00,2
86,1.51625,13.36,3.58,1.49,72.72,0.45,8.21,0.00,0.00,2
87,1.51569,13.24,3.49,1.47,73.25,0.38,8.03,0.00,0.00,2
88,1.51645,13.40,3.49,1.52,72.65,0.67,8.08,0.00,0.10,2
89,1.51618,13.01,3.50,1.48,72.89,0.60,8.12,0.00,0.00,2
90,1.51640,12.55,3.48,1.87,73.23,0.63,8.08,0.00,0.09,2
91,1.51841,12.93,3.74,1.11,72.28,0.64,8.96,0.00,0.22,2
92,1.51605,12.90,3.44,1.45,73.06,0.44,8.27,0.00,0.00,2
93,1.51588,13.12,3.41,1.58,73.26,0.07,8.39,0.00,0.19,2
94,1.51590,13.24,3.34,1.47,73.10,0.39,8.22,0.00,0.00,2
95,1.51629,12.71,3.33,1.49,73.28,0.67,8.24,0.00,0.00,2
96,1.51860,13.36,3.43,1.43,72.26,0.51,8.60,0.00,0.00,2
97,1.51841,13.02,3.62,1.06,72.34,0.64,9.13,0.00,0.15,2
98,1.51743,12.20,3.25,1.16,73.55,0.62,8.90,0.00,0.24,2
99,1.51689,12.67,2.88,1.71,73.21,0.73,8.54,0.00,0.00,2
100,1.51811,12.96,2.96,1.43,72.92,0.60,8.79,0.14,0.00,2
101,1.51655,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,2
102,1.51730,12.35,2.72,1.63,72.87,0.70,9.23,0.00,0.00,2
103,1.51820,12.62,2.76,0.83,73.81,0.35,9.42,0.00,0.20,2
104,1.52725,13.80,3.15,0.66,70.57,0.08,11.64,0.00,0.00,2
105,1.52410,13.83,2.90,1.17,71.15,0.08,10.79,0.00,0.00,2
106,1.52475,11.45,0.00,1.88,72.19,0.81,13.24,0.00,0.34,2
107,1.53125,10.73,0.00,2.10,69.81,0.58,13.30,3.15,0.28,2
108,1.53393,12.30,0.00,1.00,70.16,0.12,16.19,0.00,0.24,2
109,1.52222,14.43,0.00,1.00,72.67,0.10,11.52,0.00,0.08,2
110,1.51818,13.72,0.00,0.56,74.45,0.00,10.99,0.00,0.00,2
111,1.52664,11.23,0.00,0.77,73.21,0.00,14.68,0.00,0.00,2
112,1.52739,11.02,0.00,0.75,73.08,0.00,14.96,0.00,0.00,2
113,1.52777,12.64,0.00,0.67,72.02,0.06,14.40,0.00,0.00,2
114,1.51892,13.46,3.83,1.26,72.55,0.57,8.21,0.00,0.14,2
115,1.51847,13.10,3.97,1.19,72.44,0.60,8.43,0.00,0.00,2
116,1.51846,13.41,3.89,1.33,72.38,0.51,8.28,0.00,0.00,2
117,1.51829,13.24,3.90,1.41,72.33,0.55,8.31,0.00,0.10,2
118,1.51708,13.72,3.68,1.81,72.06,0.64,7.88,0.00,0.00,2
119,1.51673,13.30,3.64,1.53,72.53,0.65,8.03,0.00,0.29,2
120,1.51652,13.56,3.57,1.47,72.45,0.64,7.96,0.00,0.00,2
121,1.51844,13.25,3.76,1.32,72.40,0.58,8.42,0.00,0.00,2
122,1.51663,12.93,3.54,1.62,72.96,0.64,8.03,0.00,0.21,2
123,1.51687,13.23,3.54,1.48,72.84,0.56,8.10,0.00,0.00,2
124,1.51707,13.48,3.48,1.71,72.52,0.62,7.99,0.00,0.00,2
125,1.52177,13.20,3.68,1.15,72.75,0.54,8.52,0.00,0.00,2
126,1.51872,12.93,3.66,1.56,72.51,0.58,8.55,0.00,0.12,2
127,1.51667,12.94,3.61,1.26,72.75,0.56,8.60,0.00,0.00,2
128,1.52081,13.78,2.28,1.43,71.99,0.49,9.85,0.00,0.17,2
129,1.52068,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,2
130,1.52020,13.98,1.35,1.63,71.76,0.39,10.56,0.00,0.18,2
131,1.52177,13.75,1.01,1.36,72.19,0.33,11.14,0.00,0.00,2
132,1.52614,13.70,0.00,1.36,71.24,0.19,13.44,0.00,0.10,2
133,1.51813,13.43,3.98,1.18,72.49,0.58,8.15,0.00,0.00,2
134,1.51800,13.71,3.93,1.54,71.81,0.54,8.21,0.00,0.15,2
135,1.51811,13.33,3.85,1.25,72.78,0.52,8.12,0.00,0.00,2
136,1.51789,13.19,3.90,1.30,72.33,0.55,8.44,0.00,0.28,2
137,1.51806,13.00,3.80,1.08,73.07,0.56,8.38,0.00,0.12,2
138,1.51711,12.89,3.62,1.57,72.96,0.61,8.11,0.00,0.00,2
139,1.51674,12.79,3.52,1.54,73.36,0.66,7.90,0.00,0.00,2
140,1.51674,12.87,3.56,1.64,73.14,0.65,7.99,0.00,0.00,2
141,1.51690,13.33,3.54,1.61,72.54,0.68,8.11,0.00,0.00,2
142,1.51851,13.20,3.63,1.07,72.83,0.57,8.41,0.09,0.17,2
143,1.51662,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,2
144,1.51709,13.00,3.47,1.79,72.72,0.66,8.18,0.00,0.00,2
145,1.51660,12.99,3.18,1.23,72.97,0.58,8.81,0.00,0.24,2
146,1.51839,12.85,3.67,1.24,72.57,0.62,8.68,0.00,0.35,2
147,1.51769,13.65,3.66,1.11,72.77,0.11,8.60,0.00,0.00,3
148,1.51610,13.33,3.53,1.34,72.67,0.56,8.33,0.00,0.00,3
149,1.51670,13.24,3.57,1.38,72.70,0.56,8.44,0.00,0.10,3
150,1.51643,12.16,3.52,1.35,72.89,0.57,8.53,0.00,0.00,3
151,1.51665,13.14,3.45,1.76,72.48,0.60,8.38,0.00,0.17,3
152,1.52127,14.32,3.90,0.83,71.50,0.00,9.49,0.00,0.00,3
153,1.51779,13.64,3.65,0.65,73.00,0.06,8.93,0.00,0.00,3
154,1.51610,13.42,3.40,1.22,72.69,0.59,8.32,0.00,0.00,3
155,1.51694,12.86,3.58,1.31,72.61,0.61,8.79,0.00,0.00,3
156,1.51646,13.04,3.40,1.26,73.01,0.52,8.58,0.00,0.00,3
157,1.51655,13.41,3.39,1.28,72.64,0.52,8.65,0.00,0.00,3
158,1.52121,14.03,3.76,0.58,71.79,0.11,9.65,0.00,0.00,3
159,1.51776,13.53,3.41,1.52,72.04,0.58,8.79,0.00,0.00,3
160,1.51796,13.50,3.36,1.63,71.94,0.57,8.81,0.00,0.09,3
161,1.51832,13.33,3.34,1.54,72.14,0.56,8.99,0.00,0.00,3
162,1.51934,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,3
163,1.52211,14.19,3.78,0.91,71.36,0.23,9.14,0.00,0.37,3
164,1.51514,14.01,2.68,3.50,69.89,1.68,5.87,2.20,0.00,5
165,1.51915,12.73,1.85,1.86,72.69,0.60,10.09,0.00,0.00,5
166,1.52171,11.56,1.88,1.56,72.86,0.47,11.41,0.00,0.00,5
167,1.52151,11.03,1.71,1.56,73.44,0.58,11.62,0.00,0.00,5
168,1.51969,12.64,0.00,1.65,73.75,0.38,11.53,0.00,0.00,5
169,1.51666,12.86,0.00,1.83,73.88,0.97,10.17,0.00,0.00,5
170,1.51994,13.27,0.00,1.76,73.03,0.47,11.32,0.00,0.00,5
171,1.52369,13.44,0.00,1.58,72.22,0.32,12.24,0.00,0.00,5
172,1.51316,13.02,0.00,3.04,70.48,6.21,6.96,0.00,0.00,5
173,1.51321,13.00,0.00,3.02,70.70,6.21,6.93,0.00,0.00,5
174,1.52043,13.38,0.00,1.40,72.25,0.33,12.50,0.00,0.00,5
175,1.52058,12.85,1.61,2.17,72.18,0.76,9.70,0.24,0.51,5
176,1.52119,12.97,0.33,1.51,73.39,0.13,11.27,0.00,0.28,5
177,1.51905,14.00,2.39,1.56,72.37,0.00,9.57,0.00,0.00,6
178,1.51937,13.79,2.41,1.19,72.76,0.00,9.77,0.00,0.00,6
179,1.51829,14.46,2.24,1.62,72.38,0.00,9.26,0.00,0.00,6
180,1.51852,14.09,2.19,1.66,72.67,0.00,9.32,0.00,0.00,6
181,1.51299,14.40,1.74,1.54,74.55,0.00,7.59,0.00,0.00,6
182,1.51888,14.99,0.78,1.74,72.50,0.00,9.95,0.00,0.00,6
183,1.51916,14.15,0.00,2.09,72.74,0.00,10.88,0.00,0.00,6
184,1.51969,14.56,0.00,0.56,73.48,0.00,11.22,0.00,0.00,6
185,1.51115,17.38,0.00,0.34,75.41,0.00,6.65,0.00,0.00,6
186,1.51131,13.69,3.20,1.81,72.81,1.76,5.43,1.19,0.00,7
187,1.51838,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0.00,7
188,1.52315,13.44,3.34,1.23,72.38,0.60,8.83,0.00,0.00,7
189,1.52247,14.86,2.20,2.06,70.26,0.76,9.76,0.00,0.00,7
190,1.52365,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0.00,7
191,1.51613,13.88,1.78,1.79,73.10,0.00,8.67,0.76,0.00,7
192,1.51602,14.85,0.00,2.38,73.28,0.00,8.76,0.64,0.09,7
193,1.51623,14.20,0.00,2.79,73.46,0.04,9.04,0.40,0.09,7
194,1.51719,14.75,0.00,2.00,73.02,0.00,8.53,1.59,0.08,7
195,1.51683,14.56,0.00,1.98,73.29,0.00,8.52,1.57,0.07,7
196,1.51545,14.14,0.00,2.68,73.39,0.08,9.07,0.61,0.05,7
197,1.51556,13.87,0.00,2.54,73.23,0.14,9.41,0.81,0.01,7
198,1.51727,14.70,0.00,2.34,73.28,0.00,8.95,0.66,0.00,7
199,1.51531,14.38,0.00,2.66,73.10,0.04,9.08,0.64,0.00,7
200,1.51609,15.01,0.00,2.51,73.05,0.05,8.83,0.53,0.00,7
201,1.51508,15.15,0.00,2.25,73.50,0.00,8.34,0.63,0.00,7
202,1.51653,11.95,0.00,1.19,75.18,2.70,8.93,0.00,0.00,7
203,1.51514,14.85,0.00,2.42,73.72,0.00,8.39,0.56,0.00,7
204,1.51658,14.80,0.00,1.99,73.11,0.00,8.28,1.71,0.00,7
205,1.51617,14.95,0.00,2.27,73.30,0.00,8.71,0.67,0.00,7
206,1.51732,14.95,0.00,1.80,72.99,0.00,8.61,1.55,0.00,7
207,1.51645,14.94,0.00,1.87,73.11,0.00,8.67,1.38,0.00,7
208,1.51831,14.39,0.00,1.82,72.86,1.41,6.47,2.88,0.00,7
209,1.51640,14.37,0.00,2.74,72.85,0.00,9.45,0.54,0.00,7
210,1.51623,14.14,0.00,2.88,72.61,0.08,9.18,1.06,0.00,7
211,1.51685,14.92,0.00,1.99,73.06,0.00,8.40,1.59,0.00,7
212,1.52065,14.36,0.00,2.02,73.42,0.00,8.44,1.64,0.00,7
213,1.51651,14.38,0.00,1.94,73.61,0.00,8.48,1.57,0.00,7
214,1.51711,14.23,0.00,2.08,73.36,0.00,8.62,1.67,0.00,7