Skip to content

Instantly share code, notes, and snippets.

@debonx
Last active December 19, 2018 10:53
Show Gist options
  • Save debonx/1d12924a90674519a46b86db86eb78be to your computer and use it in GitHub Desktop.
Example of Supervised Machine Learning with Support Vector Classifier from sklearn.
from sklearn.svm import SVC
from graph import points, labels

# Build a support-vector classifier with a linear decision boundary.
svc_model = SVC(kernel='linear')

# Fit the model on the labelled example points.
svc_model.fit(points, labels)

# Predict the class of two unseen points and show the result.
predictions = svc_model.predict([[3, 4], [6, 7]])
print(predictions)
import codecademylib3_seaborn
from sklearn.svm import SVC
from graph import points, labels
from sklearn.model_selection import train_test_split

# Hold out 20% of the data for validation; fixed random_state keeps the
# split reproducible between runs.
training_data, validation_data, training_labels, validation_labels = train_test_split(
    points, labels, train_size=0.8, test_size=0.2, random_state=100
)

# Create SVC classifier with a degree-2 polynomial kernel
# (non-linear decision boundary).
classifier = SVC(kernel='poly', degree=2)
classifier.fit(training_data, training_labels)

# Always important to validate the model with the .score() method
# (mean accuracy on the held-out validation set).
print(classifier.score(validation_data, validation_labels))
from data import points, labels
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Hold out 20% of the data for validation; fixed random_state keeps the
# split reproducible between runs.
training_data, validation_data, training_labels, validation_labels = train_test_split(
    points, labels, train_size=0.8, test_size=0.2, random_state=100
)

# Create SVC classifier with a Radial Basis Function (RBF) kernel.
# An rbf kernel has a gamma parameter. If gamma is large, the training data
# is more relevant, and as a result overfitting can occur.
classifier = SVC(kernel="rbf", gamma=0.1)
classifier.fit(training_data, training_labels)

# Test the score (mean accuracy on the held-out validation set).
print(classifier.score(validation_data, validation_labels))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment