@KalimAmzad
Created March 16, 2023 05:37
ROC & AUC
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_curve, auc
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
# Generate a synthetic dataset for classification
X, y = make_classification(n_samples=1000, n_classes=2, n_features=20, random_state=42)
# Split the dataset into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Train a classifier (e.g., Logistic Regression) on the training set
classifier = LogisticRegression(random_state=42)
classifier.fit(X_train, y_train)
# Get predicted probabilities for the positive class from the classifier
y_score = classifier.predict_proba(X_test)[:, 1]
# Calculate the ROC curve and AUC score
fpr, tpr, thresholds = roc_curve(y_test, y_score)
roc_auc = auc(fpr, tpr)
# Visualize the ROC curve and display the AUC score
plt.figure()
plt.plot(fpr, tpr, color='darkorange', lw=2, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver Operating Characteristic (ROC)')
plt.legend(loc="lower right")
plt.show()
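
As an optional sanity check (not part of the original gist), scikit-learn's roc_auc_score computes the same area directly from the labels and scores, without constructing the curve first. The snippet below assumes the y_test, y_score, and roc_auc variables defined above.

# Cross-check: AUC computed directly from labels and scores should match
# the trapezoidal value obtained from auc(fpr, tpr) above.
from sklearn.metrics import roc_auc_score
print("AUC via roc_auc_score: %0.4f" % roc_auc_score(y_test, y_score))
print("AUC via auc(fpr, tpr): %0.4f" % roc_auc)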