Skip to content

Instantly share code, notes, and snippets.

@slitayem
Last active July 15, 2020 08:25
Show Gist options
  • Save slitayem/f49ccb137c1c53c89bce904edfdc6543 to your computer and use it in GitHub Desktop.
Plot annotated confusion matrix for binary classifier
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
import numpy as np
def plot_confusion_matrix(cf_matrix, target_names=None):
    """Plot an annotated confusion matrix for a binary classifier.

    Each heatmap cell is annotated with its group name (TN/FP/FN/TP), the
    absolute count, and the percentage of the grand total. Precision,
    recall, accuracy and F1 score are rendered beneath the x-axis label.

    Args:
        cf_matrix: ndarray of shape (2, 2) — confusion matrix laid out as
            [[TN, FP], [FN, TP]] (sklearn `confusion_matrix` convention;
            row = true class, column = predicted class).
        target_names: optional sequence of the two class names, used as
            axis tick labels when provided.
    """
    group_names = ['TN', 'FP', 'FN', 'TP']
    flat = cf_matrix.flatten()
    group_counts = ["{0:.0f}".format(value) for value in flat]
    group_percentages = ["{0:.2%}".format(value)
                         for value in flat / np.sum(cf_matrix)]
    # One three-line annotation string per cell: name, count, percentage.
    labels = np.asarray(
        ["{}\n{}\n{}".format(name, count, pct)
         for name, count, pct in zip(group_names, group_counts,
                                     group_percentages)]
    ).reshape(2, 2)

    plt.figure(figsize=(8, 6))
    sns.heatmap(cf_matrix, annot=labels, fmt='', cmap='Blues',
                annot_kws={"size": 14})
    if target_names:
        # seaborn heatmap cells are centered at i + 0.5, not at i —
        # offset the ticks so the class labels align with the cells.
        tick_marks = np.arange(len(target_names)) + 0.5
        plt.xticks(tick_marks, target_names)
        plt.yticks(tick_marks, target_names)

    # Summary metrics derived from the matrix. Column 1 = predicted
    # positive, row 1 = actually positive.
    precision = cf_matrix[1, 1] / sum(cf_matrix[:, 1])
    recall = cf_matrix[1, 1] / sum(cf_matrix[1, :])
    accuracy = np.trace(cf_matrix) / float(np.sum(cf_matrix))
    f1_score = 2 * precision * recall / (precision + recall)
    # Fixed format string: original read "Precision.=..." and omitted the
    # "=" after "Recall", producing garbled output such as "Recall0.857".
    stats_text = ("Precision={:0.3f}\nRecall={:0.3f}\n\n"
                  "Accuracy={:0.3f}\nF1 Score={:0.3f}").format(
        precision, recall, accuracy, f1_score)
    plt.xlabel('Predicted label {}'.format(stats_text))
    plt.ylabel("True Label")
    plt.show()
# Example usage. NOTE(review): `y_label` (true labels) and `pred_label`
# (predicted labels) are not defined anywhere in this snippet — they must
# be supplied by the caller's environment (e.g. the output of a fitted
# classifier) before these lines run.
conf_matrix = confusion_matrix(y_label, pred_label)
plot_confusion_matrix(conf_matrix, target_names=['c1', 'c2'])
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment