import os
import numpy as np
from sklearn.metrics import log_loss
"""
- HARDCODED FORMULAE
- In this gist, we shall almost always use probabilities and not unscaled logits
"""
## BINARY CLASS CROSS ENTROPY
y_true = [0,0,0,1] # 4 observations
y_pred = [0.1, 0.1, 0.1, 0.7]
loss = 0.0
for true, pred in zip(y_true, y_pred):
    loss += (true * np.log(pred) + (1.0-true) * np.log(1.0 - pred))
print ('\nHardcoded : Binary Class Loss :', -1 * loss / len(y_true)) # 0.168189122728
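## The same binary loss as a vectorized NumPy expression (a minimal sketch equivalent to the loop above)
y_true = np.array([0, 0, 0, 1])
y_pred = np.array([0.1, 0.1, 0.1, 0.7])
loss = -np.mean(y_true * np.log(y_pred) + (1.0 - y_true) * np.log(1.0 - y_pred))
print ('Hardcoded (vectorized) : Binary Class Loss :', loss) # 0.168189122728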
## MULTI-CLASS / CATEGORICAL CROSS ENTROPY ERROR
y_true = [4] # 1 observation
y_pred = [[0.1, 0.1, 0.1, 0.7]]
loss = 0.0
for true, pred in zip(y_true, y_pred):
    loss += np.log(pred[true-1]) # labels are 1-indexed, so shift to a 0-indexed position
print ('Hardcoded : Multi Class Loss : ', -1 * loss / len(y_true)) # 0.356674943939
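## Equivalent sketch with a one-hot label, the same encoding used by the TF / Keras examples further down:
## per observation the loss is -sum(one_hot * log(p))
y_true_onehot = np.array([[0, 0, 0, 1]])
y_pred = np.array([[0.1, 0.1, 0.1, 0.7]])
loss = -np.sum(y_true_onehot * np.log(y_pred), axis=1)
print ('Hardcoded (one-hot) : Multi Class Loss : ', np.mean(loss)) # 0.356674943939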
"""
LIBRARY 1
- scikit-learn : http://scikit-learn.org/stable/modules/generated/sklearn.metrics.log_loss.html
- scikit-learn exposes a single `log_loss()` function that handles both binary and categorical/multi-class cross entropy
"""
## BINARY CLASS CROSS ENTROPY
y_true = [0,0,0,1] #4 observations
y_pred = [0.1, 0.1, 0.1, 0.7]
loss = log_loss(y_true, y_pred, labels = [0,1])
print ('\nScikit : Binary Class Loss : ', loss) # Loss = 0.1681891227280528
## MULTI-CLASS / CATEGORICAL CROSS ENTROPY
y_true = [4] #1 observation
y_pred = [[0.1, 0.1, 0.1, 0.7]]
loss = log_loss(y_true, y_pred, labels = [1,2,3,4])
print ('Scikit : Multi Class Loss : ', loss) # Loss = 0.356674943939
try:
    y_true = [[0,0,0,1]] # one-hot encoded (does not work for multi-class cross entropy in log_loss)
    y_pred = [[0.1, 0.1, 0.1, 0.7]]
    loss = log_loss(y_true, y_pred, labels = [1,2,3,4])
    print ('Scikit : Multi Class Loss : ', loss) # Erroneous way to call log_loss
except Exception as e:
    print ('Scikit : Multi Class Loss : one-hot y_true raises ->', e)
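## If y_true only exists in one-hot form, a possible workaround (sketch) is to convert it back to
## integer labels with np.argmax before calling log_loss(); the +1 maps to the 1-indexed labels used above
y_true_onehot = [[0,0,0,1]]
y_pred = [[0.1, 0.1, 0.1, 0.7]]
y_true_int = [int(np.argmax(row)) + 1 for row in y_true_onehot]
loss = log_loss(y_true_int, y_pred, labels = [1,2,3,4])
print ('Scikit : Multi Class Loss (one-hot -> labels) : ', loss) # 0.356674943939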
"""
LIBRARY 2
- tensorflow
"""
## There is no loss function that consumes multi-class probabilities directly (the softmax ops expect logits), so we write this operation ourselves
## BINARY CROSS ENTROPY ERROR
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]]) # one-hot encoding
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]])
    loss = tf.losses.log_loss(labels=y_true, predictions=y_pred)
    with tf.Session() as sess:
        loss = sess.run([loss])
        print ('Tensorflow : Binary Class Loss : ', loss, '\n') # [0.16818902]
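## Cross-check (a minimal sketch): the same binary loss written with raw TF1 ops,
## i.e. the mean of -[y*log(p) + (1-y)*log(1-p)] over the 4 entries
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32)
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32)
    loss = -tf.reduce_mean(y_true * tf.log(y_pred) + (1.0 - y_true) * tf.log(1.0 - y_pred))
    with tf.Session() as sess:
        print ('Tensorflow : Binary Class Loss (manual) : ', sess.run(loss)) # ~0.168189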
## MULTI-CLASS / CATEGORICAL CROSS ENTROPY ERROR
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32) # one-hot encoding
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32)
    loss = -tf.reduce_sum(y_true * tf.log(y_pred), 1)
    with tf.Session() as sess:
        loss = sess.run([loss])
        print ('\nTensorflow : Multi Class Loss : ', loss) # [array([ 0.35667497], dtype=float32)]
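## Alternative (sketch, assuming a TF 1.x runtime where tf.nn.softmax_cross_entropy_with_logits is available):
## that op expects unscaled logits; because y_pred already sums to 1, feeding tf.log(y_pred)
## as the logits recovers the same 0.35667 loss
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32)
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32)
    loss = tf.nn.softmax_cross_entropy_with_logits(labels=y_true, logits=tf.log(y_pred))
    with tf.Session() as sess:
        print ('Tensorflow : Multi Class Loss (softmax xent on logits) : ', sess.run(loss)) # [0.35667497]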
"""
LIBRARY 3
- Keras
"""
## BINARY CROSS ENTROPY ERROR
import tensorflow as tf
import keras.backend as K
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32) # one-hot encoding
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32)
    loss = K.binary_crossentropy(target=y_true, output=y_pred)
    with tf.Session() as sess:
        loss = sess.run([loss])
        print ('Keras : Binary Class Loss : ', loss, sum(loss[0][0])/int(y_true.shape[1])) # [array([[ 0.10536051, 0.10536051, 0.10536051, 0.35667494]], dtype=float32)] -> sum/4 = 0.16818911749999998
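## Same computation reduced to a scalar (sketch): wrapping in K.mean makes the printed value
## directly comparable to the hardcoded / scikit results above
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32)
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32)
    loss = K.mean(K.binary_crossentropy(target=y_true, output=y_pred))
    with tf.Session() as sess:
        print ('Keras : Binary Class Loss (mean) : ', sess.run(loss)) # ~0.168189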
## MULTI-CLASS / CATEGORICAL CROSS ENTROPY ERROR
import tensorflow as tf
import keras.backend as K
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32) # one-hot encoding
    y_pred = tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32)
    loss = K.categorical_crossentropy(target=y_true, output=y_pred)
    with tf.Session() as sess:
        loss = sess.run([loss])
        print ('Keras : Multi Class Loss : ', loss) # [array([ 0.35667497], dtype=float32)]
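## Variant (sketch, assuming a Keras backend whose categorical_crossentropy accepts from_logits):
## if the network outputs unscaled logits instead of probabilities, pass from_logits=True;
## here tf.log(y_pred) stands in for the logits, so the result is the same 0.35667
with tf.device('/cpu:0'):
    y_true = tf.constant([[0,0,0,1]], dtype=tf.float32)
    y_logits = tf.log(tf.constant([[0.1, 0.1, 0.1, 0.7]], dtype=tf.float32))
    loss = K.categorical_crossentropy(target=y_true, output=y_logits, from_logits=True)
    with tf.Session() as sess:
        print ('Keras : Multi Class Loss (from_logits) : ', sess.run(loss)) # [0.35667497]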
"""
Other Examples
"""
## Other Examples
## - tf.nn.sparse_softmax_cross_entropy_with_logits() --> y_true does not have to be one-hot encoded
## - tf.nn.sigmoid_cross_entropy_with_logits(labels = y_true, logits =y_pred)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
print ('\n')
for i in range(4):
    y_true = tf.constant([i], dtype=tf.int32)
    y_pred = tf.constant([[1, 10, 20, 24]], dtype=tf.float32)
    loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y_true, logits=y_pred)
    with tf.Session() as sess:
        loss = sess.run([loss])
        print (' - sparse_softmax_cross_entropy_with_logits : ', loss)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
print ('\n')
for i in range(4):
    y_true = [[0,0,0,0]]
    y_true[0][i] = 1 # one-hot encode class i
    y_true = tf.constant(y_true, dtype=tf.float32)
    y_pred = tf.constant([[1, 10, 20, 24]], dtype=tf.float32)
    loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=y_true, logits=y_pred)
    with tf.Session() as sess:
        loss = sess.run([loss])
        print (' - sigmoid_cross_entropy_with_logits :', loss, sum(loss[0][0])/4)