Forked from jerheff/binary_crossentropy_with_ranking.py
Experimental binary cross-entropy with ranking loss function
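In short: on top of the usual log loss, the function adds a squared-hinge penalty that pushes each positive example's raw score above the highest score assigned to any negative example in the batch.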
from keras import backend as K


def binary_crossentropy_with_ranking(y_true, y_pred):
    """Trying to combine ranking loss with numeric precision."""
    # first get the log loss like normal; K.binary_crossentropy takes
    # the target first, then the prediction
    logloss = K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1)

    # next, build a rank loss

    # clip the probabilities for numerical stability
    y_pred_clipped = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())

    # translate back into the raw scores (logits) before the sigmoid
    y_pred_score = K.log(y_pred_clipped / (1 - y_pred_clipped))

    # determine the maximum score for a zero (negative) outcome;
    # the comparison yields booleans, so cast to float before multiplying
    zero_outcome_mask = K.cast(y_true < 1, K.floatx())
    y_pred_score_zerooutcome_max = K.max(y_pred_score * zero_outcome_mask)

    # determine how much each score is above or below that maximum
    rankloss = y_pred_score - y_pred_score_zerooutcome_max

    # only keep losses for positive outcomes
    rankloss = rankloss * y_true

    # only keep losses where the score is below the max (clipped hinge)
    rankloss = K.square(K.clip(rankloss, -100, 0))

    # average the loss over just the positive outcomes; the +1 guards
    # against division by zero when a batch has no positives
    n_positive = K.sum(K.cast(y_true > 0, K.floatx()))
    rankloss = K.sum(rankloss, axis=-1) / (n_positive + 1)

    # an alternative to try: return (rankloss + 1) * logloss
    return rankloss + logloss
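A minimal usage sketch, assuming the function above is in scope; the toy model, input width, and random data below are illustrative, not part of the original gist:

import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# toy binary classifier; any model with a sigmoid output works
model = Sequential([
    Dense(16, activation="relu", input_shape=(10,)),
    Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss=binary_crossentropy_with_ranking)

# random placeholder data, purely for demonstration
X = np.random.rand(64, 10).astype("float32")
y = (np.random.rand(64, 1) > 0.5).astype("float32")
model.fit(X, y, epochs=1, batch_size=16)

Note that because the rank penalty takes K.max over the whole batch, the loss depends on batch composition: larger batches give a steadier estimate of the hardest-scoring negative.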