Created
February 8, 2016 19:21
-
-
Save OneRaynyDay/77f184f0316050cf4d48 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
"""Softmax.""" | |
scores = [2.0, 1.0, 0.1] | |
import numpy as np | |
def softmax(x):
    """Compute softmax values for each set of scores in x.

    Args:
        x: array-like of scores. For a 2-D array, each column is treated
           as one set of scores (normalization runs along axis 0).

    Returns:
        np.ndarray of the same shape as x, with each column summing to 1.
    """
    x = np.asarray(x, dtype=float)
    # Subtract the per-column max before exponentiating: softmax is
    # invariant to a constant shift, and this prevents np.exp from
    # overflowing to inf (and the result becoming nan) for large scores.
    shifted = x - np.max(x, axis=0, keepdims=True)
    exp = np.exp(shifted)
    return exp / np.sum(exp, axis=0)
# Print the softmax of the example scores.
print(softmax(scores))

# Plot softmax curves: the first score sweeps from -2 to 6 while the
# other two stay fixed at 1.0 and 0.2.
import matplotlib.pyplot as plt
x = np.arange(-2.0, 6.0, 0.1)
print(x)
ones = np.ones_like(x)
scores = np.vstack([x, ones, 0.2 * ones])
plt.plot(x, softmax(scores).T, linewidth=2)
plt.show()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
A little softmax function for an ML lecture.