-
-
Save cyphunk/6c255fa05dd30e69f438a930faeb53fe to your computer and use it in GitHub Desktop.
softmax function implementation in js
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Fork & examples for the one-line version by @vladimir-ivanov:
// let softmax = (arr) => (index) => Math.exp(arr[index]) / arr.map(y => Math.exp(y)).reduce((a, b) => a + b);
//
// Also see comments for improvements

/**
 * Numerically stable softmax: maps an array of real-valued scores to a
 * probability distribution (non-negative entries summing to 1).
 *
 * Improvements over the original version:
 *  - The denominator (sum of exponentials) is computed once instead of
 *    once per element (was O(n^2), now O(n)).
 *  - The maximum input is subtracted from every value before
 *    exponentiating. Softmax is invariant under constant shifts, so the
 *    result is mathematically unchanged, but this prevents Math.exp from
 *    overflowing to Infinity for large inputs (e.g. softmax([1000, 1000])
 *    returned [NaN, NaN] before; now it returns [0.5, 0.5]).
 *
 * @param {number[]} arr - array of real-valued scores (may be empty)
 * @returns {number[]} probabilities, same length as arr
 */
function softmax(arr) {
  if (arr.length === 0) return [];
  const max = Math.max(...arr); // shift for numerical stability
  const exps = arr.map(function (value) { return Math.exp(value - max); });
  const sum = exps.reduce(function (a, b) { return a + b; }, 0);
  return exps.map(function (value) { return value / sum; });
}
// Example 1: classifier output that is already close to one-hot.
// Declared with const instead of an implicit global assignment.
const example1 = [
  0.9780449271202087,
  0.01590355671942234,
  0.0019390975357964635,
  0.0015482910675927997,
  0.0012942816829308867,
  0.0006004497990943491,
  0.0004827099328394979,
  0.0001868270628619939,
];
const softmax1 = softmax(example1);
// Example 2: { prob, cat } records (apparently from an age classifier);
// only the prob field feeds softmax. Declared with const instead of an
// implicit global assignment.
const example2 = [
  { prob: 0.32289665937423706, cat: '25_32' },
  { prob: 0.15404804050922394, cat: '38_43' },
  { prob: 0.03673655539751053, cat: '4_6' },
  { prob: 0.01545996405184269, cat: '48_53' },
  { prob: 0.011709162034094334, cat: '15_20' },
  { prob: 0.008010754361748695, cat: '8_13' },
  { last: true, prob: 0.0054732030257582664, cat: '60+' },
].map(function (v) { return v.prob; });
const softmax2 = softmax(example2);
// Example 3: a uniform distribution (all probs equal); softmax of a
// constant vector is again uniform. Declared with const instead of an
// implicit global assignment.
const example3 = [
  { prob: 0.125, cat: '25_32' },
  { prob: 0.125, cat: '38_43' },
  { prob: 0.125, cat: '15_20' },
  { prob: 0.125, cat: '8_13' },
  { prob: 0.125, cat: '4_6' },
  { prob: 0.125, cat: '48_53' },
  { prob: 0.125, cat: '60+' },
  { prob: 0.125, cat: '0_2' },
].map(function (v) { return v.prob; });
const softmax3 = softmax(example3);
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Math.exp(x)
grows very fast; if you pass a very large number for x,
the result overflows to Infinity. For example, Math.exp(1000)
overflows. Here is the trick used by the PyTorch, TensorFlow, and CUDA implementations:
because the softmax formula is invariant under constant shifts — mathematically, subtracting the same value from all inputs doesn't change the output — you can subtract the maximum input from every value before exponentiating, which keeps the exponentials in a safe range.