In [33]:
# softmax function
# 2017-03-30 jkang
# ref: Machine Learning by Andrew Ng on Coursera
import numpy as np

'''
e.g.
In a classification task,
z = W*x + b
where x has shape (examples) x (features) and z has shape (examples) x (classes);
softmax turns each row of z into a probability distribution over the classes.
'''

def softmax(x):
    # subtract the row-wise max for numerical stability (avoids overflow in exp);
    # use a copy so the caller's array is not modified in place
    x = x - np.max(x, axis=1, keepdims=True)
    exp_x = np.exp(x)
    # normalize each row so it sums to 1
    return exp_x / np.sum(exp_x, axis=1, keepdims=True)

z = np.random.random((10, 3)) # (10 examples) x (3 classes)
pred = softmax(z)
print(pred)


[[ 0.47740058  0.24317312  0.2794263 ]
 [ 0.22449371  0.32579308  0.44971321]
 [ 0.32122782  0.26051267  0.41825951]
 [ 0.20824966  0.47083396  0.32091638]
 [ 0.40125497  0.3779354   0.22080962]
 [ 0.34084004  0.33460656  0.32455339]
 [ 0.24321022  0.29527108  0.46151869]
 [ 0.23383066  0.3185628   0.44760654]
 [ 0.4192344   0.25025445  0.33051115]
 [ 0.31821301  0.34949982  0.33228717]]
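As a sanity check, every row of pred should sum to 1, and the result should match a reference implementation. The cell below is a minimal sketch (not part of the original notebook); it assumes SciPy >= 1.2 is available for scipy.special.softmax and reuses the softmax function defined above.

In [ ]:
# sketch: verify rows are valid probability distributions and that the
# max-subtraction trick stays stable for large inputs (assumes SciPy >= 1.2)
from scipy.special import softmax as scipy_softmax

z_check = np.random.random((10, 3))
pred_check = softmax(z_check)

# every row should sum to 1
assert np.allclose(pred_check.sum(axis=1), 1.0)

# agrees with SciPy's row-wise softmax
assert np.allclose(pred_check, scipy_softmax(z_check, axis=1))

# large logits would overflow np.exp without the row-max subtraction
big = np.array([[1000.0, 1000.0, 1000.0]])
print(softmax(big))  # stable: [[0.3333... 0.3333... 0.3333...]]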