In [32]:
import pandas as pd
import pylab
import matplotlib.pyplot as plt
import matplotlib
matplotlib.style.use('ggplot')
#import seaborn as sns
import numpy as np
%matplotlib inline
In [41]:
import numpy as np

scores = [3.0, 1.0, 0.2]
scores = np.array(scores) * 10  # scale the scores up (a plain Python list * 10 would just repeat the list)

def softmax(scores):
    """Compute softmax values for the given scores."""
    vector = np.exp(scores)
    # divide along axis 0 so this also works for 2-D score arrays (one distribution per column)
    return vector / np.sum(vector, axis=0)

print(softmax(scores))
We can see that, as the scores are scaled up, the probability of the largest score is pushed toward 1, while the others are reduced toward 0.
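As a quick check, here is a minimal comparison (reusing the softmax defined above; the variable name raw is just for illustration) of the unscaled scores next to the scaled ones:
In [ ]:
raw = np.array([3.0, 1.0, 0.2])
print(softmax(raw))       # the largest score gets most, but not all, of the probability mass
print(softmax(raw * 10))  # the largest score's probability is pushed toward 1, the rest toward 0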
In [33]:
# Plot softmax curves
x = np.arange(-2.0, 6.0, 0.1)
scores = np.vstack([x, np.ones_like(x), 0.2 * np.ones_like(x)])
plt.plot(x, softmax(scores).T, linewidth=2)
plt.show()
If you multiply the scores by 10, the probabilities get pushed toward either 0 or 1; if you divide the scores by 10, the probabilities move toward a uniform distribution.
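A minimal sketch of the flattening direction (again reusing the softmax defined above; base is an illustrative variable name):
In [ ]:
base = np.array([3.0, 1.0, 0.2])
print(softmax(base / 10))  # the scaled scores are nearly indistinguishable, so the output moves toward [1/3, 1/3, 1/3]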
In [24]:
np.exp(scores)
Out[24]: