In [26]:
%matplotlib inline
import numpy as np
scores = np.array([1.0, 2.0, 3.0])
def softmax(x):
    """Compute softmax values for each set of scores in x."""
    return np.exp(x) / np.sum(np.exp(x), axis=0)
print(softmax(scores))
# Plot softmax curves
import matplotlib.pyplot as plt
x = np.arange(-2.0, 6.0, 0.1)
# Each column of `scores` holds one set of three scores: the first score sweeps
# over x while the other two stay fixed at 1.0 and 0.2; softmax acts column-wise.
scores = np.vstack([x, np.ones_like(x), 0.2 * np.ones_like(x)])
plt.plot(x, softmax(scores).T, linewidth=2)
plt.show()
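The plain softmax above overflows once scores get large, because np.exp grows very fast. A common remedy, sketched below as an illustration (stable_softmax is not part of the original notebook), is to subtract the maximum score before exponentiating; the result is unchanged because softmax is invariant to adding a constant to every score.

def stable_softmax(x):
    """Softmax computed after shifting scores so the largest is zero."""
    shifted = x - np.max(x, axis=0)          # largest score becomes 0, so exp never overflows
    return np.exp(shifted) / np.sum(np.exp(shifted), axis=0)

print(stable_softmax(np.array([1000.0, 2000.0, 3000.0])))  # [0. 0. 1.] with no overflow warning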
In [27]:
# Cross entropy: -log(x) is the penalty for assigning probability x to the correct class
x = np.arange(0.05, 1.0, 0.05)   # start above 0, since log(0) is undefined
plt.plot(x, -np.log(x))
Out[27]:
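Cross entropy connects directly to the softmax output: for a one-hot label it reduces to -log of the probability assigned to the true class. A small illustrative check (not part of the original notebook), reusing the softmax function defined above:

label = np.array([0.0, 0.0, 1.0])            # one-hot: class 2 is the true class
probs = softmax(np.array([1.0, 2.0, 3.0]))   # predicted distribution for the earlier scores
print(-np.sum(label * np.log(probs)))        # same as -np.log(probs[2]), roughly 0.41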
In [29]:
# Floating-point precision: adding a million tiny increments to a huge number
# does not give exactly 1.0, because each addition is rounded to the nearest
# representable double near 10**9.
x = 1000000000
for i in range(1000000):
    x += 0.000001
print(x - 1000000000)   # prints about 0.953674316406, not 1.0
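The shortfall comes from mixing magnitudes: near 10**9 adjacent doubles are about 1.2e-7 apart, so part of every 0.000001 increment is rounded away. One workaround, sketched here as an illustration (not part of the original cell), is to accumulate the small values at their own magnitude first and add the total to the large number once:

x = 1000000000
small = 0.0
for i in range(1000000):
    small += 0.000001        # sum the increments at a comparable magnitude
x += small
print(x - 1000000000)        # prints 1.0: the error from mixing magnitudes is avoided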