# In [2]:
import pdb
import theano
from theano import shared, tensor as T
from theano.tensor.nnet import conv2d, nnet
from theano.tensor.signal import pool
from keras.utils.np_utils import to_categorical
import numpy as np
# In [ ]:
def sigmoid(x):
    """Logistic function 1 / (1 + e^-x), applied elementwise."""
    return np.reciprocal(1.0 + np.exp(-x))
def sigmoid_d(x):
    """Derivative of the sigmoid: s(x) * (1 - s(x))."""
    s = sigmoid(x)
    return s * (1 - s)
# ---
def relu(x):
    """Rectified linear unit: elementwise max(x, 0)."""
    return np.maximum(x, 0.)
def relu_d(x):
    """Derivative of relu: 1.0 where x > 0, else 0.0 (subgradient 0 at x == 0)."""
    return np.greater(x, 0.).astype(np.float64)
# ---
def dist(a, b):
    """Squared error between a and b."""
    diff = a - b
    return diff ** 2
def dist_d(a, b):
    """Derivative of dist with respect to a: 2 * (a - b)."""
    return (a - b) * 2
# ---
# Clipping bound keeping log() and the division below finite.
eps = 1e-7

def x_entropy(pred, actual):
    """Cross-entropy loss -sum(actual * log(pred)).

    pred is clipped into [eps, 1 - eps] so a zero (or one) probability
    cannot produce -inf from the log.
    """
    return -np.sum(actual * np.log(np.clip(pred, eps, 1 - eps)))

def x_entropy_d(pred, actual):
    """Gradient of x_entropy with respect to pred: -actual / pred.

    Fix: clip pred exactly like the forward pass — the original divided by
    the raw pred, so pred == 0 produced inf/nan even though x_entropy itself
    stayed finite.
    """
    return -actual / np.clip(pred, eps, 1 - eps)
# ---
def softmax(x):
    """Softmax over all elements of x: exp(x_i) / sum(exp(x)).

    Fix: subtract max(x) before exponentiating. Softmax is invariant under a
    constant shift, so results are unchanged for ordinary inputs, but the
    original np.exp(x) overflowed to inf (yielding nan) for large logits.
    """
    e = np.exp(x - np.max(x))
    return e / e.sum()
# ---
def softmax_d(x):
    """Jacobian of softmax at x: J = diag(s) - s s^T, where s = softmax(x).

    Off-diagonal entries are -s_i * s_j; diagonal entries are s_i * (1 - s_i).
    """
    s = softmax(x)
    return np.diag(s) - np.outer(s, s)
# ---
# Sanity-check fixtures: a predicted distribution and a one-hot target.
test_preds = np.array([0.2,0.7,0.1])
test_actuals = np.array([0.,1.,0.])
# nnet.categorical_crossentropy(test_preds, test_actuals).eval()
# ---
# Theano reference: build a symbolic cross-entropy of an input vector
# against test_actuals, then compile a function computing d(loss)/d(input)
# to compare against the hand-derived gradients above.
test_inp = T.dvector()
test_out = nnet.categorical_crossentropy(test_inp, test_actuals)
test_grad = theano.function([test_inp], T.grad(test_out, test_inp))
# ---
# okay from here need to do things...
# Fix: `random` was never imported in this file; np.random.random is the
# grounded choice given the `numpy as np` import above.
pre_pred = np.random.random(oh_x_rnn[0][0].shape)  # random "logits" matching the target's shape
preds = softmax(pre_pred)  # NOTE(review): assumes oh_x_rnn[0][0] is 1-D — softmax above normalizes over ALL elements; confirm
actual = oh_x_rnn[0][0]  # presumably a one-hot target row — verify against where oh_x_rnn is built