Why do we need tf.nn.softmax_cross_entropy_with_logits ?
In [59]:
# Show the library docstring for the fused op being discussed above.
print(tf.nn.softmax_cross_entropy_with_logits.__doc__)
$\mathrm{softmax}_j(x) = \frac{\exp(x_j)}{\sum_k \exp(x_k)}$
$\mathrm{layer}(x) = \mathrm{softmax}(Wx + b) = \frac{\exp((Wx + b)_j)}{\sum_k \exp((Wx + b)_k)}$
tf.nn.softmax_cross_entropy_with_logits
Computing softmax and the logarithm separately is numerically unstable: $\exp(x)$ overflows for large logits, which is why the fused op applies the log-sum-exp trick internally.
$\sum_{i=1}^N \log \mathrm{softmax}_{y_i}(x_i) = \sum_{i=1}^N \sum_{j=1}^C [y_i = j] \log \mathrm{softmax}_j(x_i) =$
$\sum_{i=1}^N \sum_{j=1}^C [y_i = j]\left(x_{ij} - \log \sum_k \exp(x_{ik})\right) =$
$\sum_{i=1}^N \sum_{j=1}^C [y_i = j]\left(x_{ij} - x_{i,\max} - \log \sum_k \exp(x_{ik} - x_{i,\max})\right)$
In [60]:
import numpy as np
import tensorflow as tf
from keras.layers.advanced_activations import LeakyReLU, PReLU
In [62]:
def LeakyRelu(x, alpha):
    """Leaky ReLU activation: x for x >= 0, alpha * x otherwise.

    Implemented as an element-wise maximum, which is equivalent to the
    piecewise form whenever alpha <= 1.
    """
    scaled = alpha * x
    return tf.maximum(scaled, x)
In [48]:
with tf.Session() as sess:
    # Draw 5 values uniformly from [-5, 5) and push them through LeakyRelu.
    slope = 0.5
    values = tf.Variable(
        initial_value=tf.random_uniform(shape=[5], minval=-5, maxval=5, dtype=tf.float32))
    activated = LeakyRelu(values, slope)
    sess.run(tf.global_variables_initializer())
    before, after = sess.run([values, activated])
    print('before', before)
    print('after', after)
In [50]:
def PRelu(x):
    """Parametric ReLU: x for x >= 0, alpha * x otherwise.

    The slope `alpha` is a trainable variable with one entry per element
    of `x` (requires the static shape of `x` to be fully known).
    NOTE(review): alpha is initialized from a standard normal rather than
    the small positive constant used in the PReLU paper — confirm intended.
    """
    alpha = tf.Variable(initial_value=tf.random_normal(shape=x.shape))
    negative_branch = alpha * x
    return tf.where(x < 0, negative_branch, tf.nn.relu(x))
In [51]:
with tf.Session() as sess:
    # Draw 5 values uniformly from [-5, 5) and push them through PRelu.
    alpha = 0.5  # unused by PRelu (it owns its slope); kept for parity with the cell above
    values = tf.Variable(
        initial_value=tf.random_uniform(shape=[5], minval=-5, maxval=5, dtype=tf.float32))
    activated = PRelu(values)
    sess.run(tf.global_variables_initializer())
    before, after = sess.run([values, activated])
    print('before', before)
    print('after', after)
In [66]:
def spp_layer(input_, levels=(2, 1), name='SPP_layer'):
    """Spatial Pyramid Pooling layer.

    For each pyramid level ``l`` the feature map is max-pooled into roughly
    ``l x l`` bins; the flattened results of all levels are concatenated
    along the feature axis, giving a fixed-length output regardless of the
    spatial size of the input.

    Args:
        input_: 4-D tensor ``[batch, height, width, channels]`` whose static
            shape must be fully known (``get_shape().as_list()`` is used).
        levels: iterable of pyramid levels (bins per side), e.g. ``(1, 2, 3, 6)``.
            Default changed from a mutable list to a tuple; values unchanged.
        name: variable-scope name for the ops.

    Returns:
        2-D tensor ``[batch, features]`` with the pooled features of every level.
    """
    shape = input_.get_shape().as_list()
    with tf.variable_scope(name):
        pool_outputs = []
        for l in levels:
            # Window covers ceil(dim / l); stride is floor(dim / l + 1).
            # tf.nn.max_pool requires integer ksize/strides — the original
            # code left the second stride as a float (missing astype), which
            # made the call fail; both are cast explicitly here.
            ksize = [1,
                     int(np.ceil(shape[1] * 1. / l)),
                     int(np.ceil(shape[2] * 1. / l)),
                     1]
            strides = [1,
                       int(np.floor(shape[1] * 1. / l + 1)),
                       int(np.floor(shape[2] * 1. / l + 1)),
                       1]
            pool = tf.nn.max_pool(input_, ksize=ksize, strides=strides,
                                  padding='SAME')
            # Flatten each level to [batch, -1] before concatenation.
            pool_outputs.append(tf.reshape(pool, [shape[0], -1]))
        # TF >= 1.0 signature is tf.concat(values, axis); the old
        # tf.concat(1, pool_outputs) order raises a TypeError here.
        spp_pool = tf.concat(pool_outputs, axis=1)
    return spp_pool
Spatial Pyramid Pooling coming soon
PR: https://github.com/tensorflow/tensorflow/pull/12852/files
In [ ]: