``````

In [1]:

import tensorflow as tf
import numpy as np
import pandas as pd

``````
``````

In [17]:

# Raw (unnormalized) scores for 3 classes, and the sparse ground-truth label.
# label_list holds class INDICES (here class 0), not one-hot vectors.
logit_list = [0.1, 0.5, 0.4]
label_list = [0]

``````
``````

In [18]:

# Wrap the Python lists as TF variables.
# logits: shape (1, 3) float32 — batch of one example with 3 class scores.
# labels: shape (1,) int32 — sparse class index per example, as required by
#         tf.nn.sparse_softmax_cross_entropy_with_logits.
logits = tf.Variable(np.array([logit_list]), dtype=tf.float32, name="logits")
labels = tf.Variable(np.array(label_list), dtype=tf.int32, name="labels")

``````
``````

In [19]:

# Reference computation: TF applies softmax to `logits` internally and takes
# -log(softmax(logits)[label]) per example. Returns shape (1,) — one loss value.
result = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits, name='result')

``````
``````

In [20]:

# TF1-style op that initializes all variables (logits, labels) when run in a session.
init = tf.global_variables_initializer()

``````
``````

In [21]:

# Evaluate the reference loss inside a TF1 session.
with tf.Session() as sess:
    # Initialize variables, then pull the loss tensor out of the graph.
    sess.run(init)
    ret = result.eval()
    print(ret)

``````
``````

[ 1.34591067]

``````
``````

In [22]:

# Next, I implement softmax cross-entropy myself to verify the TF result.
# reference: https://deepnotes.io/softmax-crossentropy

``````
``````

In [23]:

def softmax(logits):
    """Compute the softmax of a 1-D array of logits.

    Args:
        logits: 1-D array-like of raw class scores.

    Returns:
        np.ndarray of the same length, with entries in (0, 1) summing to 1.
    """
    # Subtract the max before exponentiating for numerical stability:
    # exp(x - max) avoids overflow for large logits, and the shift cancels
    # in the ratio, so the result is mathematically unchanged.
    shifted = np.asarray(logits) - np.max(logits)
    exps = np.exp(shifted)
    return exps / np.sum(exps)

``````
``````

In [24]:

# Sanity-check: the three probabilities should sum to 1.
print(softmax(logit_list))

``````
``````

[ 0.26030255  0.38832577  0.35137169]

``````
``````

In [13]:

def cross_entropy(label, y_hat):
    """Cross-entropy loss for a single example with a sparse label.

    Args:
        label: integer index of the true class.
        y_hat: array of predicted class probabilities (e.g. softmax output).

    Returns:
        The negative log-probability assigned to the true class.
    """
    true_class_prob = y_hat[label]
    return -np.log(true_class_prob)

``````
``````

In [25]:

# Predicted class probabilities for our single example, as a NumPy array
# so it can be indexed by the integer label in cross_entropy().
y_hat = np.array(softmax(logit_list))

``````
``````

In [26]:

# Should reproduce the TF value printed above (~1.34591).
print(cross_entropy(label_list[0], y_hat=y_hat))

``````
``````

1.34591068334

``````

We can see that my implementation matches the TensorFlow result (both give ~1.34591).