In [1]:
# Setup: TF1 session plus the target and prediction grids used by every
# loss-comparison cell below.
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

sess = tf.Session()

# Targets: all-zero for the regression losses, all-one for the binary
# classification losses.
zero_values_op = tf.zeros([300])
one_values_op = tf.ones([300])

# Input grids: predictions in [-3, 3] for regression, logits in [-3, 5]
# for classification.
predictions_op = tf.linspace(-3.0, 3.0, 300)
logits_op = tf.linspace(-3.0, 5.0, 300)

# Evaluate both grids with a single session call for plotting on the x-axis.
predictions, logits = sess.run([predictions_op, logits_op])
In [2]:
# L1 loss: elementwise |target - prediction| against a zero target.
# Reduction.NONE keeps the per-element values for plotting.
l1_loss_op = tf.losses.absolute_difference(
    zero_values_op, predictions_op, reduction=tf.losses.Reduction.NONE)
l1_loss_values = sess.run(l1_loss_op)
In [3]:
l2_loss_values = sess.run(tf.square(zero_values_op - predictions_op))
In [4]:
# Huber loss: quadratic near zero, linear in the tails (delta defaults
# to 1.0). Reduction.NONE keeps the per-element values for plotting.
huber_loss_op = tf.losses.huber_loss(
    zero_values_op, predictions_op, reduction=tf.losses.Reduction.NONE)
huber_loss_values = sess.run(huber_loss_op)
In [5]:
# Overlay the three regression losses on one axes, in legend order.
regression_curves = [
    (l1_loss_values, 'r:', 'L1 Loss'),
    (l2_loss_values, 'b:', 'L2 Loss'),
    (huber_loss_values, 'g:', 'Huber Loss'),
]
for values, fmt, label in regression_curves:
    plt.plot(predictions, values, fmt, label=label)
plt.ylim(-2.0, 4.0)
plt.grid(color='gray')
plt.legend(loc='lower right', prop={'size': 11})
plt.show()
In [6]:
# Hinge loss for an all-ones label against the logit grid.
# Reduction.NONE keeps the per-element values for plotting.
hinge_loss_op = tf.losses.hinge_loss(
    one_values_op, logits_op, reduction=tf.losses.Reduction.NONE)
hinge_loss_values = sess.run(hinge_loss_op)
In [7]:
# Manual binary cross-entropy for label y = 1, plotted against the logit:
#   CE = -y*log(p) - (1-y)*log(1-p),  p = sigmoid(logit)
# BUG FIX: the original applied tf.log directly to the raw logits, which
# span [-3, 5]; log of a negative number (and log(1 - p) with p > 1) is
# NaN, so most of the curve was undefined. Squash the logits through a
# sigmoid first, matching what tf.nn.sigmoid_cross_entropy_with_logits
# does internally. Also uses logits_op rather than the evaluated numpy
# array for consistency with the other cells.
label = tf.constant(1.0)
probabilities_op = tf.sigmoid(logits_op)
cross_entropy_loss_values = sess.run(
    -tf.multiply(label, tf.log(probabilities_op))
    - tf.multiply(1.0 - label, tf.log(1.0 - probabilities_op))
)
In [8]:
# Numerically-stable sigmoid cross-entropy computed directly from logits.
sigmoid_ce_op = tf.nn.sigmoid_cross_entropy_with_logits(
    labels=one_values_op,
    logits=logits_op,
)
sigmoid_cross_entropy_loss_values = sess.run(sigmoid_ce_op)
In [9]:
# Weighted sigmoid cross-entropy: pos_weight scales the positive-class
# term (0.5 down-weights the y = 1 contribution).
pos_weight = tf.constant(0.5)
weighted_ce_op = tf.nn.weighted_cross_entropy_with_logits(
    one_values_op, logits_op, pos_weight)
weighted_cross_entropy_loss_values = sess.run(weighted_ce_op)
In [10]:
# Softmax cross-entropy with a soft label (a full probability
# distribution over the three classes).
soft_labels = tf.constant([[0.1, 0.02, 0.88]])
class_logits = tf.constant([[1.0, -3.0, 10.0]])
softmax_cross_entropy_loss_values = sess.run(
    tf.nn.softmax_cross_entropy_with_logits(
        labels=soft_labels,
        logits=class_logits,
    )
)
print(softmax_cross_entropy_loss_values)
In [11]:
# Sparse variant: the label is a single class index (2) instead of a
# probability distribution over the classes.
sparse_label = tf.constant([2])
sparse_logits = tf.constant([[1.0, -3.0, 10.0]])
sparse_softmax_cross_entropy_loss_values = sess.run(
    tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=sparse_label,
        logits=sparse_logits,
    )
)
print(sparse_softmax_cross_entropy_loss_values)
In [12]:
# Overlay the four classification losses on one axes, in legend order.
classification_curves = [
    (hinge_loss_values, 'r:', 'Hinge Loss'),
    (cross_entropy_loss_values, 'b:', 'Cross Entropy Loss'),
    (sigmoid_cross_entropy_loss_values, 'g:', 'Sigmoid Cross Entropy Loss'),
    (weighted_cross_entropy_loss_values, 'k--', 'Weighted Cross Entropy Loss'),
]
for values, fmt, label in classification_curves:
    plt.plot(logits, values, fmt, label=label)
plt.ylim(-2, 4.0)
plt.grid(color='gray')
plt.legend(loc='lower right', prop={'size': 11})
plt.show()