In [2]:
import tensorflow as tf
import math
In [3]:
interactive_session = tf.InteractiveSession()
In [21]:
y=[[5.,4.,1.],[1.,9.,20.],[3.,4.,3.]]
In [22]:
y_ = [[1., 0., 0.], [1., 0., 0.], [1., 0., 0.]]  # one-hot labels, float to match the logits' dtype
In [23]:
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=y_)
In [24]:
interactive_session.run(cross_entropy)
Out[24]:
In [35]:
softmax=tf.nn.softmax(y)
In [36]:
interactive_session.run(softmax)
Out[36]:
In [38]:
-1*math.log(0.721399188)
Out[38]:
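As a sanity check, the same value can be derived by hand: for a one-hot label [1, 0, 0], cross entropy reduces to -log of the first softmax probability. A minimal sketch in plain Python (no TensorFlow), using the first row of y from above:
# manual softmax over the first row of y, then cross entropy against label [1, 0, 0]
logits = [5., 4., 1.]
exps = [math.exp(v) for v in logits]
probs = [e / sum(exps) for e in exps]  # softmax: roughly [0.7214, 0.2654, 0.0132]
print(-math.log(probs[0]))            # ~0.3266, matching the cell above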
The following builds a small model and trains it. At first the optimizer appeared to take no action, and the cause was the loss function: the model's output y is a single value, so softmax over it is constantly 1 and a cross-entropy loss built on it has zero gradient. Since fitting a single real-valued target is a regression problem, mean squared error is used as the loss instead (the two cross-entropy attempts are left commented out below).
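To see the problem concretely: softmax over a single logit is always 1, whatever the logit's value, so the loss cannot change. A quick check in the same session (the value 2.7 is arbitrary):
single_logit = tf.constant([[2.7]])
print(interactive_session.run(tf.nn.softmax(single_logit)))  # always [[ 1.]]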
In [4]:
x=tf.constant([[3.0,4.0,5.0]])
In [5]:
#weight=tf.Variable(tf.random_normal([3,1],stddev=1,seed=1))
weight=tf.Variable([[0.6],[0.5],[0.3]])
In [6]:
y_=tf.constant([1.0])
In [7]:
y=tf.matmul(x,weight)
In [8]:
y_softmax=tf.nn.softmax(y)
In [14]:
# loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=y_))
# loss_function = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y_softmax, 1e-10, 1.0)))
loss_function = tf.reduce_mean(tf.square(y_ - y))  # mean squared error suits this regression target
In [15]:
optim = tf.train.AdamOptimizer(0.001)
In [16]:
train_step=optim.minimize(loss_function)
In [17]:
init_op = tf.global_variables_initializer()  # replaces the deprecated initialize_all_variables
In [18]:
interactive_session.run(init_op)
In [20]:
for i in range(1000):
    interactive_session.run(train_step)
    loss = interactive_session.run(loss_function)
    w = interactive_session.run(weight)
    if i % 100 == 0:
        print(w)
        print(loss)
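If training is working, the weights should move so that x·w approaches the target 1.0 (that is, 3·w1 + 4·w2 + 5·w3 ≈ 1) and the printed loss should shrink toward zero. A final sanity check after the loop:
# after training, the model output should be close to the target y_ = [1.0]
print(interactive_session.run(y))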