In [1]:
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib
%matplotlib inline
In [15]:
# ops are added to the default graph by default
matrix1 = tf.constant([[2.,3.]])
matrix2 = tf.constant([[2.,3.],[2.,3.]])
product = tf.matmul(matrix1,matrix2)
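# quick check that newly created ops really land on the default graph
# (a minimal sketch; tf.get_default_graph() returns the graph ops attach to)
print(product.graph is tf.get_default_graph())  # True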
In [16]:
sess = tf.Session()
result = sess.run(product)
print(result)
sess.close()
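# sess.run can also fetch several tensors in one call and returns their values
# in the same order (a minimal sketch reusing matrix1 and product from above)
sess = tf.Session()
m1_val, prod_val = sess.run([matrix1, product])
print(m1_val)
print(prod_val)
sess.close()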
In [19]:
sess = tf.InteractiveSession()
x = tf.Variable([1.,2.])
a = tf.constant([3.,4.])
# variables must be initialized before use
x.initializer.run()
# subtract x from a
sub = tf.subtract(a, x)
sub.eval()
sess.close()
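# without an InteractiveSession, eval() needs an explicit session, so use
# sess.run instead (a minimal sketch, assuming x, a and sub from the cell
# above are still on the default graph)
with tf.Session() as sess:
    sess.run(x.initializer)   # variables must be re-initialized in the new session
    print(sess.run(sub))      # same subtraction op, evaluated through sess.run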
In [31]:
# reset the graph
tf.reset_default_graph()
# counter
state = tf.Variable(0,name="counter")
# constant
one = tf.constant(1)
# new state
new_state = tf.add(state,one)
# assign new_state to state
# note that assign returns a reference to the variable (state)
update = tf.assign(state,new_state)
# op that initializes all variables
init = tf.global_variables_initializer()
with tf.Session() as sess:
    # initialize variables
    sess.run(init)
    # print the initial state
    print(sess.run(state))
    # update three times
    for _ in range(3):
        print(sess.run(update))
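# the same counter can be written with tf.assign_add, which increments the
# variable in place (a minimal sketch of an equivalent pattern)
tf.reset_default_graph()
counter = tf.Variable(0, name="counter")
increment = tf.assign_add(counter, 1)
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for _ in range(3):
        print(sess.run(increment))  # 1, 2, 3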
In [38]:
tf.reset_default_graph()
input1 = tf.placeholder(tf.float32,shape=[None,2])
input2 = tf.placeholder(tf.float32,shape=[2,1])
output = tf.matmul(input1,input2)
with tf.Session() as sess:
    res = sess.run([output], feed_dict={input1: [[4., 5.]], input2: [[3.], [5.]]})
    print(res)
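# because input1 has shape [None, 2], the same graph accepts any number of rows
# at feed time (a minimal sketch feeding a two-row batch)
with tf.Session() as sess:
    batch = [[4., 5.], [1., 2.]]
    res = sess.run(output, feed_dict={input1: batch, input2: [[3.], [5.]]})
    print(res)  # one output row per input row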
In [5]:
with tf.Session() as sess:
    logits = tf.Variable([[0.2, 0.3, 0.5], [0.3, 0.3, 0.4]], name='logits')
    labels = [0, 1]
    init = tf.global_variables_initializer()
    sess.run(init)
    # in_top_k: is the true label among the top-1 entries of each logits row?
    correct = tf.nn.in_top_k(logits, labels, 1)
    # number of correctly classified examples
    num_correct = tf.reduce_sum(tf.cast(correct, tf.int32))
    # per-example cross entropy; labels are class indices, not one-hot vectors
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits, name='xentropy')
    print(sess.run(num_correct))
    print(sess.run(cross_entropy))
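    # in a full training setup the per-example losses are usually reduced to one
    # scalar for the optimizer; a minimal sketch of that step using tf.reduce_mean
    loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
    print(sess.run(loss))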