In [1]:
from __future__ import print_function
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
In [2]:
from datetime import date
date.today()
Out[2]:
In [3]:
author = "kyubyong. https://github.com/Kyubyong/tensorflow-exercises"
In [4]:
tf.__version__
Out[4]:
In [5]:
np.__version__
Out[5]:
NOTE on notation
_x, _y, _z, ...: NumPy 0-d or 1-d arrays
_X, _Y, _Z, ...: NumPy 2-d or higher dimensional arrays
x, y, z, ...: 0-d or 1-d tensors
X, Y, Z, ...: 2-d or higher dimensional tensors
Q0. Create a variable `w` with an initial value of 1.0 and the name `weight`. Then, print out the value of `w`.
In [6]:
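A possible answer for Q0 (one sketch; creates, initializes, and prints the variable):
w = tf.Variable(1.0, name="weight")
with tf.Session() as sess:
    sess.run(w.initializer)
    print(sess.run(w)) # => 1.0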
Q1. Complete this code.
In [7]:
# Create a variable w.
w = tf.Variable(1.0, name="Weight")
# Q. Add 1 to w and assign the value to w.
assign_op = ...
with tf.Session() as sess:
    sess.run(w.initializer)
    for _ in range(10):
        print(sess.run(w), "=>", end="")
        sess.run(assign_op)
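One way to fill in the blank (a sketch): tf.assign_add adds 1 to w and writes the result back to w.
assign_op = tf.assign_add(w, 1.0) # equivalently: w.assign_add(1.0)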
Q2. Complete this code.
In [8]:
w1 = tf.Variable(1.0)
w2 = tf.Variable(2.0)
w3 = tf.Variable(3.0)
out = w1 + w2 + w3
# Q. Add an Op to initialize global variables.
init_op = ...
with tf.Session() as sess:
    sess.run(init_op) # Initialize all variables.
    print(sess.run(out))
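The blank can be filled with the same op used in the next cell:
init_op = tf.global_variables_initializer()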
Q3-4. Complete this code.
In [9]:
V = tf.Variable(tf.truncated_normal([1, 10]))
# Q3. Initialize `W` with twice the (initialized) value of `V`.
W = ...
# Q4. Add an Op to initialize global variables.
init_op = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init_op) # Initialize all variables.
    _V, _W = sess.run([V, W])
    print(_V)
    print(_W)
    assert np.array_equiv(_V * 2.0, _W) # Compare the value of `W` with 2 * V.
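A possible answer for Q3 (a sketch): build W from V's initialized value so the assertion above holds.
W = tf.Variable(V.initialized_value() * 2.0)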
Q5-10. Complete this code.
In [10]:
g = tf.Graph()
with g.as_default():
    W = tf.Variable([[0,1],[2,3]], name="Weight", dtype=tf.float32)

    # Q5. Print the name of `W`.
    print("Q5.", ...)

    # Q6. Print the name of the op of `W`.
    print("Q6.", ...)

    # Q7. Print the data type of `W`.
    print("Q7.", ...)

    # Q8. Print the shape of `W`.
    print("Q8.", ...)

    # Q9. Print the rank (or ndims) of `W`.
    print("Q9.", ...)

    # Q10. Check if the graph of `W` is the same as `g`.
    print("Q10.", ...)
Q11-13. Complete this code.
In [11]:
tf.reset_default_graph()
w1 = tf.Variable(1.0, name="weight1")
w2 = tf.Variable(2.0, name="weight2", trainable=False)
w3 = tf.Variable(3.0, name="weight3")
with tf.Session() as sess:
    # Q11. Initialize the variables w1 and w2.
    sess.run(...)

    # Q12. Print the names of all global variables.
    for v in tf.global_variables():
        print("global variable =>", ...)

    # Q13. Print the names of all trainable variables.
    for v in tf.trainable_variables():
        print("trainable_variable =>", ...)
Q14-15. This is a simple example code to find the coefficient of a linear function. (Here y = 2x). Complete the code.
In [12]:
tf.reset_default_graph()

w = tf.Variable(0.2, name='weight') # <- This is what we want to find. The true value is 2.
x = tf.random_uniform([1])
y = 2. * x # Let's pretend we don't know the coefficient 2 here.
y_hat = w * x

loss = tf.squared_difference(y, y_hat)
train_op = tf.train.GradientDescentOptimizer(0.001).minimize(loss)
init_op = tf.global_variables_initializer()

import os
if not os.path.exists('model'): os.mkdir('model') # Make a folder so as not to clutter our current folder.

with tf.Session() as sess:
    sess.run(init_op)
    saver = tf.train.Saver()
    for step in range(1, 10001):
        sess.run(train_op)
        if step % 1000 == 0:
            print(sess.run(w), "=>", end="")

            # Q14. Save the checkpoint file, giving it a base name of `model/my-model`.
            save_path = ...
            print("Saved successfully")

    print(os.listdir('model'))

    # Q15. Find the filename of the latest saved checkpoint file.
    # And if it exists, restore it.
    ckpt = tf.train.latest_checkpoint('model')
    print(ckpt)
    if ckpt is not None:
        ...
        print("Restored successfully!")
Q16. Complete this code.
In [13]:
g = tf.Graph()
with g.as_default():
    with tf.variable_scope("foo"):
        v = tf.get_variable("vv", [1,]) # v.name == "foo/vv:0"

    # Q. Get the existing variable `v`.
    ...

    assert v1 == v
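One possible answer (a sketch): reopen the scope with reuse enabled so tf.get_variable returns the existing variable.
with tf.variable_scope("foo", reuse=True):
    v1 = tf.get_variable("vv", [1,])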
Q17. Predict the result of this code.
In [2]:
with tf.variable_scope("foo"):
with tf.variable_scope("bar"):
v = tf.get_variable("vv", [1])
print("v.name =", v.name)
Q18. Complete this code.
In [15]:
value = [0, 1, 2, 3, 4, 5, 6, 7]
# Q. Create an initializer with `value`.
init = ...
tf.reset_default_graph()
x = tf.get_variable('x', shape=[2, 4], initializer=init)
with tf.Session() as sess:
    sess.run(x.initializer)
    print("x =\n", sess.run(x))
Q19. Complete this code.
In [16]:
# Q. Create an initializer with a normal distribution of mean equals 0 and standard deviation equals 2.
init = ...
tf.reset_default_graph()
x = tf.get_variable('x', shape=[10, 1000], initializer=init)
with tf.Session():
    x.initializer.run()
    _x = x.eval()
    print("Make sure the mean", np.mean(_x), "is close to 0")
    print("Make sure the standard deviation", np.std(_x), "is close to 2")
Q20. Complete this code.
In [17]:
# Q. Create an initializer with a truncated normal distribution of mean equals 0 and standard deviation equals 2.
init = ...
tf.reset_default_graph()
x = tf.get_variable('x', shape=[1000,], initializer=init)
with tf.Session():
    x.initializer.run()
    _x = x.eval()
    plt.scatter(np.arange(1000), _x)
    _avg = np.array([np.mean(_x)] * 1000)
    _std = np.array([np.std(_x)] * 1000)
    plt.plot(np.arange(1000), _avg, 'r-')
    plt.plot(np.arange(1000), _avg + 2*_std, 'g-')
    plt.plot(np.arange(1000), _avg - 2*_std, 'k-')
    plt.legend(['mean', 'upper 2*std', 'lower 2*std'])
    plt.show()
    # Note that few data points exceed the boundaries.
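A possible answer (a sketch): the truncated variant re-draws samples that fall more than two standard deviations from the mean.
init = tf.truncated_normal_initializer(mean=0., stddev=2.)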
Q21. Complete this code.
In [18]:
# Q. Create an initializer with a random uniform distribution with minimum 0 and maximum 1.
init = ...
tf.reset_default_graph()
x = tf.get_variable('x', shape=[5000,], initializer=init)
with tf.Session():
    x.initializer.run()
    _x = x.eval()
    count, bins, ignored = plt.hist(_x, 20, normed=True)
    plt.plot(bins, np.ones_like(bins), linewidth=2, color='r')
    plt.show()
    # Note that few data points exceed the boundaries.
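A possible answer (a sketch):
init = tf.random_uniform_initializer(minval=0., maxval=1.)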
In [19]:
# Check other initializers such as zeros_initializer, ones_initializer, or orthogonal_initializer, as well.
Q22. Complete the code. Make sure you've done questions 14-15.
In [20]:
tf.reset_default_graph()
print("Of course, there're no variables since we reset the graph. See", tf.global_variables())
with tf.Session() as sess:
    # Q. Import the saved graph of `model/my-model-10000`.
    new_saver = ...
    new_saver.restore(sess, 'model/my-model-10000')

    for v in tf.global_variables():
        print("Now we have a variable", v.name)
In [ ]: