In [ ]:
import tensorflow as tf
from tensorflow.python.client import timeline
import pylab
import numpy as np
%matplotlib inline
%config InlineBackend.figure_format = 'retina'
tf.logging.set_verbosity(tf.logging.INFO)
In [ ]:
tf.reset_default_graph()
In [ ]:
num_samples = 100000
In [ ]:
from datetime import datetime
version = int(datetime.now().strftime("%s"))
print(version)
In [ ]:
x_train = np.random.rand(num_samples).astype(np.float32)
print(x_train)
noise = np.random.normal(scale=0.01, size=len(x_train))
y_train = x_train * 0.1 + 0.3 + noise
print(y_train)
pylab.plot(x_train, y_train, '.')
In [ ]:
x_test = np.random.rand(len(x_train)).astype(np.float32)
print(x_test)
noise = np.random.normal(scale=0.01, size=len(x_train))
y_test = x_test * 0.1 + 0.3 + noise
print(y_test)
pylab.plot(x_test, y_test, '.')
In [ ]:
with tf.device("/cpu:0"):
    W = tf.get_variable(shape=[], name='weights')
    print(W)

    b = tf.get_variable(shape=[], name='bias')
    print(b)

    x_observed = tf.placeholder(shape=[None], dtype=tf.float32, name='x_observed')
    print(x_observed)

with tf.device("/cpu:0"):
    y_pred = W * x_observed + b
    print(y_pred)
In [ ]:
with tf.device("/cpu:0"):
    y_observed = tf.placeholder(shape=[None], dtype=tf.float32, name='y_observed')
    print(y_observed)

    loss_op = tf.reduce_mean(tf.square(y_pred - y_observed))
    optimizer_op = tf.train.GradientDescentOptimizer(0.025)
    train_op = optimizer_op.minimize(loss_op)

    print("loss:", loss_op)
    print("optimizer:", optimizer_op)
    print("train:", train_op)
In [ ]:
with tf.device("/cpu:0"):
    init_op = tf.global_variables_initializer()
    print(init_op)
In [ ]:
train_summary_writer = tf.summary.FileWriter('/root/tensorboard/linear/cpu/%s/train' % version, graph=tf.get_default_graph())
test_summary_writer = tf.summary.FileWriter('/root/tensorboard/linear/cpu/%s/test' % version, graph=tf.get_default_graph())
In [ ]:
config = tf.ConfigProto(
    log_device_placement=True,
)
print(config)
sess = tf.Session(config=config)
In [ ]:
sess.run(init_op)
print(sess.run(W))
print(sess.run(b))
In [ ]:
def test(x, y):
    return sess.run(loss_op, feed_dict={x_observed: x, y_observed: y})
In [ ]:
test(x=x_test, y=y_test)
In [ ]:
loss_summary_scalar_op = tf.summary.scalar('loss', loss_op)
loss_summary_merge_all_op = tf.summary.merge_all()
In [ ]:
%%time

max_steps = 400
run_metadata = tf.RunMetadata()

for step in range(max_steps):
    if (step < max_steps - 1):
        test_summary_log, _ = sess.run([loss_summary_merge_all_op, loss_op], feed_dict={x_observed: x_test, y_observed: y_test})
        train_summary_log, _ = sess.run([loss_summary_merge_all_op, train_op], feed_dict={x_observed: x_train, y_observed: y_train})
    else:
        # On the final step, capture a full execution trace and write it out as a
        # Chrome trace file (viewable at chrome://tracing).
        test_summary_log, _ = sess.run([loss_summary_merge_all_op, loss_op], feed_dict={x_observed: x_test, y_observed: y_test})
        train_summary_log, _ = sess.run([loss_summary_merge_all_op, train_op], feed_dict={x_observed: x_train, y_observed: y_train},
                                        options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
                                        run_metadata=run_metadata)

        trace = timeline.Timeline(step_stats=run_metadata.step_stats)
        with open('cpu-timeline.json', 'w') as trace_file:
            trace_file.write(trace.generate_chrome_trace_format(show_memory=True))

    if step % 1 == 0:
        print(step, sess.run([W, b]))
        train_summary_writer.add_summary(train_summary_log, step)
        train_summary_writer.flush()
        test_summary_writer.add_summary(test_summary_log, step)
        test_summary_writer.flush()
In [ ]:
pylab.plot(x_train, y_train, '.', label="target")
pylab.plot(x_train, sess.run(y_pred, feed_dict={x_observed: x_train, y_observed: y_train}), ".", label="predicted")
pylab.legend()
pylab.ylim(0, 1.0)
In [ ]:
test(x=x_test, y=y_test)
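In [ ]:
# A quick sanity check (a sketch, not part of the original notebook): the learned
# parameters should be close to the true values 0.1 and 0.3 used to generate the
# synthetic data above.
w_final, b_final = sess.run([W, b])
print("learned W = %.4f (true value 0.1)" % w_final)
print("learned b = %.4f (true value 0.3)" % b_final)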
In [ ]:
from tensorflow.python.saved_model import utils
tensor_info_x_observed = utils.build_tensor_info(x_observed)
print(tensor_info_x_observed)
tensor_info_y_pred = utils.build_tensor_info(y_pred)
print(tensor_info_y_pred)
In [ ]:
export_path = "/root/models/linear/cpu/%s" % version
print(export_path)
In [ ]:
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import tag_constants
with tf.device("/cpu:0"):
    builder = saved_model_builder.SavedModelBuilder(export_path)

    prediction_signature = signature_def_utils.build_signature_def(
        inputs={'x_observed': tensor_info_x_observed},
        outputs={'y_pred': tensor_info_y_pred},
        method_name=signature_constants.PREDICT_METHOD_NAME)

    legacy_init_op = tf.group(tf.initialize_all_tables(), name='legacy_init_op')

    builder.add_meta_graph_and_variables(
        sess,
        [tag_constants.SERVING],
        signature_def_map={'predict': prediction_signature,
                           signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: prediction_signature},
        legacy_init_op=legacy_init_op)

    builder.save()
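In [ ]:
# Optional verification sketch (assumes the export above succeeded): reload the
# SavedModel into a fresh graph/session and run a prediction through the tensors
# recorded in the signature. Tensor names come from the TensorInfo protos built
# earlier, so nothing here is hard-coded.
with tf.Graph().as_default():
    with tf.Session() as verify_sess:
        tf.saved_model.loader.load(verify_sess, [tag_constants.SERVING], export_path)
        x_in = verify_sess.graph.get_tensor_by_name(tensor_info_x_observed.name)
        y_out = verify_sess.graph.get_tensor_by_name(tensor_info_y_pred.name)
        print(verify_sess.run(y_out, feed_dict={x_in: [0.0, 0.5, 1.0]}))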
In [ ]:
%%bash
# Substitute the version value printed earlier (a %%bash cell cannot see Python variables).
ls -l /root/models/linear/cpu/[version]
In [ ]:
from tensorflow.python.framework import graph_io
graph_io.write_graph(sess.graph, "/root/models/optimize_me/", "unoptimized_cpu.pb")
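In [ ]:
%%bash
# Confirm the unoptimized graph was written (path taken from the cell above).
ls -l /root/models/optimize_me/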
In [ ]:
sess.close()