In [25]:
import tensorflow as tf
In [26]:
import numpy as np
In [27]:
n_inputs = 3
In [28]:
n_neurons = 5
In [29]:
X0 = tf.placeholder(tf.float32, [None, n_inputs])
In [30]:
X1 = tf.placeholder(tf.float32, [None, n_inputs])
In [31]:
Wx = tf.Variable(tf.random_normal(shape=[n_inputs, n_neurons], dtype=tf.float32))
In [32]:
Wy = tf.Variable(tf.random_normal(shape=[n_neurons, n_neurons], dtype=tf.float32))
In [33]:
b = tf.Variable(tf.zeros([1, n_neurons], dtype=tf.float32))
In [34]:
Y0 = tf.tanh(tf.matmul(X0, Wx) + b)
In [35]:
Y1 = tf.tanh(tf.matmul(X1, Wx) + tf.matmul(Y0, Wy) + b)
In [36]:
init = tf.global_variables_initializer()
In [37]:
''' This network looks much like a two-layer feedforward neural network, with a few twists:
1. the same weights and bias terms are shared by both layers.
2. we feed inputs at each layer and get outputs from each layer.
'''
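# The recurrence the two ops above implement, spelled out (a sketch, not
# part of the original notebook): for each time step t,
#     Y_t = tanh(X_t . Wx + Y_{t-1} . Wy + b),  with Y_{-1} = 0.
# A minimal NumPy version, assuming Wx_val, Wy_val, b_val have been fetched
# from a session (e.g. Wx_val = sess.run(Wx)):
def manual_step(X_t, Y_prev, Wx_val, Wy_val, b_val):
    return np.tanh(X_t @ Wx_val + Y_prev @ Wy_val + b_val)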
In [38]:
X0_batch = np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 0, 1]])  # t = 0
In [39]:
X1_batch = np.array([[9, 8, 7], [0, 0, 0], [6, 5, 4], [3, 2, 1]])  # t = 1
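# Each mini-batch above holds four instances with three input features, so
# both arrays have shape (4, 3), matching the [None, n_inputs] placeholders:
print(X0_batch.shape)  # (4, 3)
print(X1_batch.shape)  # (4, 3)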
In [40]:
with tf.Session() as sess:
    sess.run(init)
    Y0_val, Y1_val = sess.run([Y0, Y1], feed_dict={X0: X0_batch, X1: X1_batch})
In [41]:
print(Y0_val)
In [42]:
print(Y1_val)
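# Both outputs come back as NumPy arrays of shape (4, 5): one row per
# instance in the mini-batch, one column per recurrent neuron (the exact
# values vary with the random weight initialization):
print(Y0_val.shape)  # (4, 5)
print(Y1_val.shape)  # (4, 5)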
In [44]:
X3 = tf.placeholder(tf.float32, [None, n_inputs])
In [46]:
X4 = tf.placeholder(tf.float32, [None, n_inputs])
In [68]:
basic_cell = tf.nn.rnn_cell.BasicRNNCell(num_units=n_neurons)
In [48]:
output_seqs, states = tf.nn.static_rnn(basic_cell, [X3, X4], dtype=tf.float32)  # tf.nn.rnn was renamed to tf.nn.static_rnn in TF 1.x
In [51]:
Y3, Y4 = output_seqs
In [52]:
init1 = tf.global_variables_initializer()
In [53]:
with tf.Session() as sess:
    sess.run(init1)
    Y3_val, Y4_val = sess.run([Y3, Y4], feed_dict={X3: X0_batch, X4: X1_batch})
In [54]:
print(Y3_val)
In [55]:
print(Y4_val)
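# For a BasicRNNCell the state is simply its last output, so `states`
# should equal Y4. A minimal check (a sketch: it opens a fresh session and
# fetches everything in a single run, since the session above has closed):
with tf.Session() as sess:
    sess.run(init1)
    Y4_check, states_val = sess.run([Y4, states],
                                    feed_dict={X3: X0_batch, X4: X1_batch})
    print(np.allclose(states_val, Y4_check))  # True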
In [57]:
n_steps = 5
In [58]:
X = tf.placeholder(tf.float32, [None, n_steps, n_inputs])
In [59]:
X_seqs = tf.unstack(tf.transpose(X, perm=[1, 0, 2]))  # tf.unpack was renamed to tf.unstack in TF 1.x
In [66]:
X_seqs
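# tf.transpose swaps the batch and time dimensions, giving shape
# [n_steps, None, n_inputs]; tf.unstack then splits along the first axis
# into a Python list of n_steps tensors, each of shape [None, n_inputs].
# A sketch (assuming TF 1.x) of feeding that list to static_rnn and packing
# the per-step outputs back into one [None, n_steps, n_neurons] tensor; the
# variable scope just keeps these weights from clashing with the cell above:
with tf.variable_scope("seq_rnn"):
    seq_cell = tf.nn.rnn_cell.BasicRNNCell(num_units=n_neurons)
    output_seqs2, states2 = tf.nn.static_rnn(seq_cell, X_seqs, dtype=tf.float32)
outputs = tf.transpose(tf.stack(output_seqs2), perm=[1, 0, 2])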