In [25]:
import tensorflow as tf

In [26]:
import numpy as np

In [27]:
n_inputs = 3

In [28]:
n_neurons = 5

In [29]:
X0 = tf.placeholder(tf.float32, [None, n_inputs])  # input at t = 0

In [30]:
X1 = tf.placeholder(tf.float32, [None, n_inputs])  # input at t = 1

In [31]:
Wx = tf.Variable(tf.random_normal(shape=[n_inputs, n_neurons], dtype=tf.float32))   # input-to-hidden weights

In [32]:
Wy = tf.Variable(tf.random_normal(shape=[n_neurons, n_neurons], dtype=tf.float32))  # hidden-to-hidden (recurrent) weights

In [33]:
b = tf.Variable(tf.zeros([1, n_neurons], dtype=tf.float32))  # bias, shared across time steps

In [34]:
Y0 = tf.tanh(tf.matmul(X0, Wx) + b)  # output at t = 0 (initial state is implicitly all zeros)

In [35]:
Y1 = tf.tanh(tf.matmul(X1, Wx) + tf.matmul(Y0, Wy) + b)  # output at t = 1, fed by the previous output

In [36]:
init = tf.global_variables_initializer()

In [37]:
''' This network looks much like a two-layer feedforward neural network, with a few twists:
1. the same weights and bias terms are shared by both layers (time steps).
2. inputs are fed in at each layer, and outputs are read from each layer.
'''


Out[37]:
' This network looks much like a two-layer feedforward neural network, with a few twists:\n1. the same weights and bias terms are shared by both layers (time steps).\n2. inputs are fed in at each layer, and outputs are read from each layer.\n'
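
In equation form, each unrolled step computes Y_t = tanh(X_t · Wx + Y_{t-1} · Wy + b), with the initial state Y_{-1} taken to be all zeros. A minimal NumPy sketch of one such step (the function and argument names here are illustrative, not part of the graph above):

def rnn_step(X_t, Y_prev, Wx_val, Wy_val, b_val):
    # one unrolled step: Y_t = tanh(X_t Wx + Y_{t-1} Wy + b)
    return np.tanh(X_t @ Wx_val + Y_prev @ Wy_val + b_val)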

In [38]:
X0_batch = np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 0, 1]])  # t = 0, mini-batch of 4 instances

In [39]:
X1_batch = np.array([[9, 8, 7], [0, 0, 0], [6, 5, 4], [3, 2, 1]])  # t = 1, same 4 instances

In [40]:
with tf.Session() as sess:
    sess.run(init)
    Y0_val, Y1_val = sess.run([Y0, Y1], feed_dict={X0: X0_batch, X1: X1_batch})

In [41]:
print(Y0_val)


[[ 0.95313758 -0.17996429  0.25595349  0.99995422  0.95708007]
 [ 1.         -0.99662399  0.87401539  1.          1.        ]
 [ 1.         -0.99999183  0.98485768  1.          1.        ]
 [ 1.         -1.          0.99977297  1.          1.        ]]

In [42]:
print(Y1_val)


[[ 1.         -1.          0.99913961  1.          1.        ]
 [ 0.99348378 -0.92470729  0.59619802 -0.950019    0.96663409]
 [ 1.         -0.99999946  0.99068093  1.          1.        ]
 [ 1.         -0.99979377  0.92245167  0.99699968  1.        ]]
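
Note that many activations print as exactly 1 or -1: the weights are drawn from an unscaled normal distribution and the inputs run up to 9, so the pre-activations are large and tanh saturates (np.tanh(5.0) is already about 0.9999).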

In [44]:
X3 = tf.placeholder(tf.float32, [None, n_inputs])  # input at t = 0 for the static-unrolling version

In [46]:
X4 = tf.placeholder(tf.float32, [None, n_inputs])  # input at t = 1

In [68]:
basic_cell = tf.nn.rnn_cell.BasicRNNCell(num_units=n_neurons)

In [48]:
output_seqs, states = tf.nn.rnn(basic_cell, [X3, X4], dtype=tf.float32)  # statically unrolls the cell over the two time steps
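
The static unrolling helper was renamed in later releases: tf.nn.rnn became tf.contrib.rnn.static_rnn in TensorFlow 1.0 and tf.nn.static_rnn from 1.2 on, keeping the same (cell, inputs, dtype) calling convention. Under a newer 1.x install the equivalent call (not part of this notebook's run) would be:

output_seqs, states = tf.nn.static_rnn(basic_cell, [X3, X4], dtype=tf.float32)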

In [51]:
Y3, Y4 = output_seqs

In [52]:
init1 = tf.global_variables_initializer()

In [53]:
with tf.Session() as sess:
    sess.run(init1)
    Y3_val, Y4_val = sess.run([Y3, Y4], feed_dict={X3: X0_batch, X4: X1_batch})

In [54]:
print(Y3_val)


[[ 0.36895558 -0.7123453  -0.91218674 -0.65849072  0.1567992 ]
 [ 0.50603181 -0.99882776 -0.99994904  0.30092672  0.50690198]
 [ 0.62156337 -0.99999589 -1.          0.88775307  0.74383038]
 [-0.45230895 -0.99997234 -0.99736899  0.99970359  0.43973276]]

In [55]:
print(Y4_val)


[[ 0.04022945 -0.99999976 -1.          0.99851537  0.73603499]
 [-0.58808035 -0.47645965  0.14024077 -0.30263644 -0.49604744]
 [-0.7768271  -0.99993831 -0.99998355  0.99298751 -0.1583083 ]
 [-0.80477601 -0.96781379 -0.9651143   0.92127007 -0.50737476]]
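
These values differ from Y0_val and Y1_val above because BasicRNNCell creates its own freshly initialized weight and bias variables rather than reusing Wx, Wy, and b.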

In [57]:
n_steps = 5

In [58]:
X = tf.placeholder(tf.float32, [None, n_steps, n_inputs])  # [batch size, n_steps, n_inputs]

In [59]:
X_seqs = tf.unpack(tf.transpose(X, perm=[1, 0, 2]))  # swap batch and time axes, then split into a per-step list

In [66]:
X_seqs


Out[66]:
[<tf.Tensor 'unpack:0' shape=(?, 3) dtype=float32>,
 <tf.Tensor 'unpack:1' shape=(?, 3) dtype=float32>,
 <tf.Tensor 'unpack:2' shape=(?, 3) dtype=float32>,
 <tf.Tensor 'unpack:3' shape=(?, 3) dtype=float32>,
 <tf.Tensor 'unpack:4' shape=(?, 3) dtype=float32>]
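
The transpose swaps the batch and time axes so that unpack can split the tensor along the time dimension, producing the n_steps tensors of shape (?, 3) listed above (tf.unpack was renamed tf.unstack in TensorFlow 0.12+). A small NumPy sketch of the same reshuffle, with made-up demo values:

demo = np.arange(2 * n_steps * n_inputs).reshape(2, n_steps, n_inputs)  # [batch, n_steps, n_inputs]
per_step = list(np.transpose(demo, (1, 0, 2)))  # n_steps arrays, each [batch, n_inputs]
print(len(per_step), per_step[0].shape)         # 5 (2, 3)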