In [1]:
import numpy as np
import tensorflow as tf

# Meta-parameters and debugging knobs
learning_rate = 0.01
training_epochs = 1000
display_step = 50

# Toy series to fit (fed as both input and target below)
y = np.asarray([1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 3.0, 4.0])
num_steps = y.shape[0]

# Input data placeholders
data_in = tf.placeholder('float')
data_out = tf.placeholder('float')

# ETS params
initial = tf.Variable(0.0, name='initial', dtype=tf.float32)
alpha = tf.Variable(0.5, name='alpha', dtype=tf.float32)

# Definition of the ETS update
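# Simple exponential smoothing: the forecast for step t is the current
# level, and the level then chases the observation by a fraction alpha
# of the forecast error:
#   y_hat[t] = level[t-1]
#   level[t] = level[t-1] + alpha * (y[t] - y_hat[t])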
def update(y, level):
    return level, level + alpha * (y - level)

# Unrolled ETS loop
outputs = []
level = initial
for time_step in range(num_steps):
    output, level = update(data_in[time_step], level)
    outputs.append(output)
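# After unrolling, outputs[t] is the forecast of y[t] made from
# observations 0..t-1 (outputs[0] is just the initial level).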

# Sum of squared one-step-ahead errors (tf.pack was renamed tf.stack in TF 1.0)
cost = tf.reduce_sum(tf.pow(tf.pack(outputs) - data_out, 2))

# Gradient descent; minimize() differentiates the cost through the
# unrolled recursion with respect to both initial and alpha
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

# Initialize the variables (later renamed tf.global_variables_initializer)
init = tf.initialize_all_variables()

# Launch the graph
with tf.Session() as sess:
    sess.run(init)

    # Fit the model to the data.
    for epoch in range(training_epochs):
        sess.run(optimizer, feed_dict={data_in: y, data_out: y})

        # Log progress every display_step epochs
        if (epoch + 1) % display_step == 0:
            c = sess.run(cost, feed_dict={data_in: y, data_out: y})
            print "Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(c), \
                "initial=", sess.run(initial), "alpha=", sess.run(alpha)

    print "Optimization Finished!"
    training_cost = sess.run(cost, feed_dict={data_in: y, data_out: y})
    print "Training cost=", training_cost, "initial=", sess.run(initial), "alpha=", sess.run(alpha), '\n'


Epoch: 0050 cost= 6.768255711 initial= 0.745227 alpha= 0.825916
Epoch: 0100 cost= 6.608967781 initial= 1.01697 alpha= 0.793894
Epoch: 0150 cost= 6.585595131 initial= 1.12098 alpha= 0.782416
Epoch: 0200 cost= 6.582287788 initial= 1.1601 alpha= 0.778266
Epoch: 0250 cost= 6.581826210 initial= 1.1747 alpha= 0.776743
Epoch: 0300 cost= 6.581762314 initial= 1.18014 alpha= 0.77618
Epoch: 0350 cost= 6.581752777 initial= 1.18217 alpha= 0.775971
Epoch: 0400 cost= 6.581751823 initial= 1.18292 alpha= 0.775893
Epoch: 0450 cost= 6.581752300 initial= 1.1832 alpha= 0.775865
Epoch: 0500 cost= 6.581751823 initial= 1.1833 alpha= 0.775854
Epoch: 0550 cost= 6.581751823 initial= 1.18334 alpha= 0.77585
Epoch: 0600 cost= 6.581751347 initial= 1.18336 alpha= 0.775848
Epoch: 0650 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 0700 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 0750 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 0800 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 0850 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 0900 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 0950 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Epoch: 1000 cost= 6.581751823 initial= 1.18336 alpha= 0.775848
Optimization Finished!
Training cost= 6.58175 initial= 1.18336 alpha= 0.775848 

BOOYAH

And that's a match! The results from R were:

> ets(c(1, 2, 1, 2, 1, 2, 3, 4), model = 'ANN')
ETS(A,N,N) 

Call:
 ets(y = c(1, 2, 1, 2, 1, 2, 3, 4), model = "ANN") 

  Smoothing parameters:
    alpha = 0.7759 

  Initial states:
    l = 1.1834 

  sigma:  0.907

     AIC     AICc      BIC 
21.07441 27.07441 21.31273
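
As a sanity check, the fitted values can be plugged back into the recursion in
plain Python to reproduce the cost by hand. A minimal sketch (variable names
are mine); the sigma that R reports also appears to be sqrt(SSE / n):

y = [1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 3.0, 4.0]
level, alpha = 1.18336, 0.775848  # fitted values from above

sse = 0.0
for obs in y:
    sse += (obs - level) ** 2       # one-step-ahead squared error
    level += alpha * (obs - level)  # ETS level update

print(sse)                    # ~6.58175, the training cost above
print((sse / len(y)) ** 0.5)  # ~0.907, matching R's sigma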

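The TensorFlow API used above is from the 0.x days (placeholders, Session,
tf.pack). For the record, here is a rough sketch of the same unrolled model
under TensorFlow 2.x eager execution; this is my own port, not part of the
original notebook, but the idea carries over directly:

import tensorflow as tf  # assumes TensorFlow 2.x

y = tf.constant([1.0, 2.0, 1.0, 2.0, 1.0, 2.0, 3.0, 4.0])

initial = tf.Variable(0.0)
alpha = tf.Variable(0.5)

def sse(series):
    # Unroll the ETS recursion and sum the squared one-step errors.
    level = initial
    errors = []
    for obs in series:
        errors.append(obs - level)
        level = level + alpha * (obs - level)
    return tf.reduce_sum(tf.square(tf.stack(errors)))

opt = tf.keras.optimizers.SGD(learning_rate=0.01)
for epoch in range(1000):
    with tf.GradientTape() as tape:
        cost = sse(y)
    grads = tape.gradient(cost, [initial, alpha])
    opt.apply_gradients(zip(grads, [initial, alpha]))

print(cost.numpy(), initial.numpy(), alpha.numpy())

With the same learning rate and epoch count, this should land on essentially
the same initial and alpha as the session-based version.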