In [95]:
import numpy as np
from cntk.layers import Dense, Recurrence, Embedding
from cntk.models import Sequential
import cntk.ops as C
from cntk.utils import log_number_of_parameters
from cntk import combine
from cntk import Function
from cntk.blocks import Placeholder, LSTM

In [111]:
# Dimensions of the per-step input feature vector and the one-hot class labels.
input_dim = 100
label_dim = 12

# Dynamic-axis input variables: `input` carries the features, `label` the targets.
# (Name `input` shadows the Python builtin; kept because the eval cell below
# references it.)
input = C.input_variable(input_dim, np.float32)
label = C.input_variable(label_dim, np.float32)

# Two-layer MLP baseline — built for comparison only; not trained or evaluated
# in the cells shown here.
model_mlp = Sequential([Dense(20, activation=C.relu),
                    Dense(label_dim, activation=None)])(input)

emb_dim = 150     # embedding width
hidden_dim = 222  # LSTM hidden-state size

# Embedding -> unidirectional LSTM -> linear classifier.
# FIX: the final Dense must output raw logits (activation=None), because
# cross_entropy_with_softmax below applies softmax internally; the original
# Dense(..., activation=C.softmax) applied softmax twice, which distorts the
# loss gradients during training.
model_rnn = Sequential([Embedding(emb_dim),
                        Recurrence(LSTM(hidden_dim), go_backwards=False),
                        Dense(label_dim, activation=None)])(input)

# Training criterion (softmax + cross-entropy fused) and evaluation metric.
loss = C.cross_entropy_with_softmax(model_rnn, label)
error = C.classification_error(model_rnn, label)


[(12,), (222, 12), (888,), (150, 888), (100, 150), (222, 888)]
[(12,), (222, 12), (888,), (150, 888), (100, 150), (222, 888)]
Training 348900 parameters in 6 parameter tensors.
None

In [115]:
# Evaluate the RNN on one random sequence of 300 steps, each a 100-dim vector.
seq_len = 300
x = np.random.uniform(size=(seq_len, input_dim)).astype(np.float32)
# NOTE(review): `y` is never used in this cell — presumably a placeholder for a
# later training step; confirm before deleting.
y = np.zeros((seq_len, label_dim), dtype=np.float32)

y_pred = model_rnn.eval({input: x})
print(y_pred.shape)  # (1, 300, 12): (batch, sequence length, label_dim)


(1, 300, 12)

In [ ]: