A demo of the use of Sequential()
In order to construct a network layer by layer, we can use Sequential(), which provides a much more intuitive way to define a model
In [1]:
import cntk
from cntk.layers import Dense
from cntk.models import Sequential, LayerStack
import cntk.ops as C
from cntk.blocks import default_options
import numpy as np
In [2]:
# Make glorot_uniform the default weight initializer for every layer built
# inside this block. Both models share it here.
# NOTE(review): the original comment said the defaults were reset "for model_1"
# only — the exported text lost indentation, so model_2 may have been defined
# outside the `with` in the source notebook; confirm against the original.
with default_options(init=cntk.initializer.glorot_uniform()):
    # A plain two-layer MLP: 1024 ReLU units into a 9000-way softmax.
    model_1 = Sequential([
        Dense(1024, activation=C.relu),
        Dense(9000, activation=C.softmax),
    ])
    # Six identical 2048-unit sigmoid layers (stacked via LayerStack),
    # followed by the same 9000-way softmax output layer.
    model_2 = Sequential([
        LayerStack(6, lambda: Dense(2048, activation=C.sigmoid)),
        Dense(9000, activation=C.softmax),
    ])
In [3]:
# Declare a 784-dimensional float32 input variable (MNIST-sized) and apply
# both models to it. This only wires up the computation graphs; nothing is
# evaluated until .eval() is called in a later cell.
input_dim = 784
input = C.input_variable(input_dim, np.float32)
output_1 = model_1(input)  # graph node for model_1's softmax output
output_2 = model_2(input)  # graph node for model_2's softmax output
In [4]:
# Evaluate both models on a single random 784-dimensional sample and print
# the resulting softmax distributions.
# NOTE: the original cell also built a 10-element one-hot label
# (y = zeros(10); y[4] = 1), which was never used anywhere and did not match
# the models' 9000-way outputs — removed as dead, misleading code.
x = np.asarray(np.random.uniform(size=(input_dim,)), dtype=np.float32)
print(output_1.eval({input: x}))
print(output_2.eval({input: x}))
In [5]:
# Demonstrate the Embedding layer: project the 784-dimensional `input` down
# to 10 dimensions and evaluate it on the sample `x` from the previous cell.
# (Applied to a dense vector, Embedding acts as a learned linear projection.)
from cntk.layers import Embedding
emb = Embedding(10)(input)
y_emb = emb.eval({input: x})  # depends on `input` and `x` defined above
print (y_emb)
In [6]:
# Inspect the learnable parameters of a single Dense layer.
# NOTE: this rebinds `input` to a 2-dimensional variable, shadowing the
# 784-dimensional one used by the earlier cells — re-running those cells
# after this one would fail.
input = C.input_variable(2, np.float32)
layer_1 = Dense(3, activation=C.relu)(input)
# parameters[0] / parameters[1] are the layer's two parameter tensors —
# presumably the weight matrix and the bias; confirm the ordering against
# the CNTK layers documentation.
print (layer_1.parameters[0].value)
print (layer_1.parameters[1].value)
In [7]:
# Build a small two-layer MLP with Sequential and list the shapes of all of
# its parameters (weights and biases of both Dense layers).
# NOTE: rebinds `input_dim` and `input` yet again; each demo cell here is
# self-contained but order-sensitive on re-run.
input_dim = 10
hidden_dim = 20
output_dim = 30
input = C.input_variable(input_dim, np.float32)
mlp = Sequential([Dense(hidden_dim, activation=C.relu),
Dense(output_dim, activation=C.softmax)])(input)
print ([x.shape for x in mlp.parameters])
In [ ]: