A demo of building a fully-connected network with CNTK; we will:

  1. import the necessary modules (mainly cntk)
  2. build the symbolic network with cntk
  3. run the network with real data generated by numpy

Before running the following code, make sure you have activated the CNTK environment with the command:

source "/home/jian/cntk/activate-cntk"


In [1]:
# import cntk and its modules
import cntk
from cntk.layers import Dense
import cntk.ops as C

In [2]:
import cntk
from cntk.layers import Dense
import cntk.ops as C
import numpy as np

In [5]:
# Build the symbolic network: 784-dim input -> Dense(100, relu) -> Dense(10, softmax).
# "input" and "label" are symbolic placeholders; real data is bound at eval() time.
input_dim = 784
label_dim = 10
# NOTE(review): "input" shadows the Python builtin input(); kept as-is because
# later cells reference this name.
input = cntk.input_variable(input_dim, np.float32)
label = cntk.input_variable(label_dim, np.float32)
h1 = Dense(100, activation=C.relu)(input)
# NOTE(review): h2 already ends in a softmax, yet a later cell feeds h2 into
# cross_entropy_with_softmax, which applies softmax again -- confirm intended.
h2 = Dense(10, activation=C.softmax)(h1)

In [6]:
# Draw one random input sample and build a one-hot target (class 4),
# then run the network forward on the sample.
x = np.random.uniform(size=(input_dim,)).astype(np.float32)
y = np.zeros((10,), dtype=np.float32)
y[4] = 1.
y_pred = h2.eval({input: x})
print(y)
print(y_pred)


[ 0.  0.  0.  0.  1.  0.  0.  0.  0.  0.]
[[[ 0.07237756  0.12026989  0.0320004   0.03301869  0.1552022   0.11792201
    0.16315858  0.10797322  0.10000271  0.0980747 ]]]

In [7]:
# Build symbolic "cross entropy loss" and "classification error" nodes over the
# network output and the label placeholder.
# NOTE(review): h2's last layer already applies softmax, and
# cross_entropy_with_softmax softmaxes its first argument again; the usual
# pattern is to pass pre-softmax logits here. Left unchanged so the recorded
# cell outputs below stay reproducible -- verify this double softmax is intended.
get_cross_entropy = C.cross_entropy_with_softmax(h2, label)
get_error = C.classification_error(h2, label)

In [8]:
# Evaluate both metric nodes on the same (batch=1, seq=1) sample.
feed = {input: x.reshape(1, 1, input_dim), label: y.reshape(1, 1, label_dim)}
print(get_error.eval(feed))
print(get_cross_entropy.eval(feed))


[[[ 1.]]]
[[[ 2.24826503]]]

Test the Dense layer's activation options


In [9]:
# Redefine the symbolic input as a 100-dim vector and build three Dense layers
# to compare activation options.
input = C.input_variable(shape=100, data_type=np.float32)
layer_1 = Dense(5, activation=C.relu)(input)
layer_2 = Dense(5, activation=None)(input)  # activation=None means identity,
layer_3 = Dense(5)(input)                   # so layer_2 and layer_3 behave the same (identity is the default)

In [14]:
# Evaluate all three Dense layers on the same random 100-dim sample and print
# each output; only layer_1 (relu) clamps negatives to zero.
x = np.random.uniform(size=(100,)).astype(np.float32)
y_1 = layer_1.eval({input: x})
y_2 = layer_2.eval({input: x})
y_3 = layer_3.eval({input: x})
for out in (y_1, y_2, y_3):
    print(out)


[[[ 0.          0.03368222  0.          0.          0.        ]]]
[[[-0.44802982  0.84256035 -1.14385903  0.22013612  0.67124766]]]
[[[-0.50391674 -0.87814212 -0.97890955  1.27035201 -0.51800776]]]

In [16]:
# A Dense layer holds two parameter tensors; print their count and shapes
# (the recorded output shows the bias (5,) listed before the weights (100, 5)).
params = layer_2.parameters
print(len(params))
print(params[0].value.shape, params[1].value.shape)


2
(5,) (100, 5)

In [27]:
# Build a two-layer Sequential model and a standalone Dense layer, then inspect
# their parameter shapes before any input has been bound.
from cntk.models import Sequential

model = Sequential([
    Dense(11, activation=C.relu),
    Dense(22, activation=C.softmax),
])
layer = Dense(33, activation=C.tanh)

# W.shape reports -1 for the input dimension: it is inferred lazily, only once
# the layer is first applied to actual data (see the recorded output below).
first = model.layers[0]
print(first.b.shape, first.W.shape)
print(layer.b.shape, layer.W.shape)


(11,) (-1, 11)
(33,) (-1, 33)

In [ ]: