Network with CNTK
Run a network built with CNTK on real data generated by NumPy. Before running the following code, make sure you have activated the CNTK env with the command:
source "/home/jian/cntk/activate-cntk"
In [1]:
# import cntk and its modules
import cntk
from cntk.layers import Dense
import cntk.ops as C
import numpy as np
In [5]:
# build the network with CNTK; "input" and "label" are symbolic variables
input_dim = 784
label_dim = 10
input = cntk.input_variable(input_dim, np.float32)
label = cntk.input_variable(label_dim, np.float32)
h1 = Dense(100, activation=C.relu)(input)
# note: h2 ends in a softmax, while cross_entropy_with_softmax below applies
# softmax again; for training one would normally use activation=None here
h2 = Dense(10, activation=C.softmax)(h1)
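Before feeding data, it can help to inspect the symbolic shapes of the graph; a small sketch (assuming a Function's .shape reports its output shape, as in CNTK 2.x):
In [ ]:
# symbolic shapes of the input and the two Dense layers
print(input.shape, h1.shape, h2.shape)   # expected: (784,) (100,) (10,)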
In [6]:
# generate real data with NumPy and run the built network
x = np.random.uniform(size=(input_dim,)).astype(np.float32)
y = np.zeros(label_dim, dtype=np.float32); y[4] = 1.
y_pred = h2.eval({input:x})
print (y)
print (y_pred)
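Since h2 ends in a softmax, the predicted class is the index of the largest output; a quick illustrative check:
In [ ]:
# compare the predicted class with the true one-hot label
print(np.argmax(y_pred), np.argmax(y))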
In [7]:
# obtain "cross entropy loss" and "classification error"
get_cross_entropy = C.cross_entropy_with_softmax(h2, label)
get_error = C.classification_error(h2, label)
In [8]:
print (get_error.eval({input:x.reshape(1, 1, input_dim), label:y.reshape(1, 1, label_dim)}))
print (get_cross_entropy.eval({input:x.reshape(1, 1, input_dim), label:y.reshape(1, 1, label_dim)}))
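With the loss and the error defined, the usual next step is to wrap them in a Trainer. Below is a minimal training-loop sketch on random (fake) minibatches, assuming the CNTK 2.x learner API (cntk.learning_rate_schedule, cntk.sgd, cntk.Trainer); it only shows the wiring, not a meaningful training run:
In [ ]:
# train on fake minibatches of 8 samples, just to show the wiring
lr = cntk.learning_rate_schedule(0.01, cntk.UnitType.minibatch)
learner = cntk.sgd(h2.parameters, lr)
trainer = cntk.Trainer(h2, (get_cross_entropy, get_error), [learner])
for step in range(10):
    xb = np.random.uniform(size=(8, input_dim)).astype(np.float32)
    yb = np.eye(label_dim, dtype=np.float32)[np.random.randint(label_dim, size=8)]
    trainer.train_minibatch({input: xb, label: yb})
    print(trainer.previous_minibatch_loss_average)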
In [9]:
input = C.input_variable(shape=100, data_type=np.float32)
layer_1 = Dense(5, activation=C.relu)(input)
layer_2 = Dense(5, activation=None)(input)  # layer_2 and layer_3 are equivalent:
layer_3 = Dense(5)(input)                   # the default activation is identity
In [14]:
x = np.random.uniform(size=(100, )).astype(np.float32)
y_1 = layer_1.eval({input: x})
y_2 = layer_2.eval({input: x})
y_3 = layer_3.eval({input: x})
print (y_1)
print (y_2)
print (y_3)
In [16]:
print (len(layer_2.parameters))
print (layer_2.parameters[0].value.shape, layer_2.parameters[1].value.shape)
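Since layer_2 uses the identity activation, its output should equal x·W + b. A quick NumPy cross-check (assuming the applied function exposes its parameters as .W and .b, the same pattern In [27] below uses for model layers):
In [ ]:
# verify that Dense with identity activation computes x @ W + b
manual = x.dot(layer_2.W.value) + layer_2.b.value
print(np.allclose(manual, y_2))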
In [27]:
from cntk.models import Sequential
model = Sequential([Dense(11, activation=C.relu),
                    Dense(22, activation=C.softmax)])
layer = Dense(33, activation=C.tanh)
print (model.layers[0].b.shape, model.layers[0].W.shape)
print (layer.b.shape, layer.W.shape)
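The W shapes printed above should still show an inferred (-1) input dimension, since the model has not been applied to a concrete input yet. A short usage sketch (the 7-dim input here is hypothetical, chosen only for illustration):
In [ ]:
# applying the model binds the input dimension and lets us evaluate it
x_in = cntk.input_variable(7, np.float32)
z = model(x_in)
sample = np.random.uniform(size=(7,)).astype(np.float32)
print(z.eval({x_in: sample}))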