In [3]:
from cntk.layers import Dense
from cntk.models import Sequential
import cntk.ops as C
from cntk.ops import element_times, constant
from cntk.io import MinibatchSource, CTFDeserializer, StreamDef, StreamDefs, INFINITELY_REPEAT, FULL_DATA_SWEEP
from cntk.learner import sgd
from cntk import Trainer
import numpy as np
In [4]:
input_dim = 10
input = C.input_variable(input_dim, np.float32)
In [5]:
# Forward pass: double every element of the input.
output = element_times(constant(2.), input)
x = np.asarray(np.random.uniform(size=(10,)), dtype=np.float32)
y = output.eval({input: x})
print(x)
print(y)
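The graph above just doubles its input, so `y` should equal `2 * x` elementwise. A minimal sanity check (assuming `eval` returns a numpy array whose leading batch/sequence axes can be squeezed away):
In [ ]:
print(np.allclose(np.squeeze(y), 2 * x))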
In [6]:
def create_reader(path, is_training, input_dim, label_dim):
    # Wire the 'features' and 'labels' streams of a CTF-format file into a reader.
    return MinibatchSource(CTFDeserializer(path, StreamDefs(
            features=StreamDef(field='features', shape=input_dim, is_sparse=False),
            labels=StreamDef(field='labels', shape=label_dim, is_sparse=False)
        )), randomize=is_training,
        epoch_size=INFINITELY_REPEAT if is_training else FULL_DATA_SWEEP)
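`CTFDeserializer` reads the CNTK text format: one sample per line, each stream introduced by `|<stream name>` followed by its values. A hypothetical two-line MNIST file would look like this (784 pixel values and a one-hot 10-class label per line, abbreviated with `...` here):
|labels 0 0 0 0 0 1 0 0 0 0 |features 0 0 0 ... 24 186 0
|labels 0 1 0 0 0 0 0 0 0 0 |features 0 0 97 ... 0 0 0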
In [7]:
from cntk.ops import cross_entropy_with_softmax, classification_error

def simple_mnist(debug_output=False):
    input_dim = 784
    num_output_classes = 10
    num_hidden_layers = 1
    hidden_layer_dim = 200

    # Input variables denoting the features and label data.
    input = C.input_variable(input_dim, np.float32)
    label = C.input_variable(num_output_classes, np.float32)

    # Scale the [0, 255] pixel values down by 1/256 (= 0.00390625).
    scaled_input = element_times(constant(0.00390625), input)

    # One hidden ReLU layer followed by a linear output layer.
    model = Sequential([Dense(hidden_layer_dim, activation=C.relu),
                        Dense(num_output_classes)])
    netout = model(scaled_input)

    cross_entropy = cross_entropy_with_softmax(netout, label)
    error = classification_error(netout, label)

    trainer = Trainer(model=netout, loss_function=cross_entropy, eval_function=error,
                      parameter_learners=sgd(netout.parameters, lr=0.0001))
    #trainer = Trainer(netout, cross_entropy, error, sgd(netout.parameters, lr=0.003125))

    data_file = '/home/jian/repos/cntk/Examples/Image/Datasets/MNIST/Train-28x28_cntk_text.txt'
    reader_train = create_reader(data_file, True, input_dim, num_output_classes)

    # Map the reader's streams to the network's input variables.
    input_map = {
        input: reader_train.streams.features,
        label: reader_train.streams.labels
    }

    # Training loop (an assumed completion; the original cell stopped at input_map).
    minibatch_size = 64
    num_minibatches = 1000
    for i in range(num_minibatches):
        mb = reader_train.next_minibatch(minibatch_size, input_map=input_map)
        trainer.train_minibatch(mb)
    return trainer
In [8]:
trainer = simple_mnist()
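To measure accuracy after training, the same reader pattern can score a held-out set with `Trainer.test_minibatch`. A minimal sketch, not part of the original notebook: it assumes `simple_mnist` is refactored to also return its `input` and `label` variables, and that a companion Test-28x28_cntk_text.txt file exists (both are assumptions, not shown above).
In [ ]:
# Hedged sketch: average classification error over the test set.
# `trainer`, `input`, `label` are assumed to be the objects built in simple_mnist.
def evaluate(trainer, input, label, test_file, input_dim=784, num_output_classes=10,
             minibatch_size=1000, num_minibatches=10):
    reader_test = create_reader(test_file, False, input_dim, num_output_classes)
    test_map = {
        input: reader_test.streams.features,
        label: reader_test.streams.labels
    }
    test_error = 0.0
    for i in range(num_minibatches):
        mb = reader_test.next_minibatch(minibatch_size, input_map=test_map)
        test_error += trainer.test_minibatch(mb)  # returns the batch's average error
    return test_error / num_minibatches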
In [ ]: