In [4]:
import os
import numpy as np
import matplotlib.pyplot as plt
import math

from cntk.blocks import default_options
from cntk.layers import Convolution, MaxPooling, AveragePooling, Dropout, BatchNormalization, Dense
from cntk.models import Sequential, LayerStack
from cntk.io import MinibatchSource, ImageDeserializer, StreamDef, StreamDefs
from cntk.initializer import glorot_uniform, he_normal
from cntk import Trainer
from cntk.learner import momentum_sgd, learning_rate_schedule, momentum_as_time_constant_schedule
import cntk.ops as C
from cntk.ops import cross_entropy_with_softmax, classification_error, relu, input_variable, softmax, element_times
from cntk.utils import *


---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-4-0589cb5f166f> in <module>()
     10 from cntk.initializer import glorot_uniform, he_normal
     11 from cntk import Trainer
---> 12 from cntk.learner import momentum_sgd, learning_rate_schedule, momentum_as_time_constant_schedule
     13 import cntk.ops as C
     14 from cntk.ops import cross_entropy_with_softmax, classification_error, relu, input_variable, softmax, element_times

ImportError: cannot import name 'momentum_as_time_constant_schedule'
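
The import error above is most likely a CNTK version mismatch: in CNTK 2.0 and later the learner helpers live in cntk.learners (plural) rather than cntk.learner. A hedged sketch of the equivalent import on a newer build; the exact module layout depends on the installed version:

from cntk.learners import momentum_sgd, learning_rate_schedule, momentum_as_time_constant_schedule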

In [51]:
# model dimensions
image_height = 32
image_width  = 32
num_channels = 3
num_classes  = 10
def create_reader(map_file, mean_file, train):
    if not os.path.exists(map_file) or not os.path.exists(mean_file):
        raise RuntimeError("This tutorials depends 201A tutorials, please run 201A first.")

    # transformation pipeline for the features has jitter/crop only when training
    transforms = []
    if train:
        transforms += [
            ImageDeserializer.crop(crop_type='Random', ratio=0.8, jitter_type='uniRatio') # train uses jitter
        ]
    transforms += [
        ImageDeserializer.scale(width=image_width, height=image_height, channels=num_channels, interpolations='linear'),
        ImageDeserializer.mean(mean_file)
    ]
    # deserializer
    return MinibatchSource(ImageDeserializer(map_file, StreamDefs(
        features = StreamDef(field='image', transforms=transforms), # first column in map file is referred to as 'image'
        labels   = StreamDef(field='label', shape=num_classes)      # and second as 'label'
    )))
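
For reference, each line of the map file read by ImageDeserializer is a tab-separated image path and zero-based label index (the file is produced by the 201A data-preparation step); the two lines below are made-up examples:

/some/path/cifar-10/train/00001.png	5
/some/path/cifar-10/train/00002.png	0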

In [52]:
data_dir = '/home/xtalpi/git_test/test_data/examples/cifar-10/cntk/'
train_map = data_dir + 'train_map.txt'
test_map = data_dir + 'test_map.txt'
mean_xml = data_dir + 'CIFAR-10_mean.xml'
reader_train = create_reader(train_map, mean_xml, True)
reader_test = create_reader(test_map, mean_xml, False)

In [53]:
def create_model(input, out_dims):
    net = Convolution(filter_shape=(5, 5), num_filters=32, activation=C.relu, init=glorot_uniform(), pad=True)(input)
    net = MaxPooling(filter_shape=(3, 3), strides=(2, 2))(net)
    
    net = Convolution(filter_shape=(5, 5), num_filters=32, activation=C.relu, init=glorot_uniform(), pad=True)(net)
    net = MaxPooling(filter_shape=(3, 3), strides=(2, 2))(net)
    
    net = Convolution(filter_shape=(5, 5), num_filters=64, activation=C.relu, init=glorot_uniform(), pad=True)(net)
    net = MaxPooling(filter_shape=(3, 3), strides=(2, 2))(net)
    
    net = Dense(64, init=glorot_uniform())(net)
    net = Dense(out_dims, init=glorot_uniform(), activation=None)(net)
    
    return net
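
# The same network can be written more compactly with Sequential/LayerStack and
# default_options (imported above but unused so far). This is only a sketch of
# that idiom; the name create_model_terse is made up here and the exact
# default_options keywords depend on the CNTK build.
def create_model_terse(input, out_dims):
    with default_options(activation=C.relu, init=glorot_uniform()):
        model = Sequential([
            LayerStack(3, lambda i: [
                Convolution(filter_shape=(5, 5), num_filters=[32, 32, 64][i], pad=True),
                MaxPooling(filter_shape=(3, 3), strides=(2, 2))
            ]),
            Dense(64),
            Dense(out_dims, activation=None)
        ])
    return model(input)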

def train_and_evaluate(reader_train, reader_test, max_epochs, model_func):
    # Input variables denoting the features and label data
    input_var = input_variable((num_channels, image_height, image_width))
    label_var = input_variable((num_classes))
    feature_scale = 1.0 / 256.0
    input_var_norm = element_times(input_var, feature_scale)
    
    net = create_model(input_var_norm, num_classes)
    
    cross_entropy = C.cross_entropy_with_softmax(net, label_var)
    error = C.classification_error(net, label_var)
    
    epoch_size     = 50000
    minibatch_size = 64

    # Set training parameters
    lr_per_minibatch       = learning_rate_schedule([0.01]*10 + [0.003]*10 + [0.001], epoch_size)
    momentum_time_constant = momentum_as_time_constant_schedule(-minibatch_size/np.log(0.9))
    l2_reg_weight          = 0.001
    print (lr_per_minibatch)
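
    # --- sketch only: the remainder of the routine, following the standard CNTK 2.0
    # tutorial pattern; exact Trainer/learner signatures vary across beta builds ---
    learner = momentum_sgd(net.parameters, lr=lr_per_minibatch,
                           momentum=momentum_time_constant,
                           l2_regularization_weight=l2_reg_weight)
    trainer = Trainer(net, (cross_entropy, error), [learner])

    # map the reader streams (named in StreamDefs above) to the network inputs
    input_map = {
        input_var: reader_train.streams.features,
        label_var: reader_train.streams.labels
    }

    # loop over epochs and minibatches
    for epoch in range(max_epochs):
        sample_count = 0
        while sample_count < epoch_size:
            data = reader_train.next_minibatch(min(minibatch_size, epoch_size - sample_count),
                                               input_map=input_map)
            trainer.train_minibatch(data)
            sample_count += trainer.previous_minibatch_sample_count

    # evaluation over reader_test would follow the same pattern with trainer.test_minibatch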

In [55]:
#train_and_evaluate(reader_train, reader_test, 100, create_model)


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-55-535fb5123ce1> in <module>()
----> 1 train_and_evaluate(reader_train, reader_test, 100, create_model)

<ipython-input-53-139412a7bdbe> in train_and_evaluate(reader_train, reader_test, max_epochs, model_func)
     31     # Set training parameters
     32     lr_per_minibatch       = learning_rate_schedule([0.01]*10 + [0.003]*10 + [0.001], epoch_size)
---> 33     momentum_time_constant = momentum_as_time_constant_schedule(-minibatch_size/np.log(0.9))
     34     l2_reg_weight          = 0.001
     35     print (lr_per_minibatch)

NameError: name 'momentum_as_time_constant_schedule' is not defined

In [42]:
input_var = input_variable((3, 480, 640))
net = create_model(input_var, 10)
img_mat = np.asarray(np.random.uniform(size=(3, 480, 640)), dtype=np.float32)
y = net.eval({input_var: img_mat})
print (img_mat.shape)
print (y.shape)


(3, 480, 640)
(1, 1, 10)
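
The two leading singleton dimensions in y are the dynamic (batch and sequence) axes that eval() prepends to the 10 class scores; np.squeeze(y) would recover a plain (10,) vector.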

In [43]:
input = C.input_variable(100, np.float32)
layer_1 = Dense(10, activation=C.relu)(input)
layer_2 = Dense(10, activation=None)(input)
layer_3 = Dense(10)(input)

In [46]:
x = np.random.uniform(size=(100)).astype(np.float32)
para = C.parameter(shape=(list(input.shape)+[10]), init=glorot_uniform())  # standalone (100 x 10) parameter, created only to inspect; not attached to any layer
print (para)
y_1 = layer_1.eval({input: x})
y_2 = layer_2.eval({input: x})
y_3 = layer_3.eval({input: x})
print (y_1)
print (y_2)
print (y_3)


<cntk.ops.variables.Parameter; proxy of <Swig Object of type 'CNTK::Parameter *' at 0x7f939594d180> >
[[[ 0.          0.          0.          0.61932898  0.          0.          0.
    0.          0.          0.43507668]]]
[[[ 0.24702844 -0.89035076  0.54310364 -0.3095201   0.06112466  0.29654005
   -0.91910577  0.61358446  0.05095169 -0.10280191]]]
[[[ 0.64680076  0.51406795 -0.47399288 -0.79891557 -0.04575209  0.33376914
    0.0175259  -0.3553077   0.52166581  0.33447912]]]
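
Note that layer_2 (activation=None) and layer_3 (no activation argument given) both produce negative values, while layer_1's ReLU output is clamped at zero: the default activation of Dense is the identity, so layer_2 and layer_3 are equivalent up to their independent random initializations.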
