In [1]:
from theano import tensor
x = tensor.matrix('features')

In [2]:
from blocks.bricks import Linear, Rectifier, Softmax

# first layer: 784-dimensional input to a 100-unit hidden layer with a ReLU
input_to_hidden = Linear(name='input_to_hidden', input_dim=784, output_dim=100)
h = Rectifier().apply(input_to_hidden.apply(x))

# second layer: 100 hidden units to 10 class scores, normalized by a softmax
hidden_to_output = Linear(name='hidden_to_output', input_dim=100, output_dim=10)
y_hat = Softmax().apply(hidden_to_output.apply(h))

In [3]:
y = tensor.lmatrix('targets')
from blocks.bricks.cost import CategoricalCrossEntropy
# targets arrive as a (batch, 1) integer matrix, hence the flatten()
cost = CategoricalCrossEntropy().apply(y.flatten(), y_hat)

In [4]:
from blocks.bricks import WEIGHT
from blocks.graph import ComputationGraph
from blocks.filter import VariableFilter

cg = ComputationGraph(cost)
# pick the two weight matrices out of the graph and add an L2 penalty
W1, W2 = VariableFilter(roles=[WEIGHT])(cg.variables)
cost = cost + 0.005 * (W1 ** 2).sum() + 0.005 * (W2 ** 2).sum()
cost.name = 'cost_with_regularization'
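
The same penalty can be written once over every weight in the graph, which scales to deeper networks (a minimal sketch; `weights` and `l2_penalty` are names introduced here, not part of the tutorial):

weights = VariableFilter(roles=[WEIGHT])(cg.variables)
l2_penalty = 0.005 * sum((W ** 2).sum() for W in weights)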

In [5]:
from blocks.bricks import MLP
# the same network in one brick; note that apply() returns the output
# variable, so 'mlp' here is a Theano variable, not the MLP brick itself
mlp = MLP(activations=[Rectifier(), Softmax()], dims=[784, 100, 10]).apply(x)

In [6]:
from blocks.initialization import IsotropicGaussian, Constant
# both layers share the same initialization schemes
input_to_hidden.weights_init = hidden_to_output.weights_init = IsotropicGaussian(0.01)
input_to_hidden.biases_init = hidden_to_output.biases_init = Constant(0)
input_to_hidden.initialize()
hidden_to_output.initialize()
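
A quick forward-pass check that allocation and initialization took effect (a minimal sketch; `predict` and `batch` are names introduced here, not part of the tutorial):

import numpy
import theano
predict = theano.function([x], y_hat)
batch = numpy.random.rand(4, 784).astype(theano.config.floatX)
print predict(batch).shape  # expect (4, 10); each row sums to 1 after the softmax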

In [7]:
from pprint import PrettyPrinter
pp = PrettyPrinter(depth=6)

In [8]:
pp.pprint(input_to_hidden.__dict__)


{'_bound_applications': {'Linear.apply': <blocks.bricks.base.BoundApplication object at 0x107b22590>},
 '_children': [],
 '_params': [W, b],
 '_rng': <mtrand.RandomState object at 0x107cac310>,
 '_seed': 1791095845,
 'allocated': True,
 'allocation_args': ['input_dim', 'output_dim'],
 'allocation_config_pushed': True,
 'auxiliary_variables': [W_norm, b_norm],
 'biases_init': <blocks.initialization.Constant object at 0x107c79e50>,
 'initialization_args': [],
 'initialization_config_pushed': True,
 'initialized': True,
 'input_dim': 784,
 'name': 'input_to_hidden',
 'output_dim': 100,
 'parents': [],
 'updates': OrderedDict(),
 'use_bias': True,
 'weights_init': <blocks.initialization.IsotropicGaussian object at 0x107c798d0>}

In [9]:
pp.pprint(input_to_hidden.weights_init.__dict__)
pp.pprint(input_to_hidden.biases_init.__dict__)
pp.pprint(hidden_to_output.weights_init.__dict__)
pp.pprint(hidden_to_output.biases_init.__dict__)


{'_mean': 0, '_std': 0.01}
{'_constant': array(0)}
{'_mean': 0, '_std': 0.01}
{'_constant': array(0)}

In [10]:
W1.get_value()


Out[10]:
array([[ 0.00764556, -0.01124291, -0.00137316, ...,  0.00715281,
        -0.00843766,  0.00726915],
       [-0.00300742, -0.00752019,  0.00470734, ...,  0.01553072,
         0.00510337, -0.00332438],
       [-0.00816994, -0.00480912, -0.00021066, ..., -0.01991693,
        -0.00769432, -0.00765804],
       ..., 
       [ 0.00511479,  0.00822329, -0.02027499, ..., -0.01535202,
        -0.00611927,  0.00228889],
       [ 0.00611224,  0.01116154,  0.00436344, ...,  0.00152842,
         0.00196134,  0.00373074],
       [ 0.01813583,  0.00565259,  0.00071371, ...,  0.00566098,
        -0.01152798,  0.01192834]])

In [12]:
from fuel.datasets import MNIST
mnist = MNIST("train")  # Fuel's built-in MNIST training set (60,000 examples)

In [15]:
from fuel.streams import DataStream
from fuel.schemes import SequentialScheme
from fuel.transformers import Flatten
# the stream delivers 28x28 images; Flatten turns them into 784-vectors
data_stream = Flatten(DataStream.default_stream(
    mnist,
    iteration_scheme=SequentialScheme(mnist.num_examples, batch_size=256)))
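
To see what the stream yields, you can pull a single batch (a sketch; `as_dict=True` keys the batch by source name):

batch = next(data_stream.get_epoch_iterator(as_dict=True))
print batch['features'].shape  # expect (256, 784) after Flatten
print batch['targets'].shape   # expect (256, 1)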

In [16]:
from blocks.algorithms import GradientDescent, Scale
algorithm = GradientDescent(cost=cost, params=cg.parameters,
                            step_rule=Scale(learning_rate=0.1))
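
Scale is plain SGD. In later Blocks releases the step rule can be composed, for example clipping gradients before scaling (a sketch, assuming CompositeRule and StepClipping are available in your version):

from blocks.algorithms import CompositeRule, StepClipping
algorithm = GradientDescent(
    cost=cost, params=cg.parameters,
    step_rule=CompositeRule([StepClipping(threshold=1.0),
                             Scale(learning_rate=0.1)]))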

In [17]:
mnist_test = MNIST("test")
data_stream_test = Flatten(DataStream.default_stream(
    mnist_test,
    iteration_scheme=SequentialScheme(
        mnist_test.num_examples, batch_size=1024)))

In [18]:
from blocks.extensions.monitoring import DataStreamMonitoring
monitor = DataStreamMonitoring(
    variables=[cost], data_stream=data_stream_test, prefix="test")
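
Besides the cost, the same monitor can track a misclassification rate (a sketch using the MisclassificationRate brick; the 'error_rate' name is introduced here):

from blocks.bricks.cost import MisclassificationRate
error_rate = MisclassificationRate().apply(y.flatten(), y_hat)
error_rate.name = 'error_rate'
monitor = DataStreamMonitoring(
    variables=[cost, error_rate], data_stream=data_stream_test, prefix="test")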

In [19]:
from blocks.main_loop import MainLoop
from blocks.extensions import FinishAfter, Printing
main_loop = MainLoop(data_stream=data_stream, algorithm=algorithm,
                     extensions=[monitor, FinishAfter(after_n_epochs=2), Printing()])
main_loop.run()


-------------------------------------------------------------------------------
BEFORE FIRST EPOCH
-------------------------------------------------------------------------------
Training status:
	 batch_interrupt_received: False
	 epoch_interrupt_received: False
	 epoch_started: True
	 epochs_done: 0
	 iterations_done: 0
	 received_first_batch: False
	 training_started: True
Log records from the iteration 0:
	 test_cost_with_regularization: 2.34190669218


-------------------------------------------------------------------------------
AFTER ANOTHER EPOCH
-------------------------------------------------------------------------------
Training status:
	 batch_interrupt_received: False
	 epoch_interrupt_received: False
	 epoch_started: False
	 epochs_done: 1
	 iterations_done: 235
	 received_first_batch: True
	 training_started: True
Log records from the iteration 235:
	 test_cost_with_regularization: 0.657896590087


-------------------------------------------------------------------------------
AFTER ANOTHER EPOCH
-------------------------------------------------------------------------------
Training status:
	 batch_interrupt_received: False
	 epoch_interrupt_received: False
	 epoch_started: False
	 epochs_done: 2
	 iterations_done: 470
	 received_first_batch: True
	 training_started: True
Log records from the iteration 470:
	 test_cost_with_regularization: 0.572149991875
	 training_finish_requested: True


-------------------------------------------------------------------------------
TRAINING HAS BEEN FINISHED:
-------------------------------------------------------------------------------
Training status:
	 batch_interrupt_received: False
	 epoch_interrupt_received: False
	 epoch_started: False
	 epochs_done: 2
	 iterations_done: 470
	 received_first_batch: True
	 training_started: True
Log records from the iteration 470:
	 test_cost_with_regularization: 0.572149991875
	 training_finish_requested: True
	 training_finished: True
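
After training, the same graph can be compiled into a predictor and checked against a test batch (a sketch; `predict` and `accuracy` are names introduced here, not part of the tutorial):

import numpy
import theano
predict = theano.function([x], y_hat)
batch = next(data_stream_test.get_epoch_iterator(as_dict=True))
predicted = predict(batch['features']).argmax(axis=1)
accuracy = numpy.mean(predicted == batch['targets'].flatten())
print accuracy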


In [23]:
import theano
from theano import tensor
from blocks.bricks import Tanh
x = tensor.vector('x')
y = Tanh().apply(x)
print y
print isinstance(y, theano.Variable)


tanh_apply_output
True

In [24]:
from blocks.bricks import Linear
from blocks.initialization import IsotropicGaussian, Constant
linear = Linear(input_dim=10, output_dim=5,
                weights_init=IsotropicGaussian(),
                biases_init=Constant(0.01))
y = linear.apply(x)

In [26]:
print linear.params  # allocated as a side effect of apply() above...
print linear.params[1].get_value()  # ...but not yet initialized, hence the nans


[W, b]
[ nan  nan  nan  nan  nan]

In [28]:
print linear.initialize()  # fills the allocated parameters in place; returns None
print linear.params[1].get_value()


None
[ 0.01  0.01  0.01  0.01  0.01]

In [29]:
z = tensor.max(y + 4)
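
Here z mixes a brick output with ordinary Theano ops; the result is a regular Theano variable that compiles like any other graph (a sketch; `f` is a name introduced here):

import numpy
import theano
f = theano.function([x], z)
print f(numpy.ones(10, dtype=theano.config.floatX))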

In [32]:
linear2 = Linear(output_dim=10)
print linear2.input_dim  # left unset, so it reports the NoneAllocation placeholder


NoneAllocation

In [33]:
linear2.input_dim = linear.output_dim
linear2.apply(x)


Out[33]:
linear_apply_output

In [35]:
linear3 = Linear(input_dim=10, output_dim=5)
print linear3.params


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-35-76520455977a> in <module>()
      1 linear3 = Linear(input_dim=10, output_dim=5)
----> 2 print linear3.params

/Users/dikien/anaconda/lib/python2.7/site-packages/blocks-0.0.1-py2.7.egg/blocks/bricks/__init__.pyc in __getattr__(self, name)
    176             message += (" (which is a part of 'Feedforward' interface it"
    177                         " claims to support)")
--> 178         raise AttributeError(message)
    179 
    180 

AttributeError: 'Linear' object does not have an attribute 'params'

In [36]:
print linear3.allocate()  # creates the parameters without initializing them; returns None
print linear3.params


None
[W, b]

In [37]:
from blocks.bricks import MLP, Logistic
mlp = MLP(activations=[Logistic(name='sigmoid_0'), Logistic(name='sigmoid_1')],
          dims=[16, 8, 4],
          weights_init=IsotropicGaussian(), biases_init=Constant(0.01))

In [38]:
[child.name for child in mlp.children]


Out[38]:
['linear_0', 'sigmoid_0', 'linear_1', 'sigmoid_1']

In [39]:
y = mlp.apply(x)
print mlp.children[0].input_dim


16

In [41]:
mlp.initialize()
mlp.children[0].params[0].get_value()


Out[41]:
array([[-1.35058998, -0.15197637,  0.53502738, -0.47966629,  0.92966459,
        -1.22113506,  0.96468899,  0.73510008],
       [-0.45670381,  0.19266354, -0.07672695,  0.28511651, -0.06442363,
        -1.41937145, -0.04914698,  2.33591523],
       [ 1.16478325, -0.34623004,  0.5345563 , -1.71769844, -0.42011699,
         0.47864268,  0.73424183,  2.0425785 ],
       [-0.0125837 ,  0.83327666, -0.94332879, -0.72426034, -1.1723934 ,
         0.3113583 ,  1.59868435,  0.3198274 ],
       [-1.15886087, -0.44906055, -3.79311256,  0.9768675 , -1.27585145,
        -0.92906794,  0.53590255,  1.24693625],
       [-1.06661713,  0.61910394, -1.07465622, -1.03195817, -1.11904791,
        -0.04534472, -0.1913915 ,  0.10530464],
       [ 0.14946873, -0.51227525,  0.02485354,  0.4446007 ,  0.07820676,
         1.12570614,  0.18953891, -1.11370218],
       [ 0.12264441,  0.42791051,  0.18152155,  2.65083586, -0.56622578,
         1.90305533,  1.01967496, -0.12577045],
       [ 0.84206732, -1.38335095, -1.11673754, -0.69418529, -0.6821722 ,
        -0.41538969,  1.07936735, -0.16090952],
       [ 0.94302499,  0.08017168,  1.13333723, -0.22460097,  0.27759994,
        -0.74203216, -0.9304304 , -0.11058776],
       [ 0.69545019, -1.40431261, -0.52496591, -0.69136644, -1.00345628,
         1.32085419, -0.23084864,  0.61464397],
       [ 0.99973141,  1.02931605,  2.26670998, -0.85315681, -0.47203316,
        -0.43730791,  0.01376218, -0.37159365],
       [ 1.94926501,  0.33525743, -0.16447811, -0.88453705, -1.24470596,
        -0.35060332, -0.89134472,  0.03600519],
       [ 0.04438801, -0.68683602, -0.04441493,  0.29935948,  1.17864468,
         2.36255129,  0.4550282 , -0.78141993],
       [-1.49097226, -1.07247194,  1.57032937,  1.61931369, -0.04771239,
        -0.35347446,  1.31771297,  0.24390814],
       [-1.23620203,  0.37014498, -0.68229067, -1.53530658,  1.13191786,
        -1.75742619, -0.37807416, -0.13312945]])

In [45]:
mlp = MLP(activations=[Logistic(name='sigmoid_0'), Logistic(name='sigmoid_1')],
          dims=[16, 8, 4],
          weights_init=IsotropicGaussian(), biases_init=Constant(0.01))
y = mlp.apply(x)
# push the MLP-level schemes down to the child bricks, then override one
# child before initialize() is called
mlp.push_initialization_config()
mlp.children[0].weights_init = Constant(0.01)

In [46]:
mlp.initialize()
print mlp.children[0].params[0].get_value()


[[ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]
 [ 0.01  0.01  0.01  0.01  0.01  0.01  0.01  0.01]]

In [47]:
import theano
a = theano.shared(3.)
a.name = 'a'
x = theano.tensor.scalar('data')
cost = abs(x ** 2 - x ** a)  # zero when a == 2, so SGD should pull a from 3 toward 2
cost.name = 'cost'

In [48]:
import numpy
from fuel.streams import DataStream
from fuel.datasets import IterableDataset
# wrap 150 random scalars as a one-example-at-a-time dataset
data_stream = DataStream(IterableDataset(
    numpy.random.rand(150).astype(theano.config.floatX)))

In [49]:
from blocks.main_loop import MainLoop
from blocks.algorithms import GradientDescent, Scale
from blocks.extensions import FinishAfter
from blocks.extensions.monitoring import TrainingDataMonitoring
from blocks.extras.extensions.plot import Plot  
main_loop = MainLoop(
    model=None, data_stream=data_stream,
    algorithm=GradientDescent(cost=cost,
                              params=[a],
                              step_rule=Scale(learning_rate=0.1)),
    extensions=[FinishAfter(after_n_epochs=1),
                TrainingDataMonitoring([cost, a], after_batch=True),
                Plot('Plotting example', channels=[['cost'], ['a']],
                     after_batch=True)])  
main_loop.run()


---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-49-c9da6271c58c> in <module>()
      3 from blocks.extensions import FinishAfter
      4 from blocks.extensions.monitoring import TrainingDataMonitoring
----> 5 from blocks.extras.extensions.plot import Plot
      6 main_loop = MainLoop(
      7     model=None, data_stream=data_stream,

ImportError: No module named extras.extensions.plot
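
The Plot extension lives in the separate blocks-extras package (and live plotting additionally expects a running Bokeh server), so the import fails on a plain Blocks install. A sketch of the same loop with the plotting extension dropped, assuming the earlier imports:

from blocks.extensions import Printing
main_loop = MainLoop(
    model=None, data_stream=data_stream,
    algorithm=GradientDescent(cost=cost,
                              params=[a],
                              step_rule=Scale(learning_rate=0.1)),
    extensions=[FinishAfter(after_n_epochs=1),
                TrainingDataMonitoring([cost, a], after_batch=True),
                Printing(after_batch=True)])
main_loop.run()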

In [ ]: