In [1]:
import random
import karma
import pandas as pd
import numpy as np
reload(karma)
Out[1]:
<module 'karma' from '/Users/bayerj/devel/karma/karma.pyc'>
In [2]:
import climin.initialize
import climin.stops
gnumpy: failed to import cudamat. Using npmat instead. No GPU will be used.
In [3]:
class MushroomEnvironment(karma.ContextBanditEnvironment):

    def __init__(self, filename):
        self.filename = filename
        self._init_data()
        super(MushroomEnvironment, self).__init__(self.X.shape[1], 2)

    def _init_data(self):
        # The UCI agaricus-lepiota file has one class column followed by
        # 22 categorical attribute columns.
        df = pd.read_csv(self.filename, header=None,
                         names=['class'] + [str(i) for i in range(22)])
        # One-hot encode every categorical column; this yields one indicator
        # column per attribute level.
        df = pd.get_dummies(df)
        # .ix was the pandas indexer of the day; .loc does the same label
        # slicing. Features are the encoded attribute columns, the target is
        # the poisonous indicator.
        self.X = np.asarray(df.ix[:, '0_b':'21_w'])
        self.Z = np.asarray(df.ix[:, 'class_p']).reshape((-1, 1))
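For readers unfamiliar with one-hot encoding via pd.get_dummies, here is a minimal toy illustration (hypothetical column names; the real data uses the 22 attribute columns named '0' through '21'):

    toy = pd.DataFrame({'cap': ['b', 'x', 'b'], 'odor': ['n', 'p', 'n']})
    # Each categorical column becomes one indicator column per level, e.g.
    # cap -> cap_b, cap_x and odor -> odor_n, odor_p.
    print pd.get_dummies(toy)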
In [4]:
env = MushroomEnvironment(filename='data/mushroom/agaricus-lepiota.data')
In [5]:
from breze.learn import mlp
In [6]:
m = mlp.FastDropoutNetwork(env.n_state + env.n_action, [100], 1, ['rectifier'], 'identity', 'squared')
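There is nothing bandit-specific about the network: it regresses observed reward on the concatenation of a state with a one-hot action, so its input width is env.n_state + env.n_action. As a sketch (hypothetical variable names; assumes parameters have been initialized and that predict accepts a float32 design matrix, as in the agent below), scoring one context under both arms looks like:

    state = env.X[0].astype('float32')                # one context
    arms = np.eye(env.n_action).astype('float32')     # one-hot actions
    pairs = np.concatenate(
        [np.repeat(state[np.newaxis], env.n_action, 0), arms], axis=1)
    print m.predict(pairs)                            # estimated reward per arm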
In [7]:
import itertools
class BrezeEpsilonGreedyContextBanditAgent(karma.Agent):

    def __init__(self, model, epsilon, n_state, n_action):
        self.model = model
        # NB: epsilon here is the probability of acting *greedily*; a random
        # arm is picked with probability 1 - epsilon (see .action below).
        self.epsilon = epsilon
        self.histories = []
        super(BrezeEpsilonGreedyContextBanditAgent, self).__init__(n_state, n_action)

    def reset(self):
        pass

    def action(self, state, reward):
        if np.random.random() > self.epsilon:
            # Explore: uniform random arm, with probability 1 - epsilon.
            action_idx = np.random.randint(0, self.n_action)
        else:
            # Exploit: score every arm for this state and pick the best.
            many_states = np.repeat(state[np.newaxis], self.n_action, 0)
            many_actions = np.eye(self.n_action)
            state_actions = np.concatenate([many_states, many_actions], 1)
            est_rewards = self.model.predict(state_actions)
            action_idx = est_rewards.argmax()
        res = np.zeros(self.n_action)
        res[action_idx] = 1
        return res.astype('float32')

    def _make_train_data(self):
        # Flatten all recorded (state, action, reward) triples into one
        # design matrix for the reward regression.
        n_samples = sum(len(i) for i in self.histories)
        X = np.empty((n_samples, self.n_state + self.n_action))
        Z = np.empty((n_samples, 1))
        c = itertools.count(0)
        i = c.next()
        for history in self.histories:
            for state, action, reward, _ in history:
                X[i, :self.n_state] = state
                X[i, self.n_state:] = action
                Z[i] = reward
                i = c.next()
        return X.astype('float32'), Z.astype('float32')

    def train(self):
        X, Z = self._make_train_data()
        pause = climin.stops.ModuloNIterations(100)
        stop = climin.stops.Any([
            climin.stops.Patience('val_loss', 1000, 1.1, 0.01),
            climin.stops.TimeElapsed(30),
        ])
        # Training and validation sets coincide here, so the printed loss
        # and val_loss columns are identical.
        for i, info in enumerate(self.model.powerfit((X, Z), (X, Z), stop=stop, report=pause)):
            print '%(n_iter)i %(loss)g %(val_loss)g' % info
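One thing to note: because np.random.random() > self.epsilon guards the random branch, epsilon is the probability of acting greedily rather than the usual exploration rate, so epsilon = .9 below means roughly 10% random arms. A minimal standalone sketch of the rule as used here:

    def select_arm(est_rewards, epsilon):
        # est_rewards: 1-d array of predicted rewards, one entry per arm.
        if np.random.random() > epsilon:      # probability 1 - epsilon
            return np.random.randint(len(est_rewards))
        return int(est_rewards.argmax())      # probability epsilon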
In [8]:
agent = BrezeEpsilonGreedyContextBanditAgent(m, .9, env.n_state, env.n_action)
In [9]:
climin.initialize.randomize_normal(m.parameters.data, 0, .01)
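climin.initialize.randomize_normal fills the flat parameter array in place with normal draws, here mean 0 and standard deviation .01. A rough hand-rolled equivalent (a sketch, not climin's actual implementation):

    m.parameters.data[:] = np.random.normal(
        0, .01, m.parameters.data.shape).astype(m.parameters.data.dtype)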
In [ ]:
for i in range(25):
    history = []
    for state, action, reward, state_, regret in karma.rollout(env, agent, 100):
        history.append((state, action, reward, state_))
    agent.histories.append(history)
    rewards = [reward for state, action, reward, _ in agent.histories[-1]]
    print np.mean(rewards)
    agent.train()
ERROR (theano.gof.opt): SeqOptimizer apply <theano.tensor.opt.FusionOptimizer object at 0x108d6de10>
ERROR (theano.gof.opt): Traceback (most recent call last):
  File "/Users/bayerj/devel/Theano/theano/gof/opt.py", line 193, in apply
    sub_prof = optimizer.optimize(fgraph)
  File "/Users/bayerj/devel/Theano/theano/gof/opt.py", line 78, in optimize
    ret = self.apply(fgraph, *args, **kwargs)
  File "/Users/bayerj/devel/Theano/theano/tensor/opt.py", line 5784, in apply
    new_outputs = self.optimizer(node)
  File "/Users/bayerj/devel/Theano/theano/tensor/opt.py", line 5731, in local_fuse
    ret = local_fuse(n)
  File "/Users/bayerj/devel/Theano/theano/tensor/opt.py", line 5620, in local_fuse
    tv = gof.op.get_test_value(ii)
  File "/Users/bayerj/devel/Theano/theano/gof/op.py", line 829, in get_test_value
    return PureOp._get_test_value(v_var)
  File "/Users/bayerj/devel/Theano/theano/gof/op.py", line 455, in _get_test_value
    ret = v.type.filter(v.tag.test_value)
  File "/Users/bayerj/devel/Theano/theano/tensor/type.py", line 130, in filter
    raise TypeError(err_msg, data)
TypeError: For compute_test_value, one input test value does not have the requested type.
Backtrace when that variable is created:
  File "/Users/bayerj/devel/breze/breze/learn/mlp.py", line 301, in _init_exprs
    inpt = T.matrix('inpt')
The error when converting the test value to that variable type:
TensorType(float32, matrix) cannot store a value of dtype float64 without risking loss of precision. If you do not mind this loss, you can: 1) explicitly cast your data to float32, or 2) set "allow_input_downcast=True" when calling "function".
[dump of the uninitialized float64 test-value array elided]
(The same FusionOptimizer error is logged several more times, once per log handler, including for the 'target' variable created in mlp.py line 302; the duplicated tracebacks and array dumps are elided here.)
/Users/bayerj/devel/climin/climin/util.py:150: UserWarning: Argument named f is not expected by <class 'climin.adam.Adam'>
  % (i, klass))
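The Theano errors above come from the graph optimizer tripping over float64 test values attached to float32 variables; they are only logged, do not abort training, and the loss trace below is unaffected. The usual remedies, as a sketch (the agent already casts its training data with astype('float32'); X here is any hypothetical float64 input):

    import theano
    theano.config.floatX = 'float32'   # make float32 the default graph dtype
    # ... and/or cast arrays explicitly before handing them to the model:
    # X = np.asarray(X, dtype=theano.config.floatX)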
-7.0
100 293.275 293.275
200 269.001 269.001
300 236.184 236.184
400 204.875 204.875
500 180.162 180.162
600 162.344 162.344
700 149.04 149.04
800 138.525 138.525
900 130.014 130.014
1000 122.866 122.866
1100 116.77 116.77
1200 111.577 111.577
1300 107.177 107.177
1400 103.469 103.469
1500 100.351 100.351
1600 97.7313 97.7313
1700 95.5261 95.5261
1800 93.6632 93.6632
1900 92.0798 92.0798
2000 90.7223 90.7223
2100 89.5466 89.5466
2200 88.5186 88.5186
2300 87.6086 87.6086
2400 86.7902 86.7902
2500 86.0397 86.0397
2600 85.3355 85.3355
2700 84.6655 84.6655
2800 84.0168 84.0168
2900 83.3772 83.3772
3000 82.7406 82.7406
3100 82.1013 82.1013
3200 81.456 81.456
3300 80.8093 80.8093
3400 80.1519 80.1519
3500 79.4814 79.4814
3600 78.7971 78.7971
3700 78.0892 78.0892
3800 77.34 77.34
3900 76.5531 76.5531
4000 75.7581 75.7581
4100 74.9405 74.9405
4200 74.0758 74.0758
4300 73.1767 73.1767
4400 72.2859 72.2859
4500 71.391 71.391
4600 70.4782 70.4782
4700 69.5449 69.5449
4800 68.5803 68.5803
4900 67.5682 67.5682
5000 66.5169 66.5169
5100 65.4263 65.4263
5200 64.3307 64.3307
5300 63.2395 63.2395
5400 62.142 62.142
5500 61.0298 61.0298
5600 59.8934 59.8934
5700 58.6939 58.6939
5800 57.4449 57.4449
5900 56.1992 56.1992
6000 54.9577 54.9577
6100 53.6704 53.6704
6200 52.3266 52.3266
6300 50.9374 50.9374
6400 49.5084 49.5084
6500 48.0516 48.0516
6600 46.5329 46.5329
6700 44.9478 44.9478
6800 43.3049 43.3049
6900 41.6519 41.6519
7000 40.0183 40.0183
7100 38.4387 38.4387
7200 36.9143 36.9143
7300 35.4286 35.4286
7400 33.9568 33.9568
7500 32.4916 32.4916
7600 31.0929 31.0929
7700 29.7641 29.7641
7800 28.4975 28.4975
7900 27.2858 27.2858
8000 26.1198 26.1198
8100 24.9916 24.9916
8200 23.9085 23.9085
8300 22.8695 22.8695
8400 21.8631 21.8631
8500 20.876 20.876
8600 19.9117 19.9117
8700 18.9703 18.9703
8800 18.0497 18.0497
8900 17.1585 17.1585
9000 16.2986 16.2986
9100 15.447 15.447
9200 14.5839 14.5839
9300 13.7283 13.7283
9400 12.9096 12.9096
9500 12.1086 12.1086
9600 11.3044 11.3044
9700 10.5257 10.5257
9800 9.77082 9.77082
9900 9.03577 9.03577
10000 8.30975 8.30975
10100 7.62329 7.62329
10200 6.98059 6.98059
10300 6.38182 6.38182
10400 5.81171 5.81171
10500 5.27185 5.27185
10600 4.77646 4.77646
10700 4.32058 4.32058
10800 3.89786 3.89786
10900 3.49863 3.49863
11000 3.12463 3.12463
11100 2.77009 2.77009
11200 2.4334 2.4334
11300 2.13245 2.13245
11400 1.86497 1.86497
11500 1.62402 1.62402
11600 1.40561 1.40561
11700 1.20964 1.20964
11800 1.0361 1.0361
11900 0.882043 0.882043
12000 0.745535 0.745535
12100 0.625473 0.625473
12200 0.521063 0.521063
12300 0.430534 0.430534
12400 0.353021 0.353021
12500 0.2866 0.2866
-1.15
100 252.474 252.474
200 207.886 207.886
300 175.815 175.815
400 151.815 151.815
500 133.338 133.338
600 118.79 118.79
700 107.111 107.111
800 97.5604 97.5604
900 89.6133 89.6133
1000 82.8912 82.8912
1100 77.1185 77.1185
1200 72.0918 72.0918
1300 67.6576 67.6576
1400 63.6992 63.6992
1500 60.1274 60.1274
1600 56.8748 56.8748
1700 53.891 53.891
1800 51.1385 51.1385
1900 48.5877 48.5877
2000 46.2133 46.2133
2100 43.9926 43.9926
2200 41.9057 41.9057
2300 39.9364 39.9364
2400 38.0735 38.0735
2500 36.3099 36.3099
2600 34.6422 34.6422
2700 33.0672 33.0672
2800 31.5806 31.5806
2900 30.177 30.177
3000 28.85 28.85
3100 27.5931 27.5931
3200 26.4004 26.4004
3300 25.2658 25.2658
3400 24.184 24.184
3500 23.1502 23.1502
3600 22.1598 22.1598
3700 21.209 21.209
3800 20.2944 20.2944
3900 19.4131 19.4131
4000 18.5625 18.5625
4100 17.7406 17.7406
4200 16.9457 16.9457
4300 16.1762 16.1762
4400 15.4313 15.4313
4500 14.7101 14.7101
4600 14.012 14.012
4700 13.3364 13.3364
4800 12.6827 12.6827
4900 12.0504 12.0504
5000 11.4391 11.4391
5100 10.8483 10.8483
5200 10.2776 10.2776
5300 9.7269 9.7269
5400 9.19587 9.19587
5500 8.68436 8.68436
5600 8.19209 8.19209
5700 7.71869 7.71869
5800 7.26376 7.26376
5900 6.82688 6.82688
6000 6.40767 6.40767
6100 6.0058 6.0058
6200 5.62102 5.62102
6300 5.25309 5.25309
6400 4.90182 4.90182
6500 4.56705 4.56705
6600 4.24861 4.24861
6700 3.94633 3.94633
6800 3.66002 3.66002
6900 3.38943 3.38943
7000 3.13431 3.13431
7100 2.89436 2.89436
7200 2.66923 2.66923
-0.55
100 53.7035 53.7035
200 45.6676 45.6676
300 40.2369 40.2369
400 36.1321 36.1321
500 32.8562 32.8562
600 30.1534 30.1534
700 27.8709 27.8709
800 25.9143 25.9143
900 24.2148 24.2148
1000 22.7167 22.7167
1100 21.3773 21.3773
1200 20.1655 20.1655
1300 19.0581 19.0581
1400 18.038 18.038
1500 17.092 17.092
1600 16.2103 16.2103
1700 15.3847 15.3847
1800 14.6088 14.6088
1900 13.8771 13.8771
2000 13.1848 13.1848
2100 12.528 12.528
2200 11.9033 11.9033
2300 11.3076 11.3076
2400 10.7387 10.7387
2500 10.1945 10.1945
2600 9.67326 9.67326
2700 9.17337 9.17337
2800 8.69347 8.69347
2900 8.23238 8.23238
3000 7.78918 7.78918
3100 7.36332 7.36332
3200 6.95458 6.95458
3300 6.56298 6.56298
3400 6.18862 6.18862
3500 5.8315 5.8315
3600 5.49137 5.49137
3700 5.1678 5.1678
3800 4.86019 4.86019
3900 4.56786 4.56786
4000 4.29012 4.29012
4100 4.02622 4.02622
4200 3.77545 3.77545
4300 3.53708 3.53708
4400 3.31043 3.31043
4500 3.09492 3.09492
4600 2.89004 2.89004
4700 2.69524 2.69524
4800 2.51008 2.51008
4900 2.33419 2.33419
5000 2.16724 2.16724
5100 2.00898 2.00898
5200 1.8592 1.8592
5300 1.71755 1.71755
5400 1.5839 1.5839
2.05
100 22.7646 22.7646
200 19.1087 19.1087
300 16.6103 16.6103
400 14.7461 14.7461
500 13.2742 13.2742
600 12.0688 12.0688
700 11.0576 11.0576
800 10.1952 10.1952
In [201]:
100 10.1517 10.1517
200 8.69 8.69
300 7.77533 7.77533
400 7.10081 7.10081
500 6.56822 6.56822
600 6.13114 6.13114
700 5.7619 5.7619
800 5.44252 5.44252
900 5.16092 5.16092
1000 4.90885 4.90885
1100 4.68053 4.68053
1200 4.47195 4.47195
1300 4.27994 4.27994
1400 4.10213 4.10213
1500 3.93673 3.93673
1600 3.78234 3.78234
1700 3.63735 3.63735
1800 3.50125 3.50125
1900 3.37352 3.37352
2000 3.25272 3.25272
2100 3.1386 3.1386
2200 3.03064 3.03064
2300 2.92856 2.92856
2400 2.83168 2.83168
2500 2.74016 2.74016
2600 2.65315 2.65315
2700 2.57093 2.57093
2800 2.49277 2.49277
2900 2.41899 2.41899
3000 2.34884 2.34884
3100 2.28248 2.28248
3200 2.21947 2.21947
3300 2.15999 2.15999
3400 2.10344 2.10344
3500 2.04936 2.04936
3600 1.99824 1.99824
3700 1.94993 1.94993
In [202]:
for h in agent.histories:
    rewards = [reward for state, action, reward, _ in h]
    print np.mean(rewards)
-6.75
-0.6
0.45
-0.05
1.65
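The per-rollout mean rewards climb from -6.75 to 1.65, so the agent does improve with experience. karma.rollout also yields the per-step regret as the last tuple element, which the loop above discards; tracking it gives a more direct learning curve. A sketch under the same rollout signature used above:

    regrets = []
    for state, action, reward, state_, regret in karma.rollout(env, agent, 100):
        regrets.append(regret)
    print np.mean(regrets)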