---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
S:\Anaconda3\lib\site-packages\theano\compile\function_module.py in __call__(self, *args, **kwargs)
883 outputs =\
--> 884 self.fn() if output_subset is None else\
885 self.fn(output_subset=output_subset)
S:\Anaconda3\lib\site-packages\theano\gof\op.py in rval(p, i, o, n)
871 def rval(p=p, i=node_input_storage, o=node_output_storage, n=node):
--> 872 r = p(n, [x[0] for x in i], o)
873 for o in node.outputs:
S:\Anaconda3\lib\site-packages\theano\tensor\subtensor.py in perform(self, node, inputs, out_)
2243 elif config.cxx:
-> 2244 inplace_increment(out[0], tuple(inputs[2:]), inputs[1])
2245 else:
IndexError: index 20908 is out of bounds for axis 1 with size 17485
During handling of the above exception, another exception occurred:
IndexError Traceback (most recent call last)
<ipython-input-16-a71f0b7ead28> in <module>()
1 clf = SimpleRNNClassifier(4)
----> 2 clf.fit(X_t, Y_t, show_fig=True)
S:\git\tacticsiege\tactictoolkit\ttk\sandbox\udemy\SimpleRNNClassifier.py in fit(self, X, Y, learning_rate, mu, reg, activation, epochs, show_fig)
93 print ('X[j]:', X[j], 'Y[j]:', Y[j])
94
---> 95 c, p, rout = self.train_op(X[j], Y[j])
96 print ('c:', c)
97 cost += c
S:\Anaconda3\lib\site-packages\theano\compile\function_module.py in __call__(self, *args, **kwargs)
896 node=self.fn.nodes[self.fn.position_of_error],
897 thunk=thunk,
--> 898 storage_map=getattr(self.fn, 'storage_map', None))
899 else:
900 # old-style linkers raise their own exceptions
S:\Anaconda3\lib\site-packages\theano\gof\link.py in raise_with_op(node, thunk, exc_info, storage_map)
323 # extra long error message in that case.
324 pass
--> 325 reraise(exc_type, exc_value, exc_trace)
326
327
S:\Anaconda3\lib\site-packages\six.py in reraise(tp, value, tb)
683 value = tp()
684 if value.__traceback__ is not tb:
--> 685 raise value.with_traceback(tb)
686 raise value
687
S:\Anaconda3\lib\site-packages\theano\compile\function_module.py in __call__(self, *args, **kwargs)
882 try:
883 outputs =\
--> 884 self.fn() if output_subset is None else\
885 self.fn(output_subset=output_subset)
886 except Exception:
S:\Anaconda3\lib\site-packages\theano\gof\op.py in rval(p, i, o, n)
870 # default arguments are stored in the closure of `rval`
871 def rval(p=p, i=node_input_storage, o=node_output_storage, n=node):
--> 872 r = p(n, [x[0] for x in i], o)
873 for o in node.outputs:
874 compute_map[o][0] = True
S:\Anaconda3\lib\site-packages\theano\tensor\subtensor.py in perform(self, node, inputs, out_)
2242 out[0][inputs[2:]] = inputs[1]
2243 elif config.cxx:
-> 2244 inplace_increment(out[0], tuple(inputs[2:]), inputs[1])
2245 else:
2246 raise NotImplementedError(
IndexError: index 20908 is out of bounds for axis 1 with size 17485
Apply node that caused the error: AdvancedIncSubtensor{inplace=False, set_instead_of_inc=False}(Alloc.0, HostFromGpu(gpuarray).0, ARange{dtype='int64'}.0, Y)
Toposort index: 165
Inputs types: [TensorType(float64, matrix), TensorType(float64, vector), TensorType(int64, vector), TensorType(int32, vector)]
Inputs shapes: [(32, 17485), (32,), (32,), (32,)]
Inputs strides: [(139880, 8), (8,), (8,), (4,)]
Inputs values: ['not shown', 'not shown', 'not shown', 'not shown']
Outputs clients: [[GpuFromHost<None>(AdvancedIncSubtensor{inplace=False, set_instead_of_inc=False}.0)]]
Backtrace when the node is created(use Theano flag traceback.limit=N to make it longer):
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 1272, in access_grad_cache
term = access_term_cache(node)[idx]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 967, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 967, in <listcomp>
output_grads = [access_grad_cache(var) for var in node.outputs]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 1272, in access_grad_cache
term = access_term_cache(node)[idx]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 967, in access_term_cache
output_grads = [access_grad_cache(var) for var in node.outputs]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 967, in <listcomp>
output_grads = [access_grad_cache(var) for var in node.outputs]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 1272, in access_grad_cache
term = access_term_cache(node)[idx]
File "S:\Anaconda3\lib\site-packages\theano\gradient.py", line 1108, in access_term_cache
new_output_grads)
HINT: Use the Theano flag 'exception_verbosity=high' for a debugprint and storage map footprint of this apply node.
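The key line is `IndexError: index 20908 is out of bounds for axis 1 with size 17485`, raised from an `AdvancedIncSubtensor` node that Theano built while taking the gradient (see the `gradient.py` backtrace). The output matrix has 17485 columns, but `Y` contains the index 20908, so the label range is larger than the output dimension the classifier was built with. Below is a minimal sanity check you can run before calling `fit`; the names `Y_t` and `V` are taken as illustrative assumptions (`Y_t` holding integer class/word indices, `V` the output size the model is constructed with), since the constructor call `SimpleRNNClassifier(4)` does not show where that size comes from.

```python
import numpy as np

def check_labels(Y_t, V):
    """Hypothetical pre-flight check: every label must index a valid output unit.

    Assumes Y_t is a list (or array) of integer label sequences and V is the
    number of output units / vocabulary size the classifier is built with.
    Both names are illustrative, not taken from SimpleRNNClassifier itself.
    """
    labels = np.concatenate([np.asarray(y).ravel() for y in Y_t])
    max_label = int(labels.max())
    if max_label >= V:
        raise ValueError(
            "Found label %d but the model only has %d output units; "
            "rebuild the model with V >= %d or remap the labels."
            % (max_label, V, max_label + 1)
        )

# Example usage, mirroring the shapes in the traceback:
# check_labels(Y_t, V=17485)  # would raise here, since a label of 20908 exists
```

If the check fails, either pass the true number of classes (at least `max(labels) + 1`) when building the model, or re-index the labels so they are contiguous and start at 0.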