---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-6-6694a78c7bfd> in <module>()
----> 1 pred = dynamicRNN(x, seqlen, weights, biases)
      2 
      3 # Define loss and optimizer
      4 cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
      5 optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)

<ipython-input-4-f223e7cdfbf9> in dynamicRNN(x, seqlen, weights, biases)
     16     # calculation.
     17     outputs, states = tf.contrib.rnn.static_rnn(lstm_cell, x, dtype=tf.float32,
---> 18                                 sequence_length=seqlen)
     19 
     20     # When performing dynamic calculation, we must retrieve the last

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn.py in static_rnn(cell, inputs, initial_state, dtype, sequence_length, scope)
    193             state=state,
    194             call_cell=call_cell,
--> 195             state_size=cell.state_size)
    196       else:
    197         (output, state) = call_cell()

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/rnn.py in _rnn_step(time, sequence_length, min_sequence_length, max_sequence_length, zero_output, state, call_cell, state_size, skip_conditionals)
    186         time >= max_sequence_length, empty_update,
    187         # otherwise calculation is required: copy some or all of it through
--> 188         _maybe_copy_some_through)
    189 
    190     if len(final_output_and_state) != len(flat_zero_output) + len(flat_state):

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/control_flow_ops.py in cond(pred, fn1, fn2, name)
   1743     context_f = CondContext(pred, pivot_2, branch=0)
   1744     context_f.Enter()
-> 1745     _, res_f = context_f.BuildCondBranch(fn2)
   1746     context_f.ExitResult(res_f)
   1747     context_f.Exit()

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/control_flow_ops.py in BuildCondBranch(self, fn)
   1637   def BuildCondBranch(self, fn):
   1638     """Add the subgraph defined by fn() to the graph."""
-> 1639     r = fn()
   1640     original_r = r
   1641     result = []

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/rnn.py in _maybe_copy_some_through()
    156   def _maybe_copy_some_through():
    157     """Run RNN step. Pass through either no or some past state."""
--> 158     new_output, new_state = call_cell()
    159 
    160     nest.assert_same_structure(state, new_state)

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn.py in <lambda>()
    182       if time > 0: varscope.reuse_variables()
    183       # pylint: disable=cell-var-from-loop
--> 184       call_cell = lambda: cell(input_, state)
    185       # pylint: enable=cell-var-from-loop
    186       if sequence_length is not None:

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py in __call__(self, inputs, state, scope)
    177       else:
    178         c, h = array_ops.split(value=state, num_or_size_splits=2, axis=1)
--> 179       concat = _linear([inputs, h], 4 * self._num_units, True, scope=scope)
    180 
    181       # i = input_gate, j = new_input, f = forget_gate, o = output_gate

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py in _linear(args, output_size, bias, bias_start, scope)
    745   with vs.variable_scope(scope) as outer_scope:
    746     weights = vs.get_variable(
--> 747         "weights", [total_arg_size, output_size], dtype=dtype)
    748     if len(args) == 1:
    749       res = math_ops.matmul(args[0], weights)

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
    986       collections=collections, caching_device=caching_device,
    987       partitioner=partitioner, validate_shape=validate_shape,
--> 988       custom_getter=custom_getter)
    989 get_variable_or_local_docstring = (
    990     """%s

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
    888         collections=collections, caching_device=caching_device,
    889         partitioner=partitioner, validate_shape=validate_shape,
--> 890         custom_getter=custom_getter)
    891 
    892   def _get_partitioned_variable(self,

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, custom_getter)
    346           reuse=reuse, trainable=trainable, collections=collections,
    347           caching_device=caching_device, partitioner=partitioner,
--> 348           validate_shape=validate_shape)
    349 
    350   def _get_partitioned_variable(

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape)
    331           initializer=initializer, regularizer=regularizer, reuse=reuse,
    332           trainable=trainable, collections=collections,
--> 333           caching_device=caching_device, validate_shape=validate_shape)
    334 
    335       if custom_getter is not None:

/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/python/ops/variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape)
    637             " Did you mean to set reuse=True in VarScope? "
    638             "Originally defined at:\n\n%s" % (
--> 639                 name, "".join(traceback.format_list(tb))))
    640       found_var = self._vars[name]
    641       if not shape.is_compatible_with(found_var.get_shape()):

ValueError: Variable rnn/basic_lstm_cell/weights already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:

  File "/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py", line 747, in _linear
    "weights", [total_arg_size, output_size], dtype=dtype)
  File "/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py", line 179, in __call__
    concat = _linear([inputs, h], 4 * self._num_units, True, scope=scope)
  File "/home/ram/tensorflowGPU3/lib/python3.5/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn.py", line 184, in <lambda>
    call_cell = lambda: cell(input_, state)
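
For reference, this is the error TensorFlow 1.x raises when get_variable() is asked to create a variable (here rnn/basic_lstm_cell/weights) that already exists in the current default graph. That typically happens when the graph-building code (the cell that calls dynamicRNN, and through it tf.contrib.rnn.static_rnn) is executed more than once, for example by re-running the notebook cell. Below is a minimal sketch of the two usual remedies, assuming the TF 1.x notebook setup shown in the traceback; build_model is a hypothetical wrapper, not part of the original code:

    import tensorflow as tf

    # Remedy 1 (simplest in a notebook): start from an empty default graph
    # before rebuilding the model, so the LSTM variables do not exist yet.
    tf.reset_default_graph()
    # ... re-create the x, y, seqlen placeholders and the weights/biases
    # dicts here, then call pred = dynamicRNN(x, seqlen, weights, biases) ...

    # Remedy 2: opt in to variable reuse, as the error message suggests.
    # build_model is a hypothetical wrapper around the existing dynamicRNN();
    # reuse=True is only valid once the variables have been created by an
    # earlier call with reuse=False.
    def build_model(x, seqlen, weights, biases, reuse=False):
        with tf.variable_scope("model", reuse=reuse):
            return dynamicRNN(x, seqlen, weights, biases)

    # pred_train = build_model(x, seqlen, weights, biases, reuse=False)
    # pred_test  = build_model(x, seqlen, weights, biases, reuse=True)

Either way, the underlying point is that tf.get_variable() refuses to recreate an existing variable unless the enclosing variable scope was opened with reuse=True.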