---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-28-ce26135605fb> in <module>()
8
9 d_train_opt = tf.train.AdamOptimizer(learning_rate).minimize(d_loss, var_list=d_vars)
---> 10 g_train_opt = tf.train.AdamOptimizer(learning_rate).minimize(g_loss, var_list=g_vars)
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in minimize(self, loss, global_step, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, name, grad_loss)
323
324 return self.apply_gradients(grads_and_vars, global_step=global_step,
--> 325 name=name)
326
327 def compute_gradients(self, loss, var_list=None,
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in apply_gradients(self, grads_and_vars, global_step, name)
444 ([str(v) for _, _, v in converted_grads_and_vars],))
445 with ops.control_dependencies(None):
--> 446 self._create_slots([_get_variable_for(v) for v in var_list])
447 update_ops = []
448 with ops.name_scope(name, self._name) as name:
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/adam.py in _create_slots(self, var_list)
120 # Create slots for the first and second moments.
121 for v in var_list:
--> 122 self._zeros_slot(v, "m", self._name)
123 self._zeros_slot(v, "v", self._name)
124
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in _zeros_slot(self, var, slot_name, op_name)
764 named_slots = self._slot_dict(slot_name)
765 if _var_key(var) not in named_slots:
--> 766 named_slots[_var_key(var)] = slot_creator.create_zeros_slot(var, op_name)
767 return named_slots[_var_key(var)]
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py in create_zeros_slot(primary, name, dtype, colocate_with_primary)
172 return create_slot_with_initializer(
173 primary, initializer, slot_shape, dtype, name,
--> 174 colocate_with_primary=colocate_with_primary)
175 else:
176 val = array_ops.zeros(slot_shape, dtype=dtype)
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py in create_slot_with_initializer(primary, initializer, shape, dtype, name, colocate_with_primary)
144 with ops.colocate_with(primary):
145 return _create_slot_var(primary, initializer, "", validate_shape, shape,
--> 146 dtype)
147 else:
148 return _create_slot_var(primary, initializer, "", validate_shape, shape,
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/training/slot_creator.py in _create_slot_var(primary, val, scope, validate_shape, shape, dtype)
64 use_resource=_is_resource(primary),
65 shape=shape, dtype=dtype,
---> 66 validate_shape=validate_shape)
67 variable_scope.get_variable_scope().set_partitioner(current_partitioner)
68
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
1047 collections=collections, caching_device=caching_device,
1048 partitioner=partitioner, validate_shape=validate_shape,
-> 1049 use_resource=use_resource, custom_getter=custom_getter)
1050 get_variable_or_local_docstring = (
1051 """%s
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
946 collections=collections, caching_device=caching_device,
947 partitioner=partitioner, validate_shape=validate_shape,
--> 948 use_resource=use_resource, custom_getter=custom_getter)
949
950 def _get_partitioned_variable(self,
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
354 reuse=reuse, trainable=trainable, collections=collections,
355 caching_device=caching_device, partitioner=partitioner,
--> 356 validate_shape=validate_shape, use_resource=use_resource)
357
358 def _get_partitioned_variable(
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource)
339 trainable=trainable, collections=collections,
340 caching_device=caching_device, validate_shape=validate_shape,
--> 341 use_resource=use_resource)
342
343 if custom_getter is not None:
~/anaconda3/envs/tf/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource)
651 " Did you mean to set reuse=True in VarScope? "
652 "Originally defined at:\n\n%s" % (
--> 653 name, "".join(traceback.format_list(tb))))
654 found_var = self._vars[name]
655 if not shape.is_compatible_with(found_var.get_shape()):
ValueError: Variable generator/dense/kernel/Adam/ already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:
File "<ipython-input-9-ce26135605fb>", line 10, in <module>
g_train_opt = tf.train.AdamOptimizer(learning_rate).minimize(g_loss, var_list=g_vars)
File "/home/quoniammm/anaconda3/envs/tf/lib/python3.6/site-packages/IPython/core/interactiveshell.py", line 2862, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "/home/quoniammm/anaconda3/envs/tf/lib/python3.6/site-packages/IPython/core/interactiveshell.py", line 2802, in run_ast_nodes
if self.run_code(code, result):
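
Note what the last frame reports: the colliding variable generator/dense/kernel/Adam was "originally defined at" line 10 of <ipython-input-9-ce26135605fb>, which is the same cell (same ce26135605fb hash) as the failing <ipython-input-28-...> run. In other words, the optimizer cell was executed a second time in the same notebook session, and the first execution had already added the Adam slot variables to the default graph. Below is a minimal sketch of one way to avoid this, assuming TensorFlow 1.x graph mode as in the traceback: either restart the kernel and run every cell exactly once, or reset the default graph and rebuild the whole model before creating the optimizers again. The generator/discriminator here are hypothetical stand-ins, not the original notebook's code.

import tensorflow as tf  # TensorFlow 1.x, as in the traceback

# Clear any variables left over from earlier executions of the notebook cells,
# including the Adam slot variables created by a previous run of this cell.
tf.reset_default_graph()

# Hypothetical stand-in model: in the real notebook, re-run the cells that
# build the generator, discriminator, losses and variable lists here instead.
with tf.variable_scope('generator'):
    fake = tf.layers.dense(tf.zeros([1, 100]), 784)   # creates generator/dense/kernel
with tf.variable_scope('discriminator'):
    logits = tf.layers.dense(fake, 1)                 # creates discriminator/dense/kernel

d_loss = tf.reduce_mean(tf.square(logits - 1.0))      # placeholder losses for the sketch
g_loss = tf.reduce_mean(tf.square(logits))

t_vars = tf.trainable_variables()
d_vars = [v for v in t_vars if v.name.startswith('discriminator')]
g_vars = [v for v in t_vars if v.name.startswith('generator')]

learning_rate = 0.0002  # assumed value; the original cell defines its own
d_train_opt = tf.train.AdamOptimizer(learning_rate).minimize(d_loss, var_list=d_vars)
g_train_opt = tf.train.AdamOptimizer(learning_rate).minimize(g_loss, var_list=g_vars)

Because the graph is rebuilt from scratch, the AdamOptimizer can create its generator/dense/kernel/Adam slot again without colliding with variables left over from an earlier run. The reuse=True hint in the error message is aimed at deliberately shared model variables; re-using stale optimizer slot state is rarely what you want, so resetting the graph (or restarting the kernel) is the simpler route here.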