/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn.py in <lambda>()
    182         if time > 0: varscope.reuse_variables()
    183         # pylint: disable=cell-var-from-loop
--> 184         call_cell = lambda: cell(input_, state)
    185         # pylint: enable=cell-var-from-loop
    186         if sequence_length is not None:

/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py in __call__(self, inputs, state, scope)
    233   def __call__(self, inputs, state, scope=None):
    234     """Long short-term memory cell (LSTM)."""
--> 235     with _checked_scope(self, scope or "basic_lstm_cell", reuse=self._reuse):
    236       # Parameters of gates are concatenated into one multiply for efficiency.
    237       if self._state_is_tuple:

/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py in _checked_scope(cell, scope, reuse, **kwargs)
     91           "To share the weights of an RNNCell, simply "
     92           "reuse it in your second calculation, or create a new one with "
---> 93           "the argument reuse=True." % (scope_name, type(cell).__name__))
     94
     95   # Everything is OK. Update the cell's scope and yield it.
ValueError: Attempt to have a second RNNCell use the weights of a variable scope that already has weights: 'rnn/basic_lstm_cell'; and the cell was not constructed as BasicLSTMCell(..., reuse=True). To share the weights of an RNNCell, simply reuse it in your second calculation, or create a new one with the argument reuse=True.
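For context, here is a minimal sketch of the two fixes the error message suggests, assuming the TensorFlow 1.x contrib RNN API that appears in the traceback; the placeholder shape, unit count, and variable names are hypothetical and only stand in for whatever the original model uses.

    import tensorflow as tf  # TensorFlow 1.x, matching the contrib.rnn paths above

    # Hypothetical input of shape (batch, time, features); the sizes are illustrative.
    inputs = tf.placeholder(tf.float32, [None, 10, 32])

    # Fix 1: build the cell once and reuse the same object wherever the
    # calculation is repeated, so its weights are shared implicitly.
    cell = tf.contrib.rnn.BasicLSTMCell(num_units=128)
    outputs_a, state_a = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32, scope="rnn")

    # Fix 2: construct a second cell with reuse=True so it attaches to the
    # variables already created under 'rnn/basic_lstm_cell' instead of
    # trying to create them again.
    cell_shared = tf.contrib.rnn.BasicLSTMCell(num_units=128, reuse=True)
    outputs_b, state_b = tf.nn.dynamic_rnn(cell_shared, inputs, dtype=tf.float32, scope="rnn")

If the duplicate construction comes from stacking layers, the same principle applies: pass MultiRNNCell a list of independently constructed cells rather than the same cell object repeated.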