RNN code returns ValueError

by: piyushgoyal443, 7 years ago

Last edited: 7 years ago


def rnn_model(x):
    layer = {"weights" : tf.Variable(tf.random_normal([rnn_size, n_classes])),
             "biases" : tf.Variable(tf.random_normal([n_classes]))}
    
    x = tf.transpose(x, [1,0,2])
    x = tf.reshape(x, [-1, chunk_size])
    x = tf.split(x, n_chunk, 0)
    
    lstm_cell = rnn.BasicLSTMCell(rnn_size)
    
    outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32)
    
    output = tf.add(tf.matmul(outputs[-1], layer["weights"]), layer["biases"])
    
    return output


I have changed the code for tensorflow 1.1.0 but i get this error


ValueError                                Traceback (most recent call last)
<ipython-input-26-a4e029ad920c> in <module>()
     24         print('Accuracy:',accuracy.eval({x:mnist.test.images.reshape((-1, n_chunks, chunk_size)), y:mnist.test.labels}))
     25
---> 26 train_neural_network(x)

<ipython-input-26-a4e029ad920c> in train_neural_network(x)
      1 def train_neural_network(x):
----> 2     prediction = rnn_model(x)
      3     cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits = prediction, labels = y))
      4     optimizer = tf.train.AdamOptimizer().minimize(cost)
      5

<ipython-input-24-13e8709a80fd> in rnn_model(x)
      9     lstm_cell = rnn.BasicLSTMCell(rnn_size)
     10
---> 11     outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32)
     12
     13     output = tf.add(tf.matmul(outputs[-1], layer["weights"]), layer["biases"])

/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn.py in static_rnn(cell, inputs, initial_state, dtype, sequence_length, scope)
    195             state_size=cell.state_size)
    196       else:
--> 197         (output, state) = call_cell()
    198
    199       outputs.append(output)

/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn.py in <lambda>()
    182       if time > 0: varscope.reuse_variables()
    183       # pylint: disable=cell-var-from-loop
--> 184       call_cell = lambda: cell(input_, state)
    185       # pylint: enable=cell-var-from-loop
    186       if sequence_length is not None:

/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py in __call__(self, inputs, state, scope)
    233   def __call__(self, inputs, state, scope=None):
    234     """Long short-term memory cell (LSTM)."""
--> 235     with _checked_scope(self, scope or "basic_lstm_cell", reuse=self._reuse):
    236       # Parameters of gates are concatenated into one multiply for efficiency.
    237       if self._state_is_tuple:

/home/piyush/anaconda3/lib/python3.6/contextlib.py in __enter__(self)
     80     def __enter__(self):
     81         try:
---> 82             return next(self.gen)
     83         except StopIteration:
     84             raise RuntimeError("generator didn't yield") from None

/home/piyush/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py in _checked_scope(cell, scope, reuse, **kwargs)
     91             "To share the weights of an RNNCell, simply "
     92             "reuse it in your second calculation, or create a new one with "
---> 93             "the argument reuse=True." % (scope_name, type(cell).__name__))
     94
     95     # Everything is OK.  Update the cell's scope and yield it.

ValueError: Attempt to have a second RNNCell use the weights of a variable scope that already has weights: 'rnn/basic_lstm_cell'; and the cell was not constructed as BasicLSTMCell(..., reuse=True).  To share the weights of an RNNCell, simply reuse it in your second calculation, or create a new one with the argument reuse=True.




You must be logged in to post. Please login or register an account.