
When I use tf.nn.dynamic_rnn to build an LSTM, a problem related to name_scope makes it fail. The platform is Windows 10. I cannot work out how to fix it, and any help would be greatly appreciated. ValueError: Variable A3C_net/basic_lstm_cell/weights does not exist, or was not created with tf.get_variable()

Thanks very much!

The full error is as follows:

Traceback (most recent call last):

File "a3c_prediction.py", line 157, in <module> 
    main() 
    File "a3c_prediction.py", line 88, in main 
    global_network = ACLSTMNetwork(ACTION_SIZE, device) 
    File "C:\Users\xjZhan\Desktop\a3c_predication\a3c_net_lstm.py", line 139, in __init__ 
    scope = scope) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\rnn.py", line 553, in dynamic_rnn 
    dtype=dtype) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\rnn.py", line 720, in _dynamic_rnn_loop 
    swap_memory=swap_memory) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2623, in while_loop 
    result = context.BuildLoop(cond, body, loop_vars, shape_invariants) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2456, in BuildLoop 
    pred, body, original_loop_vars, loop_vars, shape_invariants) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2406, in _BuildLoop 
    body_result = body(*packed_vars_for_body) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\rnn.py", line 703, in _time_step 
    skip_conditionals=True) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\rnn.py", line 177, in _rnn_step 
    new_output, new_state = call_cell() 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\rnn.py", line 691, in <lambda> 
    call_cell = lambda: cell(input_t, state) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\contrib\rnn\python\ops\core_rnn_cell_impl.py", line 241, in __call__ 
    concat = _linear([inputs, h], 4 * self._num_units, True) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\contrib\rnn\python\ops\core_rnn_cell_impl.py", line 1044, in _linear 
    _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 1049, in get_variable 
    use_resource=use_resource, custom_getter=custom_getter) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 948, in get_variable 
    use_resource=use_resource, custom_getter=custom_getter) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 356, in get_variable 
    validate_shape=validate_shape, use_resource=use_resource) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 341, in _true_getter 
    use_resource=use_resource) 
    File "C:\Program Install\Anaconda3-4.1.0\lib\site-packages\tensorflow\python\ops\variable_scope.py", line 671, in _get_single_variable 
    "VarScope?" % name) 

ValueError: Variable A3C_net/basic_lstm_cell/weights does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?


scope_name = "A3C_net" 
with tf.device(self._device),tf.variable_scope(scope_name) as scope: 
    self.W_conv1, self.b_conv1 = self._conv_variable([8, 8, 4, 16]) 
    self.W_conv2, self.b_conv2 = self._conv_variable([4, 4, 16, 32]) 

    self.W_fc1, self.b_fc1 = self._fc_variable([2592, 256]) 
    # lstm 
    self.lstm = tf.contrib.rnn.BasicLSTMCell(256, state_is_tuple=True, reuse=True)
    # weight for policy output layer 
    self.W_fc2, self.b_fc2 = self._fc_variable([256, action_size]) 
    # weight for value output layer 
    self.W_fc3, self.b_fc3 = self._fc_variable([256, 1]) 

    self.s = tf.placeholder("float", [None, 84, 84, 4]) 

    h_conv1 = tf.nn.relu(self._conv2d(self.s, self.W_conv1, 4) + self.b_conv1) 
    h_conv2 = tf.nn.relu(self._conv2d(h_conv1, self.W_conv2, 2) + self.b_conv2) 
    h_conv2_flat = tf.reshape(h_conv2, [-1, 2592]) 
    h_fc1 = tf.nn.relu(tf.matmul(h_conv2_flat, self.W_fc1) + self.b_fc1) 
    h_fc1_reshaped = tf.reshape(h_fc1, [1,-1,256]) # batches steps inputs 

    self.step_size = tf.placeholder(tf.float32, [1]) 
    self.initial_lstm_state0 = tf.placeholder(tf.float32, [1, 256]) 
    self.initial_lstm_state1 = tf.placeholder(tf.float32, [1, 256]) 
    self.initial_lstm_state = tf.contrib.rnn.LSTMStateTuple(
             self.initial_lstm_state0, 
             self.initial_lstm_state1) 
    lstm_outputs, self.lstm_state = tf.nn.dynamic_rnn(self.lstm,
            h_fc1_reshaped,
            initial_state = self.initial_lstm_state,
            dtype = tf.float32,
            sequence_length = self.step_size,
            time_major = False,
            scope = scope)

    lstm_outputs = tf.reshape(lstm_outputs, [-1,256]) 

    # policy (output) 
    self.pi = tf.nn.softmax(tf.matmul(lstm_outputs, self.W_fc2) + self.b_fc2) 
    # value (output) 
    v_ = tf.matmul(lstm_outputs, self.W_fc3) + self.b_fc3 
    self.v = tf.reshape(v_, [-1]) 

    scope.reuse_variables() 
    self.W_lstm = tf.get_variable("basic_lstm_cell/weights")
    self.b_lstm = tf.get_variable("basic_lstm_cell/biases") 
    self.reset_state() 
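
For context, the ValueError above is what TensorFlow 1.x raises whenever tf.get_variable() is asked to look up a variable inside a reusing scope before that variable has ever been created. A minimal standalone sketch of that mechanism (not taken from the question's code; the scope name "demo" and the variable name "w" are invented for illustration):

import tensorflow as tf  # TensorFlow 1.x API

with tf.variable_scope("demo", reuse=True):
    # No variable named "demo/w" exists yet, so looking it up inside a
    # reusing scope raises a ValueError like the one in the question:
    # "Variable demo/w does not exist, or was not created with
    #  tf.get_variable()."
    w = tf.get_variable("w", shape=[4, 4])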

Answer


Solving it turned out to be very simple: replace tf.get_variable with tf.Variable.

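
As a rough illustration of the difference this fix relies on (a sketch, not part of the original answer; the scope and variable names are invented): tf.Variable() always creates a brand-new variable and ignores the surrounding scope's reuse flag, while tf.get_variable() goes through the variable-scope machinery and therefore fails when reuse is set but the variable does not exist yet.

import tensorflow as tf  # TensorFlow 1.x API

with tf.variable_scope("demo_scope"):
    # tf.Variable never looks up an existing variable; it always creates
    # a fresh one, so the scope's reuse flag has no effect on it.
    w_plain = tf.Variable(tf.zeros([4, 4]), name="w_plain")

    # tf.get_variable consults the variable scope: with reuse off it
    # creates "demo_scope/w_shared"; with reuse on it must already exist.
    w_shared = tf.get_variable("w_shared", shape=[4, 4],
                               initializer=tf.zeros_initializer())

One consequence of switching to tf.Variable() is that the variable can no longer be fetched later with tf.get_variable(), so any sharing has to happen by passing the Python object around.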