
Restoring a TensorFlow model, but the result is incorrect

I am still trying to grasp how to restore a saved TensorFlow graph from disk and feed a dictionary to the model. I have looked at multiple sources, but cannot resolve this issue. The generic MLP code below (first block) saves the model to disk, but after restoring it (second block) my accuracy comes back as None. Any idea what might be causing this?

# Import MNIST data 
from tensorflow.examples.tutorials.mnist import input_data 
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 
import tensorflow as tf 

# Parameters 
learning_rate = 0.001 
training_epochs = 15 
batch_size = 100 
display_step = 1 

# Network Parameters 
n_hidden_1 = 256 # 1st layer number of features 
n_hidden_2 = 256 # 2nd layer number of features 
n_input = 784 # MNIST data input (img shape: 28*28) 
n_classes = 10 # MNIST total classes (0-9 digits) 

with tf.name_scope('placeholders'): 
    # tf Graph input 
    x = tf.placeholder("float", [None, n_input], name='x') 
    y = tf.placeholder("float", [None, n_classes], name='y') 

with tf.name_scope('Layer-1'): 
    NN_weights_1 = tf.Variable(tf.random_normal([n_input, n_hidden_1], seed=1), name='NN_weights_1') 
    NN_biases_1 = tf.Variable(tf.constant(0.0, shape=[n_hidden_1], name='Const'), name='NN_biases_1') 
    func = tf.add(tf.matmul(x, NN_weights_1, name='matmul'), NN_biases_1, name='Addition') 
    func_2 = tf.nn.relu(func) 

with tf.name_scope('Layer-2'): 
    NN_weights_2 = tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2], seed=2), name='NN_weights_2') 
    NN_biases_2 = tf.Variable(tf.constant(0.0, shape=[n_hidden_2], name='Const'), name='NN_biases_2') 
    func_3 = tf.add(tf.matmul(func_2, NN_weights_2, name='matmul'), NN_biases_2, name='Addition') 
    func_4 = tf.nn.relu(func_3) 

with tf.name_scope('Output'): 
    NN_weights_3 = tf.Variable(tf.random_normal([n_hidden_2, n_classes], seed=3), name='NN_weights_3') 
    NN_biases_3 = tf.Variable(tf.constant(0.0, shape=[n_classes], name='Const'), name='NN_biases_3') 
    # renamed from func_3/func_4 to avoid shadowing the Layer-2 tensors 
    func_5 = tf.add(tf.matmul(func_4, NN_weights_3, name='matmul'), NN_biases_3, name='Addition') 
    func_6 = tf.nn.sigmoid(func_5) 

# Define loss and optimizer 
with tf.name_scope('Operations_'): 
    cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=func_6, labels=y), name='cost') 
    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost) 
    # Test model 
    correct_prediction = tf.equal(tf.argmax(func_6, 1), tf.argmax(y, 1), name='correct_prediction') 
    # Calculate accuracy 
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"), name='accuracy') 
    # Initializing the variables 
    init = tf.global_variables_initializer() 

# Launch the graph 
with tf.Session() as sess: 
    sess.run(init) 
    saver = tf.train.Saver() 

    # Training cycle 
    for epoch in range(training_epochs): 
        avg_cost = 0. 
        total_batch = int(mnist.train.num_examples / batch_size) 
        # Loop over all batches 
        for i in range(total_batch): 
            batch_x, batch_y = mnist.train.next_batch(batch_size) 
            # Run optimization op (backprop) and cost op (to get loss value) 
            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, 
                                                          y: batch_y}) 
            # Compute average loss 
            avg_cost += c / total_batch 
        # Display logs per epoch step 
        if epoch % display_step == 0: 
            print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(avg_cost)) 
    print("Optimization Finished!") 
    print("Accuracy:", accuracy.eval({x: mnist.test.images, y: mnist.test.labels})) 
    saver.save(sess, 'my_test_model', global_step=1000) 
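
For reference, on TensorFlow 1.x a saver.save call with global_step=1000 writes a family of checkpoint files next to the script; the .meta file is the one tf.train.import_meta_graph loads in the restore script below:

# Files produced by saver.save(sess, 'my_test_model', global_step=1000): 
#   my_test_model-1000.meta                  - serialized graph (for tf.train.import_meta_graph) 
#   my_test_model-1000.index                 - index of the saved variables 
#   my_test_model-1000.data-00000-of-00001   - the variable values themselves 
#   checkpoint                               - text file recording the latest checkpoint path 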

Restoring the model and feeding the dictionary to get the accuracy:

import tensorflow as tf 
from tensorflow.examples.tutorials.mnist import input_data 
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 

sess = tf.Session() 
# First let's load the meta graph and restore the weights 
saver = tf.train.import_meta_graph('my_test_model-1000.meta') 
saver.restore(sess, "my_test_model-1000") 
graph = tf.get_default_graph() 
accuracy = graph.get_operation_by_name("Operations_/accuracy") 
# Access saved variables directly (prints the restored Layer-1 weights) 
print(sess.run('Layer-1/NN_weights_1:0')) 

print("Accuracy:", sess.run([accuracy], feed_dict={'placeholders/x:0': mnist.test.images, 'placeholders/y:0': mnist.test.labels})) 

Answer


Change it to:

accuracy=graph.get_operation_by_name("Operations_/accuracy").outputs[0] 

get_operation_by_name returns an Operation object, and TensorFlow discards the outputs of Operation objects executed via Session.run, so running one returns None. See here for a detailed explanation: TensorFlow: eval restored graph
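
For completeness, a minimal sketch of the Operation-vs-Tensor distinction, reusing the names from the question (it assumes the checkpoint files written by the first script are present):

import tensorflow as tf 
from tensorflow.examples.tutorials.mnist import input_data 

mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 

sess = tf.Session() 
saver = tf.train.import_meta_graph('my_test_model-1000.meta') 
saver.restore(sess, 'my_test_model-1000') 
graph = tf.get_default_graph() 

# get_operation_by_name returns an Operation; sess.run(op) executes it 
# for its side effects and returns None, which is why the accuracy came back empty. 
acc_op = graph.get_operation_by_name("Operations_/accuracy") 

# The value lives on the Operation's output Tensor; either of these works: 
acc = acc_op.outputs[0] 
acc = graph.get_tensor_by_name("Operations_/accuracy:0") 

print("Accuracy:", sess.run(acc, feed_dict={'placeholders/x:0': mnist.test.images, 
                                            'placeholders/y:0': mnist.test.labels})) 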


Really appreciate the answer, and the reference too! – mamafoku
