import tensorflow as tf

# placeholders for the inputs (3073 features) and the one-hot labels (10 classes)
tf_X = tf.placeholder("float", [None, 3073])
tf_Y = tf.placeholder("float", [None, 10])
tf_W = tf.Variable(0.001 * tf.random_normal([3073, 10]))
# alternative initialization: tf.random_uniform([3073, 10], -0.1, 0.1)
tf_learning_rate = 0.0001

# output is the softmax probability for each class
hypothesis = tf.nn.softmax(tf.matmul(tf_X, tf_W))
# cross-entropy cost, summed over classes and averaged over the batch
cost = tf.reduce_mean(tf.reduce_sum(tf_Y * -tf.log(hypothesis), reduction_indices=1))
optimizer = tf.train.GradientDescentOptimizer(tf_learning_rate).minimize(cost)

init = tf.initialize_all_variables()
with tf.Session() as sess:
    sess.run(init)
    # X_dev / onehot_y_dev are the dev-set features and one-hot encoded labels
    print sess.run(cost, feed_dict={tf_X: X_dev, tf_Y: onehot_y_dev})
    for step in xrange(400):
        sess.run(optimizer, feed_dict={tf_X: X_dev, tf_Y: onehot_y_dev})
        if step % 200 == 0:
            print step, sess.run(cost, feed_dict={tf_X: X_dev, tf_Y: onehot_y_dev})
I am trying to implement softmax cross-entropy in TensorFlow. When I sess.run(cost) it returns a number (2.322), but once I run the GradientDescentOptimizer, the cost that comes back is NaN. What is happening here? Have I implemented the optimizer step incorrectly?

Tensorflow Gradient Optimizer returns NaN while cross entropy cost returns a number
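For reference, the NaN most likely appears because tf.log(hypothesis) receives an input of exactly 0 once gradient descent pushes some softmax outputs to underflow. A minimal sketch of a numerically stable variant, assuming the same tf_X, tf_Y, tf_W, and tf_learning_rate defined above and a TF 1.x API, is to let TensorFlow fuse the softmax and the log into a single op, or to clip the probabilities before taking the log:

logits = tf.matmul(tf_X, tf_W)
# the fused op never evaluates log(softmax(...)) explicitly, so it cannot see log(0)
stable_cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=tf_Y, logits=logits))
optimizer = tf.train.GradientDescentOptimizer(tf_learning_rate).minimize(stable_cost)
# alternatively, keep the manual formula but clip the probabilities away from 0:
# clipped = tf.clip_by_value(hypothesis, 1e-10, 1.0)
# cost = tf.reduce_mean(tf.reduce_sum(tf_Y * -tf.log(clipped), reduction_indices=1))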