# Demo: the manually computed softmax cross-entropy equals
# tf.nn.softmax_cross_entropy_with_logits on the same logits/labels.
# NOTE(review): assumes `import tensorflow as tf` (TF 1.x API — tf.Session,
# tf.log) appears earlier in the file; confirm against the full script.

# Three identical logit rows, so softmax yields the same distribution per row.
logits = tf.constant([[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0]])
y = tf.nn.softmax(logits)
# One-hot ground-truth labels for the three examples.
y_ = tf.constant([[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])

# Manual cross-entropy; the clip guards against log(0) -> -inf/NaN.
cross_entropy = -tf.reduce_sum(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
# Library op: fuses softmax + cross-entropy (numerically more stable).
cross_entropy2 = tf.reduce_sum(
    tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))

with tf.Session() as sess:
    # These statements MUST be indented inside the `with` block; in the
    # original paste they sat at top level, which is a SyntaxError.
    softmax = sess.run(y)
    ce = sess.run(cross_entropy)
    ce2 = sess.run(cross_entropy2)
    print("cross_entropy result", ce)
    # ("logist" typo kept byte-for-byte to match the captured output below.)
    print("softmax_cross_entropy_with_logist result=", ce2)
    # Original question (translated): "why does the last line not print?"
    # Answer: the question text itself was fused onto this source line,
    # making it a SyntaxError; with it removed, the print runs fine.
    print("softmax result=", softmax)
WARNING: Logging before flag parsing goes to stderr.
W0524 17:57:02.851681 3600 deprecation.py:323] From D:/untitled/demp.py:156: softmax_cross_entropy_with_logits (from tensorflow.python.ops.nn_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Future major versions of TensorFlow will allow gradients to flow
into the labels input on backprop by default.
See `tf.nn.softmax_cross_entropy_with_logits_v2`.
2019-05-24 17:57:02.879936: I tensorflow/core/platform/cpu_feature_guard.cc:142] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2
cross_entropy result 5.2228174
softmax_cross_entropy_with_logist result= 5.2228174
mwf 2019-5-24 17:58:15