I created a simple TensorFlow (1.x) model:
import tensorflow as tf

tf.reset_default_graph()

x_data = [1, 2, 3]
y_data = [3, 4, 5]

X = tf.placeholder(tf.float32, name="X")
Y = tf.placeholder(tf.float32, name="Y")
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='W')
b = tf.Variable(tf.random_uniform([1], 0.0, 2.0), name='b')

# Linear model: hypothesis = W * X + b, named "op_restore" so it can be looked up later
hypothesis = tf.add(b, tf.multiply(X, W), name="op_restore")

saver = tf.train.Saver()

cost = tf.reduce_mean(tf.square(hypothesis - Y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
train_op = optimizer.minimize(cost)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    tf.train.write_graph(sess.graph_def, '.', 'tfandroid.pbtxt')

    for step in range(100):
        _, cost_val = sess.run([train_op, cost], feed_dict={X: x_data, Y: y_data})
        print((step, cost_val, sess.run(W), sess.run(b)))

    # Save the trained variables to a checkpoint (also writes tfandroid.ckpt.meta)
    saver.save(sess, './tfandroid.ckpt')

    print("\n == Test ==")
    print("X: 5, Y: ", sess.run(hypothesis, feed_dict={X: 5}))
    print("X: 2.5, Y: ", sess.run(hypothesis, feed_dict={X: 2.5}))
However, when I restore the model and run it, the result is not the one I expect. Since the training data fits Y = X + 2, I expected a value of about 6 for X = 4.0, but I got None instead.

Below is the re-use code. Can you tell me what is wrong with it?
import tensorflow as tf

tf.reset_default_graph()

with tf.Session() as sess:
    # Rebuild the graph from the meta file and restore the trained variables
    saver = tf.train.import_meta_graph('tfandroid.ckpt.meta')
    saver.restore(sess, tf.train.latest_checkpoint('./'))

    graph = tf.get_default_graph()
    W = graph.get_tensor_by_name("W:0")
    b = graph.get_tensor_by_name("b:0")
    X = graph.get_tensor_by_name("X:0")

    print('sess.run(W) = ', sess.run(W))
    print('sess.run(b) = ', sess.run(b))

    feed_dict = {X: 4.0}
    hypothesis = graph.get_operation_by_name("op_restore")
    print(hypothesis)
    print(sess.run(hypothesis, feed_dict))  # prints None instead of ~6
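Is the problem the difference between get_operation_by_name and get_tensor_by_name? A minimal sketch of what I mean, assuming the checkpoint files from the script above are in the current directory and that the op's output tensor is named "op_restore:0":

import tensorflow as tf

tf.reset_default_graph()
with tf.Session() as sess:
    saver = tf.train.import_meta_graph('tfandroid.ckpt.meta')
    saver.restore(sess, tf.train.latest_checkpoint('./'))
    graph = tf.get_default_graph()
    X = graph.get_tensor_by_name("X:0")

    # Fetching the Operation object: sess.run() on an Operation has no
    # return value, so this prints None.
    op = graph.get_operation_by_name("op_restore")
    print(sess.run(op, feed_dict={X: 4.0}))

    # Fetching the op's output Tensor (":0" suffix) returns the computed
    # value, which should be close to 6 for X = 4.0.
    out = graph.get_tensor_by_name("op_restore:0")
    print(sess.run(out, feed_dict={X: 4.0}))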