I have a trained model which I want to use to predict some data and serve it using flask + uwsgi. Everything works fine but when I hit the server sometimes it throws an error:
File "api.py", line 150, in get_data
im = sess.run(image_tf)
`ValueError: Fetch argument <tf.Tensor 'DecodeJpeg_2:0' shape=(?, ?, 3) dtype=uint8> cannot be interpreted as a Tensor.
(Tensor Tensor("DecodeJpeg_2:0", shape=(?, ?, 3), dtype=uint8)
is not an element of this graph.)`
Few requests work correctly but fe throws the above error. I figured out it has something to do with the tensorflow session but I'm not able to figure out how can I use it in flask efficiently. Below is the snippet:
config = my_config_pb2.MyConfig()
with tf.gfile.FastGFile('myconfig.pbtxt', 'r') as f:
text_format.Merge(f.read(), config)
sess_config = tf.ConfigProto(
log_device_placement=False,
allow_soft_placement = True
)
sess = tf.Session(config=sess_config)
init_op = tf.global_variables_initializer()
sess.run(init_op)
def load_model():
# Loading model that will be used.
tf.saved_model.loader.load(sess,
[tf.saved_model.tag_constants.SERVING],config.model_path)
graph = tf.get_default_graph()
input_image = graph.get_tensor_by_name('input_image:0')
....
....
params = some parameters
return params
def get_data(im,ftype,params):
try:
img = tf.read_file(im)
image_tf = tf.image.decode_jpeg(img, channels=3)
except Exception as e:
print ("## Got Exception while reading: ", e)
return None
im = sess.run(image_tf)
pred = sess.run(....)
return pred
params = load_model(sess)
@app.route("/data", methods=['GET', 'POST'])
def data():
if request.method == 'GET':
url = request.args.get('path')
print ("##","recieved")
if not path:
abort(400)
pred = get_data(image,"url",params)
return pred
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=8011,use_reloader=False)
Finally I run the app using:
`uwsgi --http 0.0.0.0:8011 --wsgi-file api.py --master --processes 4 --threads 2 --callable app --lazy --lazy-apps`
Can anyone show me what I'm doing wrong ?