@srkreddy1238 When I used tvm/tutorials/nnvm/from_tensorflow.py, I found that the TVM inference time is too long.
tvm inference time: 15.335524s
and tf inference time : 3.372391s
Did I do something wrong? Can anyone give me some advice? Thanks in advance.
diff:
+import time
from tvm.contrib import graph_runtime
dtype = 'uint8'
+
+start = time.clock()
+
m = graph_runtime.create(graph, lib, ctx)
m.set_input('DecodeJpeg/contents', tvm.nd.array(x.astype(dtype)))
m.set_input(**params)
m.run()
+
+end = time.clock()
+print "execute time:"
+print (str(end - start))
tvm_output = m.get_output(0, tvm.nd.empty(((1, 1008)), 'float32'))
@@ -194,6 +217,7 @@ def run_inference_on_image(image):
+start = time.clock()
if not tf.gfile.Exists(image):
tf.logging.fatal('File does not exist %s', image)
image_data = tf.gfile.FastGFile(image, 'rb').read()
@@ -219,5 +243,8 @@ def run_inference_on_image(image):
human_string = node_lookup.id_to_string(node_id)
score = predictions[node_id]
print('%s (score = %.5f)' % (human_string, score))
+end = time.clock()
+print "tf execute time:"
+print (str(end - start))