I converted the torch model mobilenetv3-small to ONNX, and then the ONNX model to TVM, but it gives some errors. @tqchen, can you give me some help? Thanks:
root@tvm_demo:/workspace//onnx2tvm# python3 from_onnx.py
WARNING:root:Attribute momentum is ignored in relay.sym.batch_norm
WARNING:root:Attribute momentum is ignored in relay.sym.batch_norm
WARNING:root:Attribute momentum is ignored in relay.sym.batch_norm
WARNING:root:Infering Reshape argument by precompute
Traceback (most recent call last):
File "from_onnx_native.py", line 36, in <module>
sym, params = relay.frontend.from_onnx(onnx_model, shape_dict)
File "/workspace/tvm_new/tvm/python/tvm/relay/frontend/onnx.py", line 1258, in from_onnx
sym, params = g.from_onnx(graph, opset)
File "/workspace/tvm_new/tvm/python/tvm/relay/frontend/onnx.py", line 1086, in from_onnx
op = self._convert_operator(op_name, inputs, attr, opset)
File "/workspace/tvm_new/tvm/python/tvm/relay/frontend/onnx.py", line 1192, in _convert_operator
sym = convert_map[op_name](inputs, attrs, self._params)
File "/workspace/tvm_new/tvm/python/tvm/relay/frontend/onnx.py", line 417, in _impl_v1
graph, lib, params = tvm.relay.build(func, target="llvm", params=params)
File "/workspace/tvm_new/tvm/python/tvm/relay/build_module.py", line 356, in build
params)
File "/workspace/tvm_new/tvm/python/tvm/relay/build_module.py", line 183, in build
self._build(func, target, target_host)
File "/workspace/tvm_new/tvm/python/tvm/_ffi/_ctypes/function.py", line 209, in __call__
raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
[bt] (8) /workspace/tvm_new/tvm/build/libtvm.so(+0x51398a) [0x7fb67baec98a]
[bt] (7) /workspace/tvm_new/tvm/build/libtvm.so(+0x5d2cdb) [0x7fb67bbabcdb]
[bt] (6) /workspace/tvm_new/tvm/build/libtvm.so(+0x5ec55c) [0x7fb67bbc555c]
[bt] (5) /workspace/tvm_new/tvm/build/libtvm.so(tvm::compute(tvm::Array<HalideIR::Expr, void>, std::function<HalideIR::Expr (tvm::Array<tvm::Var, void> const&)>, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::NodeRef, void, void>)+0x4fe) [0x7fb67ba5918e]
[bt] (4) /workspace/tvm_new/tvm/build/libtvm.so(+0x5ebeb0) [0x7fb67bbc4eb0]
[bt] (3) /workspace/tvm_new/tvm/build/libtvm.so(+0x5ebcf0) [0x7fb67bbc4cf0]
[bt] (2) /workspace/tvm_new/tvm/build/libtvm.so(tvm::Tensor::operator()(tvm::Array<HalideIR::Expr, void>) const+0x8ee) [0x7fb67b934ebe]
[bt] (1) /workspace/tvm_new/tvm/build/libtvm.so(HalideIR::Internal::Call::make(HalideIR::Type, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<HalideIR::Expr, void>, HalideIR::Internal::Call::CallType, HalideIR::IR::FunctionRef, int)+0x48b) [0x7fb67beddaeb]
[bt] (0) /workspace/tvm_new/tvm/build/libtvm.so(+0x1beee2) [0x7fb67b797ee2]
File "/workspace/tvm_new/tvm/3rdparty/HalideIR/src/ir/IR.cpp", line 468
TVMError: Check failed: args[i].type() == Int(32): Args to call to halide function must be type Int(32)
Here is the script file, from_onnx.py:
import onnx
import numpy as np
import tvm
import tvm.relay as relay
from tvm.contrib.download import download_testdata
from PIL import Image
def preprocess_image(image_file):
    """Load an image and preprocess it into an NCHW float32 tensor.

    Parameters
    ----------
    image_file : str
        Path to the input image file.

    Returns
    -------
    numpy.ndarray
        float32 array of shape (1, 3, 224, 224), values mapped from
        [0, 255] to approximately [-1, 1].
    """
    # Force 3-channel RGB: a PNG input may carry an alpha channel (RGBA)
    # or be grayscale, which would break the expected (1, 3, 224, 224)
    # shape after the transpose below.
    resized_image = Image.open(image_file).convert("RGB").resize((224, 224))
    image_data = np.asarray(resized_image).astype("float32")
    # convert HWC to CHW
    image_data = image_data.transpose((2, 0, 1))
    # after expand_dims, we have format NCHW
    image_data = np.expand_dims(image_data, axis=0)
    # Normalize every channel at once (the original applied the same
    # affine transform to each channel separately): [0, 255] -> [-1, 1].
    image_data = 2.0 / 255.0 * image_data - 1.0
    return image_data
# --------------------------------------------------------------------
# Load the ONNX model and preprocess the sample image.
# --------------------------------------------------------------------
onnx_path = "mobilenetv3_small.onnx"
model = onnx.load(onnx_path)

sample_file = 'cat.png'
sample = preprocess_image(sample_file)

# --------------------------------------------------------------------
# Compile the model with relay on the CPU (LLVM) backend.
# --------------------------------------------------------------------
compile_target = 'llvm'
# The ONNX graph's input tensor is named '0'; map it to the NCHW shape.
graph_input = '0'
input_shapes = {graph_input: (1, 3, 224, 224)}
net, net_params = relay.frontend.from_onnx(model, input_shapes)
with relay.build_config(opt_level=1):
    executor = relay.build_module.create_executor(
        'graph', net, tvm.cpu(0), compile_target)

# --------------------------------------------------------------------
# Execute on TVM and fetch the result back as a numpy array.
# --------------------------------------------------------------------
result = executor.evaluate(net)(
    tvm.nd.array(sample.astype('float32')), **net_params).asnumpy()
print('finished')