Keras/ONNX to TVM error

```python
# Export the Keras model to ONNX
from keras.models import load_model   # or: from tensorflow.keras.models import load_model
import onnxmltools

model_path = model_name + "/" + model_name + ".h5"
output_onnx_model = model_name + "/" + model_name + ".onnx"
keras_model = load_model(model_path)
onnx_model = onnxmltools.convert_keras(keras_model)
onnxmltools.utils.save_model(onnx_model, output_onnx_model)
```
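As an aside, the exported ONNX file itself seems fine (it runs under onnxruntime, see the note at the end of this post). A minimal, hypothetical sanity check along those lines, assuming `onnx` and `onnxruntime` are installed and reusing the `output_onnx_model` path from above, would look something like this:

```python
# Hypothetical sanity check of the exported ONNX file (not part of the original script).
import numpy as np
import onnx
import onnxruntime as ort

onnx.checker.check_model(onnx.load(output_onnx_model))     # structural validity

sess = ort.InferenceSession(output_onnx_model)
inp = sess.get_inputs()[0]
print(inp.name, inp.shape)                                  # input name/shape the TVM frontend must match
dummy = np.ones([d if isinstance(d, int) else 1 for d in inp.shape], dtype=np.float32)
print(sess.run(None, {inp.name: dummy})[0].shape)           # one forward pass with a dummy input
```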

Convert the Keras model (NHWC layout) to Relay format (NCHW layout):

```python
import numpy as np
from tvm import relay

# IMAGE_SIZE is defined elsewhere in the script (28, per the 28x28 input in the error below)
x = np.ones([1, 1, IMAGE_SIZE, IMAGE_SIZE])
shape_dict = {'conv2d_input_1': x.shape}
mod, params = relay.frontend.from_keras(keras_model, shape_dict)
```

Compile the model:

```python
import tvm
import nnvm.compiler

target = 'cuda'
ctx = tvm.gpu(0)
with relay.build_config(opt_level=3):
    executor = relay.build_module.create_executor('graph', mod, ctx, target)
    graph, lib, params = relay.build_module.build(mod, target, params=params)

lib.export_library("./deploy_lib.so")
print('lib exported successfully')
with open("./deploy_graph.json", "w") as fo:
    fo.write(graph.json())
with open("./deploy_param.params", "wb") as fo:
    fo.write(nnvm.compiler.save_param_dict(params))
```

Convert the ONNX model (NHWC layout) to Relay format (NCHW layout):

```python
x = np.ones([1, 1, IMAGE_SIZE, IMAGE_SIZE])
shape_dict = {'input_1': x.shape}
mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)
```

Compile the model (same as above):

```python
target = 'cuda'
ctx = tvm.gpu(0)
with relay.build_config(opt_level=3):
    executor = relay.build_module.create_executor('graph', mod, ctx, target)
    graph, lib, params = relay.build_module.build(mod, target, params=params)

lib.export_library("./deploy_lib.so")
print('lib exported successfully')
with open("./deploy_graph.json", "w") as fo:
    fo.write(graph.json())
with open("./deploy_param.params", "wb") as fo:
    fo.write(nnvm.compiler.save_param_dict(params))
```

Error from the ONNX conversion:

```
[15:48:29] /home/yyh/3rdparty/tvm/src/relay/ir/doc.h:50: text node: ' an internal invariant was violated while typechecking your program
[15:48:29] /home/yyh/3rdparty/tvm/src/relay/op/nn/convolution.h:112: Check failed: reporter->AssertEQ(indexdiv(dshape_nchw[1], param->groups), wshape[1]):
Stack trace:
  [bt] (0) /home/yyh/3rdparty/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x22) [0x7f1a82cd4b22]
  [bt] (1) /home/yyh/3rdparty/tvm/build/libtvm.so(bool tvm::relay::Conv2DReltvm::relay::Conv2DAttrs(tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&)+0x12ca) [0x7f1a8303b62a]
  [bt] (2) /home/yyh/3rdparty/tvm/build/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::runtime::TypedPackedFunc<bool (tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&)>::AssignTypedLambda<bool ()(tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&)>(bool ()(tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)+0xd2) [0x7f1a82ff9fd2]
  [bt] (3) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::TypeSolver::Solve()+0x395) [0x7f1a8325e265]
  [bt] (4) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Infer(tvm::relay::Expr)+0x45) [0x7f1a83249cb5]
  [bt] (5) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::relay::Module const&, tvm::relay::GlobalVar const&)+0x1f5) [0x7f1a8324a525]
  [bt] (6) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ModuleNode::Add(tvm::relay::GlobalVar const&, tvm::relay::Function const&, bool)+0x25d) [0x7f1a8331602d]
  [bt] (7) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ModuleNode::FromExpr(tvm::relay::Expr const&, tvm::Map<tvm::relay::GlobalVar, tvm::relay::Function, void, void> const&, tvm::Map<tvm::GlobalTypeVar, tvm::relay::TypeData, void, void> const&)+0x1b8) [0x7f1a833173d8]
  [bt] (8) /home/yyh/3rdparty/tvm/build/libtvm.so(+0x9ccac5) [0x7f1a83318ac5]
; ' should not has tab or newline.
Traceback (most recent call last):
  File "traffic_cnn_keras_tvm_dbg.py", line 224, in <module>
    mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/onnx.py", line 1555, in from_onnx
    mod, params = g.from_onnx(graph, opset)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/onnx.py", line 1383, in from_onnx
    op = self._convert_operator(op_name, inputs, attr, opset)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/onnx.py", line 1483, in _convert_operator
    sym = convert_map[op_name](inputs, attrs, self._params)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/onnx.py", line 351, in _impl_v1
    a_shape = infer_shape(inputs[0])
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/common.py", line 465, in infer_shape
    out_type = infer_type(inputs, mod=mod)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/common.py", line 456, in infer_type
    new_mod = _module.Module.from_expr(node)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/module.py", line 233, in from_expr
    return _module.Module_FromExpr(expr, funcs, defs)
  File "/home/yyh/3rdparty/tvm/python/tvm/_ffi/_ctypes/function.py", line 207, in __call__
    raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (7) /home/yyh/3rdparty/tvm/build/libtvm.so(TVMFuncCall+0x46) [0x7f1a83395266]
  [bt] (6) /home/yyh/3rdparty/tvm/build/libtvm.so(+0x9ccac5) [0x7f1a83318ac5]
  [bt] (5) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ModuleNode::FromExpr(tvm::relay::Expr const&, tvm::Map<tvm::relay::GlobalVar, tvm::relay::Function, void, void> const&, tvm::Map<tvm::GlobalTypeVar, tvm::relay::TypeData, void, void> const&)+0x1b8) [0x7f1a833173d8]
  [bt] (4) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ModuleNode::Add(tvm::relay::GlobalVar const&, tvm::relay::Function const&, bool)+0x25d) [0x7f1a8331602d]
  [bt] (3) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::relay::Module const&, tvm::relay::GlobalVar const&)+0x1f5) [0x7f1a8324a525]
  [bt] (2) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Infer(tvm::relay::Expr)+0x76) [0x7f1a83249ce6]
  [bt] (1) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ErrorReporter::RenderErrors(tvm::relay::Module const&, bool)+0x1706) [0x7f1a832f6ca6]
  [bt] (0) /home/yyh/3rdparty/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x22) [0x7f1a82cd4b22]
  [bt] (8) /home/yyh/3rdparty/tvm/build/libtvm.so(+0x9ccac5) [0x7f1a83318ac5]
  [bt] (7) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ModuleNode::FromExpr(tvm::relay::Expr const&, tvm::Map<tvm::relay::GlobalVar, tvm::relay::Function, void, void> const&, tvm::Map<tvm::GlobalTypeVar, tvm::relay::TypeData, void, void> const&)+0x1b8) [0x7f1a833173d8]
  [bt] (6) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::ModuleNode::Add(tvm::relay::GlobalVar const&, tvm::relay::Function const&, bool)+0x25d) [0x7f1a8331602d]
  [bt] (5) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::relay::Module const&, tvm::relay::GlobalVar const&)+0x1f5) [0x7f1a8324a525]
  [bt] (4) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Infer(tvm::relay::Expr)+0x45) [0x7f1a83249cb5]
  [bt] (3) /home/yyh/3rdparty/tvm/build/libtvm.so(tvm::relay::TypeSolver::Solve()+0x395) [0x7f1a8325e265]
  [bt] (2) /home/yyh/3rdparty/tvm/build/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::runtime::TypedPackedFunc<bool (tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&)>::AssignTypedLambda<bool ()(tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&)>(bool ()(tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)+0xd2) [0x7f1a82ff9fd2]
  [bt] (1) /home/yyh/3rdparty/tvm/build/libtvm.so(bool tvm::relay::Conv2DReltvm::relay::Conv2DAttrs(tvm::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::relay::TypeReporter const&)+0x12ca) [0x7f1a8303b62a]
  [bt] (0) /home/yyh/3rdparty/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x22) [0x7f1a82cd4b22]
  File "/home/yyh/3rdparty/tvm/src/relay/ir/error.cc", line 132
TVMError: Error(s) have occurred. The program has been annotated with them:

In main:
v0.0.4
fn (%conv2d_input: Tensor[(1, 1, 28, 28), float32], %conv2d_3/kernel:0: Tensor[(32, 1, 5, 5), float32], %conv2d_3/bias:0: Tensor[(32), float32], %conv2d_1_1/kernel:0: Tensor[(64, 32, 5, 5), float32], %conv2d_1_1/bias:0: Tensor[(64), float32]) {
  %0 = transpose(%conv2d_input, axes=[0, 3, 1, 2]);
  %1 = nn.conv2d(%0, %conv2d_3/kernel:0, padding=[2, 2], kernel_size=[5, 5]) an internal invariant was violated while typechecking your program [15:48:29] /home/yyh/3rdparty/tvm/src/relay/op/nn/convolution.h:112: Check failed: reporter->AssertEQ(indexdiv(dshape_nchw[1], param->groups), wshape[1]): ; ;
  %2 = nn.bias_add(%1, %conv2d_3/bias:0);
  %3 = nn.relu(%2);
  %4 = nn.max_pool2d(%3, pool_size=[2, 2], strides=[2, 2]);
  %5 = nn.conv2d(%4, %conv2d_1_1/kernel:0, padding=[2, 2], kernel_size=[5, 5]);
  %6 = nn.bias_add(%5, %conv2d_1_1/bias:0);
  %7 = nn.relu(%6);
  %8 = nn.max_pool2d(%7, pool_size=[2, 2], strides=[2, 2]);
  %9 = transpose(%8, axes=[0, 2, 3, 1]);
  nn.batch_flatten(%9)
}
```
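If I read the annotated program correctly, the failed check matches the shapes shown above: the input was given as (1, 1, 28, 28), the frontend inserts transpose(axes=[0, 3, 1, 2]), which moves a 28 into the channel position, while the first conv2d weight (32, 1, 5, 5) expects 1 input channel. A small sketch of that shape propagation, using plain numpy and only the shapes taken from the error above (purely illustrative):

```python
# Shape propagation implied by the annotated Relay program above (illustration only).
import numpy as np

x = np.ones((1, 1, 28, 28), dtype=np.float32)   # shape passed via shape_dict
x_t = np.transpose(x, (0, 3, 1, 2))             # transpose inserted by the frontend
print(x_t.shape)                                # (1, 28, 1, 28): channel dim becomes 28

w = np.ones((32, 1, 5, 5), dtype=np.float32)    # conv2d_3/kernel:0 from the error
groups = 1
# The failing check from convolution.h:112, written in Python:
print(x_t.shape[1] // groups == w.shape[1])     # False, since 28 != 1
```

If that reading is right, the shape in shape_dict may need to be the model's original NHWC input shape (e.g. [1, IMAGE_SIZE, IMAGE_SIZE, 1]) rather than NCHW, but I am not sure whether that is the whole story.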

Error from the Keras conversion:

```
Traceback (most recent call last):
  File "traffic_cnn_keras_tvm_dbg.py", line 225, in <module>
    mod, params = relay.frontend.from_keras(keras_model, shape_dict)
  File "/home/yyh/3rdparty/tvm/python/tvm/relay/frontend/keras.py", line 821, in from_keras
    zip_node = zip(node.node_indices, node.tensor_indices, node.inbound_layers)
TypeError: zip argument #1 must support iteration
```

I ran into the same problem when converting the ONNX model to TVM, even though the ONNX model already runs successfully with onnxruntime. Has this problem been fixed?