ONNX to Relay conversion error

The error message is shown below:

root@cvpai-master:/workspace# python3 /home/xuyangyang/cv_inference/mseqa/autotune_infer.py
img_data shape = (224, 224, 3)
log_file name is mseqa-NHWC-B1-cuda_v2.json
Extract tasks...
Traceback (most recent call last):
  File "/home/xuyangyang/cv_inference/mseqa/autotune_infer.py", line 188, in <module>
    mod, params, input_shape, output_shape = get_network(network, batch_size, layout, dtype=dtype)
  File "/home/xuyangyang/cv_inference/mseqa/autotune_infer.py", line 129, in get_network
    mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)
  File "/workspace/apache-tvm-src-v0.10.0/python/tvm/relay/frontend/onnx.py", line 5924, in from_onnx
    mod, params = g.from_onnx(graph, opset)
  File "/workspace/apache-tvm-src-v0.10.0/python/tvm/relay/frontend/onnx.py", line 5594, in from_onnx
    self._construct_nodes(graph)
  File "/workspace/apache-tvm-src-v0.10.0/python/tvm/relay/frontend/onnx.py", line 5713, in _construct_nodes
    op = fold_constant(op)
  File "/workspace/apache-tvm-src-v0.10.0/python/tvm/relay/frontend/common.py", line 511, in fold_constant
    return _transform.FoldConstantExpr(node, mod)
  File "/workspace/apache-tvm-src-v0.10.0/python/tvm/relay/transform/transform.py", line 280, in FoldConstantExpr
    return _ffi_api.FoldConstantExpr(expr, mod, fold_qnn)
  File "/workspace/apache-tvm-src-v0.10.0/python/tvm/_ffi/_ctypes/packed_func.py", line 237, in __call__
    raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
  21: TVMFuncCall
  20: tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<tvm::runtime::TypedPackedFunc<tvm::RelayExpr (tvm::RelayExpr const&, tvm::IRModule const&, bool)>::AssignTypedLambda<tvm::RelayExpr (*)(tvm::RelayExpr const&, tvm::IRModule const&, bool)>(tvm::RelayExpr (*)(tvm::RelayExpr const&, tvm::IRModule const&, bool), std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >)::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}> >::Call(tvm::runtime::PackedFuncObj const*, tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)
  19: tvm::relay::transform::FoldConstantExpr(tvm::RelayExpr const&, tvm::IRModule const&, bool)
  18: tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)
  17: void tvm::relay::ExpandDataflow<tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#2}, tvm::relay::ExpandDataflow<{lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1}>(tvm::RelayExpr, {lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1})::{lambda(tvm::RelayExpr const&)#1}>(tvm::RelayExpr, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#2}, tvm::relay::ExpandDataflow, tvm::relay::ExpandDataflow<{lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1}>(tvm::RelayExpr, {lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1})::{lambda(tvm::RelayExpr const&)#1}) [clone .isra.0]
  16: tvm::relay::MixedModeMutator::VisitLeaf(tvm::RelayExpr const&)
  15: _ZN3tvm5relay16MixedModeMutator17DispatchVisitExprERKNS_9RelayExp
  14: tvm::relay::ExprMutator::VisitExpr(tvm::RelayExpr const&)
  13: _ZZN3tvm5relay11ExprFunctorIFNS_9RelayExprERKS2_EE10InitVTableEvENUlRKNS_7r
  12: tvm::relay::MixedModeMutator::VisitExpr_(tvm::relay::CallNode const*)
  11: tvm::relay::transform::(anonymous namespace)::ConstantFolder::Rewrite_(tvm::relay::CallNode const*, tvm::RelayExpr const&)
  10: tvm::relay::transform::(anonymous namespace)::ConstantFolder::ConstEvaluate(tvm::RelayExpr const&)
  9: tvm::relay::Eval(tvm::RelayExpr, tvm::runtime::Map<tvm::GlobalTypeVar, tvm::TypeData, void, void>, std::unordered_set<tvm::runtime::String, std::hash<tvm::runtime::String>, std::equal_to<tvm::runtime::String>, std::allocator<tvm::runtime::String> >, DLDevice, tvm::Target, tvm::runtime::Map<tvm::runtime::String, tvm::runtime::ObjectRef, void, void>)
  8: tvm::relay::Prepare(tvm::IRModule, tvm::CompilationConfig const&)
  7: tvm::transform::Pass::operator()(tvm::IRModule) const
  6: tvm::transform::Pass::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  5: tvm::transform::SequentialNode::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  4: tvm::transform::Pass::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  3: tvm::transform::ModulePassNode::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  2: _ZN3tvm7runtime13PackedFuncObj9ExtractorINS0_16PackedFuncSubObjIZNS0_15TypedPackedFuncIFNS_8IRModuleES5_NS_9transform11PassContextEEE17AssignTypedLambdaIZNS_5relay9transform9InferTypeEvEUlS5_RKS7_E_EEvT_EUlRKNS0_7TVMArgsEPNS0_11TVMRetValueEE_EEE4CallEPKS1_SH_SL_
  1: tvm::relay::TypeInferencer::Infer(tvm::GlobalVar, tvm::relay::Function)
  0: tvm::relay::TypeSolver::Solve() [clone .cold]
  23: TVMFuncCall
  22: tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<tvm::runtime::TypedPackedFunc<tvm::RelayExpr (tvm::RelayExpr const&, tvm::IRModule const&, bool)>::AssignTypedLambda<tvm::RelayExpr (*)(tvm::RelayExpr const&, tvm::IRModule const&, bool)>(tvm::RelayExpr (*)(tvm::RelayExpr const&, tvm::IRModule const&, bool), std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >)::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}> >::Call(tvm::runtime::PackedFuncObj const*, tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)
  21: tvm::relay::transform::FoldConstantExpr(tvm::RelayExpr const&, tvm::IRModule const&, bool)
  20: tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)
  19: void tvm::relay::ExpandDataflow<tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#2}, tvm::relay::ExpandDataflow<{lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1}>(tvm::RelayExpr, {lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1})::{lambda(tvm::RelayExpr const&)#1}>(tvm::RelayExpr, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#2}, tvm::relay::ExpandDataflow, tvm::relay::ExpandDataflow<{lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1}>(tvm::RelayExpr, {lambda(tvm::RelayExpr const&)#1}, tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)::{lambda(tvm::RelayExpr const&)#1})::{lambda(tvm::RelayExpr const&)#1}) [clone .isra.0]
  18: tvm::relay::MixedModeMutator::VisitLeaf(tvm::RelayExpr const&)
  17: _ZN3tvm5relay16MixedModeMutator17DispatchVisitExprERKNS_9RelayExp
  16: tvm::relay::ExprMutator::VisitExpr(tvm::RelayExpr const&)
  15: _ZZN3tvm5relay11ExprFunctorIFNS_9RelayExprERKS2_EE10InitVTableEvENUlRKNS_7r
  14: tvm::relay::MixedModeMutator::VisitExpr_(tvm::relay::CallNode const*)
  13: tvm::relay::transform::(anonymous namespace)::ConstantFolder::Rewrite_(tvm::relay::CallNode const*, tvm::RelayExpr const&)
  12: tvm::relay::transform::(anonymous namespace)::ConstantFolder::ConstEvaluate(tvm::RelayExpr const&)
  11: tvm::relay::Eval(tvm::RelayExpr, tvm::runtime::Map<tvm::GlobalTypeVar, tvm::TypeData, void, void>, std::unordered_set<tvm::runtime::String, std::hash<tvm::runtime::String>, std::equal_to<tvm::runtime::String>, std::allocator<tvm::runtime::String> >, DLDevice, tvm::Target, tvm::runtime::Map<tvm::runtime::String, tvm::runtime::ObjectRef, void, void>)
  10: tvm::relay::Prepare(tvm::IRModule, tvm::CompilationConfig const&)
  9: tvm::transform::Pass::operator()(tvm::IRModule) const
  8: tvm::transform::Pass::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  7: tvm::transform::SequentialNode::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  6: tvm::transform::Pass::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  5: tvm::transform::ModulePassNode::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
  4: _ZN3tvm7runtime13PackedFuncObj9ExtractorINS0_16PackedFuncSubObjIZNS0_15TypedPackedFuncIFNS_8IRModuleES5_NS_9transform11PassContextEEE17AssignTypedLambdaIZNS_5relay9transform9InferTypeEvEUlS5_RKS7_E_EEvT_EUlRKNS0_7TVMArgsEPNS0_11TVMRetValueEE_EEE4CallEPKS1_SH_SL_
  3: tvm::relay::TypeInferencer::Infer(tvm::GlobalVar, tvm::relay::Function)
  2: tvm::relay::TypeSolver::Solve()
  1: tvm::runtime::PackedFuncObj::Extractor<tvm::runtime::PackedFuncSubObj<tvm::runtime::TypedPackedFunc<bool (tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>::AssignTypedLambda<bool (*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>(bool (*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}> >::Call(tvm::runtime::PackedFuncObj const*, tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)
  0: tvm::relay::ReshapeRel(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)
  File "/workspace/apache-tvm-src-v0.10.0/src/relay/analysis/type_solver.cc", line 624
TVMError:
---------------------------------------------------------------
An error occurred during the execution of TVM.
For more information, please see: https://tvm.apache.org/docs/errors.html
---------------------------------------------------------------
  Check failed: (false) is false: [02:06:47] /workspace/apache-tvm-src-v0.10.0/src/relay/op/tensor/transform.cc:794:
---------------------------------------------------------------
An error occurred during the execution of TVM.
For more information, please see: https://tvm.apache.org/docs/errors.html
---------------------------------------------------------------

  Check failed: oshape_sum == data_shape_sum (49000 vs. 196) : Input tensor shape(7,7,4) and reshaped shape(49,1000) are not compatible!

The conversion code is:

shape_dict = {"image": (1, 3, 224, 224), "input_ids": (1,1000), "bbox": (1,1000), 
              "position_ids": (1,1000), "token_type_ids":(1,1000),
              "attention_mask": (1,1000)}
mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)
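
The entries in shape_dict above were written by hand. A quick way to compare them against what the graph actually declares is to print the model's inputs; a minimal sketch (the file name is the one from the Google Drive link below, and symbolic dimensions are printed by their names):

import onnx

# List every graph input with its declared shape so each entry in
# shape_dict can be checked against the model.
model = onnx.load("maxvit_surgeoned.onnx")
for inp in model.graph.input:
    dims = [d.dim_value if d.HasField("dim_value") else d.dim_param
            for d in inp.type.tensor_type.shape.dim]
    print(inp.name, dims)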

The ONNX model is uploaded here: maxvit_surgeoned.onnx (Google Drive).

I cannot figure out the cause of the error from this message. Can anyone help? Thanks a lot.

I think I have found the problem. The input shape of one tensor was set incorrectly, which made the shapes of two tensors in the graph inconsistent.
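
For anyone hitting the same check: it compares element counts, and the input tensor holds 7 × 7 × 4 = 196 elements while the reshape target needs 49 × 1000 = 49000, so the reshape can never succeed; a wrong shape_dict entry propagates into exactly this kind of mismatch. One way to keep the hand-written shapes from drifting out of sync with the model is to build shape_dict from the graph inputs themselves. A minimal sketch, assuming every symbolic (dynamic) dimension can be fixed to 1:

import onnx
from tvm import relay

onnx_model = onnx.load("maxvit_surgeoned.onnx")

# Older exports also list initializers (weights) in graph.input,
# so keep only the real network inputs.
init_names = {init.name for init in onnx_model.graph.initializer}

# Build shape_dict from the graph itself, replacing any symbolic
# dimension with 1 so the provided shapes cannot disagree with
# what the model declares.
shape_dict = {}
for inp in onnx_model.graph.input:
    if inp.name in init_names:
        continue
    shape_dict[inp.name] = tuple(
        d.dim_value if d.HasField("dim_value") else 1
        for d in inp.type.tensor_type.shape.dim
    )

mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)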