Hi, everybody.
I have tried to use the AutoScheduler on a Mali GPU to tune a VGG model. It seems that Relay selects topi.nn.conv2d_winograd_nhwc
for the VGG model.
But it raises the below error:
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/shaohaizhu/workspace/tvm/python/tvm/auto_scheduler/relay_integration.py", line 67, in call_all_topi_funcs
opt_mod, _ = relay.optimize(mod, target, params)
File "/home/shaohaizhu/workspace/tvm/python/tvm/relay/build_module.py", line 400, in optimize
mod, params = bld_mod.optimize(mod, target, params)
File "/home/shaohaizhu/workspace/tvm/python/tvm/relay/build_module.py", line 187, in optimize
mod = self._optimize(mod, target)
File "/home/shaohaizhu/workspace/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 237, in __call__
raise get_last_ffi_error()
TypeError: Traceback (most recent call last):
28: TVMFuncCall
27: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::relay::backend::RelayBuildModule::GetFunction(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::runtime::ObjectPtr<tvm::runtime::Object> const&)::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#10}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
26: tvm::relay::backend::RelayBuildModule::Optimize(tvm::IRModule, tvm::runtime::Map<tvm::Integer, tvm::Target, void, void> const&, std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::runtime::NDArray, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, tvm::runtime::NDArray> > > const&)
25: tvm::transform::Pass::operator()(tvm::IRModule) const
24: tvm::transform::SequentialNode::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
23: tvm::transform::Pass::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
22: tvm::relay::transform::FunctionPassNode::operator()(tvm::IRModule, tvm::transform::PassContext const&) const
21: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::runtime::TypedPackedFunc<tvm::relay::Function (tvm::relay::Function, tvm::IRModule, tvm::transform::PassContext)>::AssignTypedLambda<tvm::relay::transform::AlterOpLayout()::{lambda(tvm::relay::Function, tvm::IRModule, tvm::transform::PassContext)#1}>(tvm::relay::transform::AlterOpLayout()::{lambda(tvm::relay::Function, tvm::IRModule, tvm::transform::PassContext)#1})::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
20: tvm::relay::alter_op_layout::AlterOpLayout(tvm::RelayExpr const&)
19: tvm::relay::ForwardRewrite(tvm::RelayExpr const&, tvm::runtime::TypedPackedFunc<tvm::RelayExpr (tvm::relay::Call const&, tvm::runtime::Array<tvm::RelayExpr, void> const&, tvm::runtime::ObjectRef const&)> const&, std::function<tvm::runtime::ObjectRef (tvm::relay::Call const&)>, std::function<tvm::RelayExpr (tvm::RelayExpr const&)>)
18: tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)
17: tvm::relay::MixedModeMutator::VisitLeaf(tvm::RelayExpr const&)
16: _ZN3tvm5relay16MixedModeMutator17DispatchVisitExprERKNS_9Re
15: tvm::relay::ExprMutator::VisitExpr(tvm::RelayExpr const&)
14: tvm::relay::ExprFunctor<tvm::RelayExpr (tvm::RelayExpr const&)>::VisitExpr(tvm::RelayExpr const&)
13: _ZZN3tvm5relay11ExprFunctorIFNS_9RelayExprERKS2_EE10InitVTableEvENUlR
12: tvm::relay::ExprMutator::VisitExpr_(tvm::relay::FunctionNode const*)
11: tvm::relay::MixedModeMutator::VisitExpr(tvm::RelayExpr const&)
10: tvm::relay::MixedModeMutator::VisitLeaf(tvm::RelayExpr const&)
9: _ZN3tvm5relay16MixedModeMutator17DispatchVisitExprERKNS_9Re
8: tvm::relay::ExprMutator::VisitExpr(tvm::RelayExpr const&)
7: tvm::relay::ExprFunctor<tvm::RelayExpr (tvm::RelayExpr const&)>::VisitExpr(tvm::RelayExpr const&)
6: _ZZN3tvm5relay11ExprFunctorIFNS_9RelayExprERKS2_EE10InitVTableEvENUlR
5: tvm::relay::MixedModeMutator::VisitExpr_(tvm::relay::CallNode const*)
4: tvm::relay::ForwardRewriter::Rewrite_(tvm::relay::CallNode const*, tvm::RelayExpr const&)
3: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::runtime::TypedPackedFunc<tvm::RelayExpr (tvm::relay::Call const&, tvm::runtime::Array<tvm::RelayExpr, void> const&, tvm::runtime::ObjectRef const&)>::AssignTypedLambda<tvm::RelayExpr (*)(tvm::relay::Call const&, tvm::runtime::Array<tvm::RelayExpr, void> const&, tvm::runtime::ObjectRef const&)>(tvm::RelayExpr (*)(tvm::relay::Call const&, tvm::runtime::Array<tvm::RelayExpr, void> const&, tvm::runtime::ObjectRef const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
2: tvm::RelayExpr tvm::relay::LayoutRewriter<tvm::relay::alter_op_layout::AlterTransformMemorizer>(tvm::relay::Call const&, tvm::runtime::Array<tvm::RelayExpr, void> const&, tvm::runtime::ObjectRef const&)
1: tvm::relay::alter_op_layout::AlterTransformMemorizer::CallWithNewLayouts(tvm::relay::Call const&, std::vector<tvm::RelayExpr, std::allocator<tvm::RelayExpr> > const&)
0: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), TVMFuncCreateFromCFunc::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#2}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
File "/home/shaohaizhu/workspace/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 81, in cfun
rv = local_pyfunc(*pyargs)
File "/home/shaohaizhu/workspace/tvm/python/tvm/relay/op/nn/_nn.py", line 184, in alter_op_layout_conv2d
return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)
File "<decorator-gen-58>", line 2, in conv2d_alter_layout
File "/home/shaohaizhu/workspace/tvm/python/tvm/target/generic_func.py", line 275, in dispatch_func
return dispatch_dict[k](*args, **kwargs)
File "/home/shaohaizhu/workspace/tvm/python/tvm/topi/mali/conv2d.py", line 489, in _alter_conv2d_layout
relay.op.get("nn.conv2d"), attrs, tinfos, out_type, target
File "/home/shaohaizhu/workspace/tvm/python/tvm/relay/backend/compile_engine.py", line 209, in select_implementation
outs = best_plevel_impl.compute(attrs, inputs, out_type) # haizhu.shao 调用conv2d.py中的conv2d_nhwc函数
File "/home/shaohaizhu/workspace/tvm/python/tvm/relay/op/op.py", line 90, in compute
return _OpImplementationCompute(self, attrs, inputs, out_type)
File "/home/shaohaizhu/workspace/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 237, in __call__
raise get_last_ffi_error()
3: TVMFuncCall
2: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::relay::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#4}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
1: tvm::relay::OpImplementation::Compute(tvm::Attrs const&, tvm::runtime::Array<tvm::te::Tensor, void> const&, tvm::Type const&)
0: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), TVMFuncCreateFromCFunc::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#2}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
File "/home/shaohaizhu/workspace/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 81, in cfun
rv = local_pyfunc(*pyargs)
File "/home/shaohaizhu/workspace/tvm/python/tvm/relay/op/strategy/generic.py", line 240, in _compute_conv2d
return [topi_compute(*args)]
File "<decorator-gen-60>", line 2, in conv2d_winograd_nhwc
File "/home/shaohaizhu/workspace/tvm/python/tvm/target/generic_func.py", line 275, in dispatch_func
return dispatch_dict[k](*args, **kwargs)
TypeError: conv2d_winograd_nhwc_mali() takes from 6 to 7 positional arguments but 8 were given
Then I checked the function decorated with @conv2d_winograd_nhwc.register(["mali"]) in "python/tvm/topi/mali/conv2d.py" and the generic function decorated with @tvm.target.generic_func, def conv2d_winograd_nhwc, in "python/tvm/topi/nn/conv2d.py". It seems the Mali version in "python/tvm/topi/mali/conv2d.py" is missing one argument, auto_scheduler_rewritten_layout="". I added this argument and then tried to use the AutoScheduler for auto-tuning again.
I am not sure whether my solution is correct. Has anyone else met the same problem and fixed it?