Hi, I’m trying to add int8 quantization to my autotvm tuning. For that, I simply copied these lines from @vinx13’s benchmark while extracting the tasks:
with relay.quantize.qconfig(store_lowbit_output=False):
model["main"]= relay.quantize.quantize(model["main"], params=params)
tasks = autotvm.task.extract_from_program(model["main"], target=target,
params=params, ops=(relay.op.nn.conv2d))
But this gives me an error like:
Traceback (most recent call last):
File "./auto_tune.py", line 52, in <module>
tasks = extract_tasks(model, params, target, quantize=True)
File "/home/can/Dev/tvm_wd/tuning_utils.py", line 51, in extract_tasks
model["main"]= relay.quantize.quantize(model["main"], params=params)
File "/home/can/Dev/tvm/python/tvm/relay/quantize/quantize.py", line 339, in quantize
mod = prerequisite_optimize(mod, params)
File "/home/can/Dev/tvm/python/tvm/relay/quantize/quantize.py", line 310, in prerequisite_optimize
mod['main'] = _bind_params(mod['main'], params)
TypeError: 'Function' object is not subscriptable
Then I removed the ['main'] parts after mod and modified the line to: mod= relay.quantize.quantize(mod, params=params)
This time I get another error:
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/can/Dev/tvm/python/tvm/autotvm/task/relay_integration.py", line 56, in _lower
opt_mod, _ = relay.optimize(mod, target, params)
File "/home/can/Dev/tvm/python/tvm/relay/build_module.py", line 303, in optimize
mod, params = bld_mod.optimize(mod, target, params)
File "/home/can/Dev/tvm/python/tvm/relay/build_module.py", line 157, in optimize
mod = self._optimize(mod, target)
File "tvm/_ffi/_cython/./packed_func.pxi", line 315, in tvm._ffi._cy3.core.PackedFuncBase.__call__
File "tvm/_ffi/_cython/./packed_func.pxi", line 250, in tvm._ffi._cy3.core.FuncCall
File "tvm/_ffi/_cython/./packed_func.pxi", line 239, in tvm._ffi._cy3.core.FuncCall3
File "tvm/_ffi/_cython/./base.pxi", line 160, in tvm._ffi._cy3.core.CALL
TypeError: Traceback (most recent call last):
[bt] (8) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::Interpreter::InvokePrimitiveOp(tvm::relay::Function const&, tvm::Array<tvm::runtime::ObjectRef, void> const&)+0x5fc) [0x7f3b3700354c]
[bt] (7) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::CompileEngineImpl::JIT(tvm::relay::CCacheKey const&)+0x32) [0x7f3b36fd0a62]
[bt] (6) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::CompileEngineImpl::LowerInternal(tvm::relay::CCacheKey const&)+0x685) [0x7f3b36fcff45]
[bt] (5) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::ScheduleGetter::Create(tvm::relay::Function const&)+0x574) [0x7f3b36fcc984]
[bt] (4) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::backend::MemoizedExprTranslator<tvm::Array<tvm::te::Tensor, void> >::VisitExpr(tvm::RelayExpr const&)+0xb3) [0x7f3b36fd2ff3]
[bt] (3) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::ExprFunctor<tvm::Array<tvm::te::Tensor, void> (tvm::RelayExpr const&)>::VisitExpr(tvm::RelayExpr const&)+0x91) [0x7f3b36fd13d1]
[bt] (2) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::ExprFunctor<tvm::Array<tvm::te::Tensor, void> (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<tvm::Array<tvm::te::Tensor, void> (tvm::RelayExpr const&)>*)#6}::_FUN(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<tvm::Array<tvm::te::Tensor, void> (tvm::RelayExpr const&)>*)+0x27) [0x7f3b36fc5137]
[bt] (1) /home/can/Dev/tvm/build/libtvm.so(tvm::relay::ScheduleGetter::VisitExpr_(tvm::relay::CallNode const*)+0x6b3) [0x7f3b36fcb5b3]
[bt] (0) /home/can/Dev/tvm/build/libtvm.so(+0xd27f1b) [0x7f3b37112f1b]
File "tvm/_ffi/_cython/./packed_func.pxi", line 55, in tvm._ffi._cy3.core.tvm_callback
File "/home/can/Dev/tvm/python/tvm/relay/backend/compile_engine.py", line 244, in lower_call
if env.wanted_relay_ops is not None and op not in env.wanted_relay_ops:
TypeError: argument of type 'function' is not iterable
I can’t figure out what is happening. If you could take a look, I would be glad.