Hi, I’m trying to optimize 1x1 convolution with TVM Ansor, with layout_free_placeholders
attributes for weights, the scheduler dumps an assertion error Check failed: (name_it != name_to_arg.end()) is false
.
The code is as following:
import tvm
from tvm import auto_scheduler, te
# [1x1 Convolution]
@auto_scheduler.register_workload
def buggy_conv(n: int, ic: int, oc: int, h: int, w: int):
X = te.placeholder((n, ic, h, w))
W = te.placeholder((oc, ic))
assert len(X.shape) >= 2
assert len(W.shape) == 2
r = te.reduce_axis((0, X.shape[1]), name=f'r')
Z = te.compute(
(n, oc, h, w),
# Not working
lambda *i: te.sum(
X[i[0], r, i[2], i[3]] * W[i[1], r],
axis=[r]
),
# Below works
# lambda i, j, k, l: te.sum(
# X[i, r, k, l] * W[j, r],
# axis=[r]
# ),
attrs={'layout_free_placeholders': [W]}, # Remove this line also works
name=f'conv_out'
)
return [X, W, Z]
if __name__ == '__main__':
target = tvm.target.Target(target='llvm', host='llvm')
task = auto_scheduler.SearchTask(func=buggy_conv, args=(32, 64, 128, 32, 32), target=target)
tune_options = auto_scheduler.TuningOptions(
num_measure_trials=256,
num_measures_per_round=64,
verbose=2,
)
task.tune(tune_options)
What’s more strange, change lambda parameters from *i
to i, j, k, l
also works.