Use `tvm.build` or `relay.build_module.build` to generate inference code.
Here, OpenCL is the `target` (the device the kernels run on), while `target_host` is the platform your host program runs on.
Below is an example for an `armv7l-linux-gnueabihf` device with OpenCL support.
This script was originally written for an Android device with OpenCL. To illustrate the meaning of `target` and `target_host` in cross-compilation, the `target_host` has been replaced with an arm-linux triple.
# Cross-compile MobileNet (from the MXNet model zoo) with TVM Relay for an
# ARM Linux device, generating OpenCL device kernels and an ARM host binary,
# then export the result as a shared library linked with a cross-toolchain.
import tvm  # explicit: `tvm.transform` / `tvm.IRModule` are used below
import tvm.relay.testing
from tvm import relay
from tvm.contrib.util import tempdir

from mxnet.gluon.model_zoo.vision import get_model

# `target` selects the device code generator (OpenCL kernels); `target_host`
# describes the CPU that drives the device (ARMv7 hard-float with NEON).
target = 'opencl'
target_host = 'llvm -device=arm_cpu -model=bcm2835 -mtriple=armv7l-linux-gnueabihf -mattr=+neon'

# Cross-compiler used by `export_library` to link the .so for the ARM target;
# `-lm` links libm for math symbols referenced by the generated code.
toolchain = '/root/cross-pi-gcc-8.3.0-1/bin/arm-linux-gnueabihf-gcc'
options = ['-lm']

# Model input: NCHW, one 3-channel 224x224 image.
dshape = (1, 3, 224, 224)
block = get_model("mobilenet0.25", pretrained=True)
shape_dict = {"data": dshape}
mod, params = relay.frontend.from_mxnet(block, shape_dict)

# Append a softmax so the network outputs probabilities rather than raw logits.
func = mod["main"]
func = relay.Function(
    func.params, relay.nn.softmax(func.body), None, func.type_params, func.attrs
)
# BUG FIX: the original built the unmodified `mod`, silently discarding the
# softmax-wrapped `func`. Rebuild the module from the new function so the
# compiled artifact actually includes the softmax.
mod = tvm.IRModule.from_expr(func)

with tvm.transform.PassContext(opt_level=3):
    lib = relay.build_module.build(mod, target=target, params=params, target_host=target_host)

# Export to a temp dir; `cc`/`options` make export_library invoke the ARM
# cross-linker instead of the default host compiler.
tmp = tempdir()
filename = "mobilenet.so"
lib.export_library(tmp.relpath(filename), cc=toolchain, options=options)