The following script also reproduces the same crash message.
import tvm
from tvm import relay
from tvm.ir.transform import Sequential
from tvm.contrib import graph_runtime
import numpy as np
# Build func_7: a three-output function mixing dtype families — a uint16 add,
# a float16 power, and a bool logical_or.  Each binary op wraps its RHS in
# reshape(x, shape_of(y)); the shapes are statically identical, so this is
# presumably there to force a dynamic-shape code path — NOTE(review): fuzzer-
# generated pattern, confirm intent against the original bug report.
mod = tvm.IRModule()
var_0 = relay.var("var_0", dtype = "uint16", shape = (15, 4, 16))#candidate|0|(15, 4, 16)|var|uint16
var_1 = relay.var("var_1", dtype = "uint16", shape = (15, 4, 16))#candidate|1|(15, 4, 16)|var|uint16
bop_2 = relay.add(var_0.astype('uint16'), relay.reshape(var_1.astype('uint16'), relay.shape_of(var_0))) # shape=(15, 4, 16)
bop_3 = relay.power(var_0.astype('float16'), relay.reshape(bop_2.astype('float16'), relay.shape_of(var_0))) # shape=(15, 4, 16)
var_4 = relay.var("var_4", dtype = "bool", shape = (8,))#candidate|4|(8,)|var|bool
var_5 = relay.var("var_5", dtype = "bool", shape = (8,))#candidate|5|(8,)|var|bool
bop_6 = relay.logical_or(var_4.astype('bool'), relay.reshape(var_5.astype('bool'), relay.shape_of(var_4))) # shape=(8,)
# Tuple output: (float16 (15,4,16), bool (8,)).
output = relay.Tuple([bop_3,bop_6,])
func_7 = relay.Function([var_0,var_1,var_4,var_5,], output)
mod['func_7'] = func_7
# Run type inference now so later functions are added to a typed module.
mod = relay.transform.InferType()(mod)
# Build func_11: floor_divide of a float32 scalar (shape ()) by a float32
# (11, 3) tensor; the scalar broadcasts, so the result is (11, 3).
var_8 = relay.var("var_8", dtype = "float32", shape = ())#candidate|8|()|var|float32
var_9 = relay.var("var_9", dtype = "float32", shape = (11, 3))#candidate|9|(11, 3)|var|float32
bop_10 = relay.floor_divide(var_8.astype('float32'), var_9.astype('float32')) # shape=(11, 3)
output = relay.Tuple([bop_10,])
func_11 = relay.Function([var_8,var_9,], output)
mod['func_11'] = func_11
mod = relay.transform.InferType()(mod)
# Build func_22: three outputs exercising float32, bool, float32/float64
# casts — floor_divide feeding logical_and on (8, 15), and logical_or
# feeding mod (float32) and floor_divide (float64) on (15, 5).  As in
# func_7, reshape(x, shape_of(y)) is a shape-preserving no-op by static
# shapes.
var_12 = relay.var("var_12", dtype = "float32", shape = (8, 15))#candidate|12|(8, 15)|var|float32
var_13 = relay.var("var_13", dtype = "float32", shape = (8, 15))#candidate|13|(8, 15)|var|float32
bop_14 = relay.floor_divide(var_12.astype('float32'), relay.reshape(var_13.astype('float32'), relay.shape_of(var_12))) # shape=(8, 15)
bop_15 = relay.logical_and(bop_14.astype('bool'), relay.reshape(var_13.astype('bool'), relay.shape_of(bop_14))) # shape=(8, 15)
var_16 = relay.var("var_16", dtype = "bool", shape = (15, 5))#candidate|16|(15, 5)|var|bool
var_17 = relay.var("var_17", dtype = "bool", shape = (15, 5))#candidate|17|(15, 5)|var|bool
bop_18 = relay.logical_or(var_16.astype('bool'), relay.reshape(var_17.astype('bool'), relay.shape_of(var_16))) # shape=(15, 5)
var_19 = relay.var("var_19", dtype = "bool", shape = (15, 5))#candidate|19|(15, 5)|var|bool
bop_20 = relay.mod(bop_18.astype('float32'), relay.reshape(var_19.astype('float32'), relay.shape_of(bop_18))) # shape=(15, 5)
bop_21 = relay.floor_divide(var_16.astype('float64'), relay.reshape(bop_18.astype('float64'), relay.shape_of(var_16))) # shape=(15, 5)
# Tuple output: (bool (8,15), float32 (15,5), float64 (15,5)).
output = relay.Tuple([bop_15,bop_20,bop_21,])
func_22 = relay.Function([var_12,var_13,var_16,var_17,var_19,], output)
mod['func_22'] = func_22
mod = relay.transform.InferType()(mod)
# Build 'main': mod() of a float16 (8,)-constant by the (reshaped) float16
# input var_24.  Only 'main' is invoked below; func_7/func_11/func_22 sit
# in the module but are not called from main — presumably they still go
# through compilation when the executor is created (NOTE(review): confirm
# whether the crash needs them present).
const_23 = relay.const([-1.235983,-4.153876,5.490331,-5.503126,9.040543,-6.415685,7.882599,-5.213575], dtype = "float16")#candidate|23|(8,)|const|float16
var_24 = relay.var("var_24", dtype = "float16", shape = (8,))#candidate|24|(8,)|var|float16
bop_25 = relay.mod(const_23.astype('float16'), relay.reshape(var_24.astype('float16'), relay.shape_of(const_23))) # shape=(8,)
output = relay.Tuple([bop_25,])
F = relay.Function([var_24,], output)
mod['main'] = F
mod = relay.transform.InferType()(mod)
# Dump the typed module so the failing IR can be inspected alongside the
# crash message.
print('==========irmod built by Relay==========')
print(mod.astext(show_meta_data=False))
print('===================================')
# Debug (interpreter-based) executor targeting LLVM on CPU.
intrp2 = relay.build_module.create_executor('debug', mod, tvm.device('llvm',0),'llvm')
# Concrete float16 input matching var_24's (8,) shape.
input_24= np.array([-5.767808,-7.854935,-3.281783,4.785201,-0.134391,-9.405264,6.846601,-5.962306], dtype='float16')
res2 = intrp2.evaluate()(input_24, ) # crash comes from here!!!!!!!