Hello!
I am wondering whether I am using TOPI correctly. I made a custom conv2d and ReLU using TOPI, as in the functions below.
def custom_Conv2d(input_data, output, param_1, strides=(1, 1), padding=(0, 0), dilation=(1, 1), opt_level=0):
    """Build and return a compiled CUDA conv2d module via TOPI.

    Fixes over the original:
    - The first parameter was misspelled ``intput_data`` while the body used
      ``input_data`` — so the body silently read a module-level global instead
      of the argument. The parameter is now spelled ``input_data`` (callers in
      this file pass it positionally, so this is compatible).
    - ``tgt``/``tgt_host`` were undefined inside the function (only described
      in a comment); they are now defined locally.
    - Mutable list defaults replaced with tuples (TOPI accepts either).
    - An unsupported ``opt_level`` now raises instead of leaving ``sch`` unbound.

    Parameters
    ----------
    input_data : tvm placeholder for the conv input (NCHW).
    output : unused — rebound to the conv2d compute result below; kept for
        signature compatibility with existing callers.
    param_1 : tvm placeholder for the conv weights (OIHW).
    strides, padding, dilation : conv2d hyperparameters forwarded to TOPI.
    opt_level : 0 selects the custom CASS schedule; > 0 selects the generic
        TOPI conv2d schedule.

    Returns
    -------
    The module produced by ``tvm.build``, callable as
    ``mod(input, weights, out_buffer)``.
    """
    tgt = 'cuda'
    tgt_host = 'llvm'
    with tvm.target.create('cuda'):
        # NOTE(review): the caller-supplied `output` tensor is ignored; the
        # compute result of conv2d defines the output buffer passed to build.
        output = topi.nn.conv2d(input_data, param_1, strides, padding, dilation)
        if opt_level == 0:
            sch = CASS.custom.schedule(output.op)
        elif opt_level > 0:
            sch = topi.generic.schedule_conv2d_nchw(output)
        else:
            raise ValueError("opt_level must be >= 0, got %r" % (opt_level,))
        ConvModule = tvm.build(sch, [input_data, param_1, output], tgt, tgt_host)
    return ConvModule
def custom_Relu(input_data, output, opt_level=0):
    """Build and return a compiled CUDA ReLU module via TOPI.

    Fixes over the original:
    - ``tgt``/``tgt_host`` were undefined inside the function; they are now
      defined locally (matching the values used for the conv2d module).
    - An unsupported ``opt_level`` now raises instead of leaving ``sch`` unbound.
    - The compute result is referenced consistently (the original mixed
      ``L1`` and ``output`` for the same tensor).

    Parameters
    ----------
    input_data : tvm placeholder for the ReLU input.
    output : unused — rebound to the relu compute result below; kept for
        signature compatibility with existing callers.
    opt_level : 0 selects the custom CASS schedule; > 0 selects the generic
        injective schedule.

    Returns
    -------
    The module produced by ``tvm.build``, callable as
    ``mod(input, out_buffer)``.
    """
    tgt = 'cuda'
    tgt_host = 'llvm'
    with tvm.target.create('cuda'):
        # NOTE(review): the caller-supplied `output` tensor is ignored; the
        # compute result of relu defines the output buffer passed to build.
        output = topi.nn.relu(input_data)
        if opt_level == 0:
            sch = CASS.custom.schedule(output.op)
        elif opt_level > 0:
            sch = topi.generic.schedule_injective(output)
        else:
            raise ValueError("opt_level must be >= 0, got %r" % (opt_level,))
        ReluModule = tvm.build(sch, [input_data, output], tgt, tgt_host)
    return ReluModule
The conv2d and ReLU could be fused together, but I split them here to simplify the question. I then create placeholders and build the custom modules:
# Symbolic tensor shapes for a 1x1x28x28 input convolved by six 1x3x3
# filters (stride 1, no padding -> 1x6x26x26 output).
# NOTE(review): no dtype is given, so these use tvm.placeholder's default
# dtype — confirm it matches the float32 arrays fed in at run time.
input_data = tvm.placeholder( (1,1,28,28) )
param_1 = tvm.placeholder( (6,1,3,3) )
output_data = tvm.placeholder( (1,6,26,26) )
Result = tvm.placeholder( (1,6,26,26) )
# Build the two compiled modules; opt_level=1 selects the generic TOPI
# schedules inside custom_Conv2d / custom_Relu.
Conv = custom_Conv2d( input_data, output_data, param_1 , opt_level = 1 )
Relu = custom_Relu ( output_data , Result, opt_level = 1 )
To test them, I create simple inputs and run the modules:
# Concrete device arrays matching the placeholder shapes above.
# NOTE(review): `ctx` is the TVM device context, defined elsewhere in the
# script (presumably tvm.gpu(0) since the target is CUDA) — confirm.
test_input1 = tvm.nd.array( np.random.uniform( size=(1,1,28,28)).astype("float32"),ctx)
test_param1 = tvm.nd.array( np.random.uniform( size=(6,1,3,3)).astype("float32"),ctx)
# Zero-filled output buffers that the compiled modules write into in place.
test_out1 = tvm.nd.array( np.zeros( (1,6,26,26)).astype("float32"),ctx)
test_result = tvm.nd.array( np.zeros( (1,6,26,26) ).astype("float32"),ctx )
# Run conv2d, then feed its output buffer into the ReLU module.
Conv(test_input1,test_param1,test_out1)
Relu(test_out1,test_result)
print(test_result.asnumpy())
The test_result looks correct when I print it, but I am not sure this is the right way to use TOPI.
Am I using it correctly, or is there a problem with this approach?