Dimension mismatch

An error occurred when I ran a code snippet that is a small self-attention implementation. I also disabled the 'AlterOpLayout' pass, but that didn't help. Is my code wrong? How should I modify it? Here is my source code:

from tvm import relay
import tvm
from tvm.contrib import graph_executor
import numpy as np

def dense(data, weight=None, units=None, **kwargs):
    # Helper that creates a fresh weight variable when none is given.
    name = kwargs.pop("name")
    if weight is None:
        weight = relay.var(name + "_weight")
    return relay.nn.dense(data, weight, units, **kwargs)

def batch_matmul(data, weight=None, **kwargs):
    return relay.nn.batch_matmul(data, weight)

def bert(seq_length=64, hidden_dims=1024, heads=64):
    data = relay.var("data", shape=(seq_length, hidden_dims), dtype="float32")
    body = relay.nn.relu(data=data)

    # attention
    d_k = hidden_dims // heads
    q = dense(data, units=hidden_dims, name="q")
    k = dense(data, units=hidden_dims, name="k")
    v = dense(data, units=hidden_dims, name="v")

    # split into heads
    q = relay.reshape(q, (-1, heads, d_k))
    k = relay.reshape(k, (-1, heads, d_k))
    q = relay.reshape(q, (-1, heads, d_k))

    # move heads to the batch dimension
    q = relay.transpose(q, (1, 0, 2))
    k = relay.transpose(k, (1, 0, 2))
    v = relay.transpose(v, (1, 2, 0))

    # attention scores and weighted values (no softmax/scaling here)
    logits = batch_matmul(q, k, name="logits")
    output = batch_matmul(logits, v, name="output")

    # merge heads back
    output = relay.transpose(output, (1, 0, 2))
    output = relay.reshape(output, (seq_length, hidden_dims))

    net = dense(output, units=hidden_dims, name="net")

    return relay.Function(relay.analysis.free_vars(net), net)

def get_workload():
    net = bert()
    mod = tvm.IRModule.from_expr(net)
    mod = relay.transform.InferType()(mod)
    shape_dict = {v.name_hint: v.checked_type for v in mod["main"].params}
    np.random.seed(0)
    params = {}
    for k, v in shape_dict.items():
        if k == "data":
            continue
        init_value = np.random.uniform(-1, 1, v.concrete_shape).astype(v.dtype)
        params[k] = tvm.nd.array(init_value, device=tvm.cpu(0))
    return mod, params

if __name__ == "__main__":
    mod, params = get_workload()
    data_shape = (64, 1024)

    opt_level = 4
    target = tvm.target.cuda()

    with tvm.transform.PassContext(opt_level=opt_level, disabled_pass={"AlterOpLayout"}):
        lib = relay.build(mod, target, params=params)

    dev = tvm.cuda()
    data = np.random.uniform(-1, 1, size=data_shape).astype("float32")
    # create module
    module = graph_executor.GraphModule(lib["default"](dev))
    # set input and parameters
    module.set_input("data", data)
    # run
    print(module.benchmark(dev, number=1, repeat=600))

And the error message:

Check failed: (!axes.defined() || static_cast<int>(axes.size()) == ndim) is false: Dimension mismatch: axes has 3 elements, but data.ndim = 2

I think it fails at

output = batch_matmul(logits,v,name='output')
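Although, re-reading the error, it mentions transpose rather than batch_matmul, and I notice my reshape block assigns to q twice and never reshapes v, so v is presumably still 2-D when relay.transpose(v, (1, 2, 0)) runs. If that is the cause, I guess the reshape block should be (just my assumption, I have not verified it):

q = relay.reshape(q, (-1, heads, d_k))
k = relay.reshape(k, (-1, heads, d_k))
v = relay.reshape(v, (-1, heads, d_k))  # my code reshaped q a second time here instead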

Thanks a lot!


Hey! I don't know if you have solved this problem yet. I changed the data shape in the bert function:

data = relay.var("data", shape=(seq_length, heads, 64), dtype="float32")

Changing the input from 2-D to 3-D like that helps.

But this change causes another error:

Check failed: (reporter->AssertEQ(xk, yk)) is false: BatchDot: shapes of x and y is inconsistent, x shape=[12, 1536, 1536], y shape=[12, 768, 128]
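For what it's worth, relay.nn.batch_matmul treats the second operand as transposed by default, i.e. out[b, i, j] = sum over k of x[b, i, k] * y[b, j, k], so the batch axis and the last (reduction) axis of x and y must match; in your error the last axes are 1536 vs 128, which is exactly what the AssertEQ(xk, yk) check is comparing. Here is a small sketch for checking inferred shapes (the shapes below are made up for illustration, not taken from the model above):

import tvm
from tvm import relay

# x: (batch, M, K), y: (batch, N, K) -> out: (batch, M, N), reducing over K
x = relay.var("x", shape=(12, 1536, 128), dtype="float32")
y = relay.var("y", shape=(12, 768, 128), dtype="float32")
out = relay.nn.batch_matmul(x, y)  # second operand is transposed by default

mod = tvm.IRModule.from_expr(relay.Function([x, y], out))
mod = relay.transform.InferType()(mod)
print(mod["main"].ret_type)  # Tensor[(12, 1536, 768), float32]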

Besides, I am doing similar work now. Could we talk more about this topic?

If you can speak Chinese, I think we can communicate more easily…