[BYOC] partition with Tensorflow failed: "contains free variables"

Hi,

I wrote my own CodeGen: NNOp, which works well using the build with partition on originally ONNX model. But when I try it on a Tensorflow model - I get an error: “contains free variables: [Var(NNOp_0_i0, ty=TensorType([], int32)), Var(NNOp_0_i1, ty=TensorType([], int32))]”.

My code is the same as: Compile Tensorflow Models — tvm 0.11.dev0 documentation with the only addition of my CodeGen attached:

import tvm

from tvm import te

from tvm import relay

import numpy as np

import os.path

import tensorflow as tf

from tvm.relay import transform

gpus = tf.config.list_physical_devices("GPU")

if gpus:

    try:

        for gpu in gpus:

            tf.config.experimental.set_memory_growth(gpu, True)

        print("tensorflow will use experimental.set_memory_growth(True)")

    except RuntimeError as e:

        print("experimental.set_memory_growth option is not available: {}".format(e))

try:

    tf_compat_v1 = tf.compat.v1

except ImportError:

    tf_compat_v1 = tf

import tvm.relay.testing.tf as tf_testing

repo_base = "https://github.com/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/"

img_name = "elephant-299.jpg"

image_url = os.path.join(repo_base, img_name)

######################################################################

# Tutorials

# ---------

model_name = "classify_image_graph_def-with_shapes.pb"

model_url = os.path.join(repo_base, model_name)

map_proto = "imagenet_2012_challenge_label_map_proto.pbtxt"

map_proto_url = os.path.join(repo_base, map_proto)

label_map = "imagenet_synset_to_human_label_map.txt"

label_map_url = os.path.join(repo_base, label_map)

target = tvm.target.Target("llvm", host="llvm")

layout = None

dev = tvm.cpu(0)

######################################################################

# Download required files

# -----------------------

from tvm.contrib.download import download_testdata

img_path = download_testdata(image_url, img_name, module="data")

model_path = download_testdata(model_url, model_name, module=["tf", "InceptionV1"])

map_proto_path = download_testdata(map_proto_url, map_proto, module="data")

label_path = download_testdata(label_map_url, label_map, module="data")

######################################################################

# Import model

# ------------

with tf_compat_v1.gfile.GFile(model_path, "rb") as f:

    graph_def = tf_compat_v1.GraphDef()

    graph_def.ParseFromString(f.read())

    graph = tf.import_graph_def(graph_def, name="")

    graph_def = tf_testing.ProcessGraphDefParam(graph_def)

    with tf_compat_v1.Session() as sess:

        graph_def = tf_testing.AddShapesToGraphDef(sess, "softmax")

######################################################################

# Decode image

from PIL import Image

image = Image.open(img_path).resize((299, 299))

x = np.array(image)

######################################################################

# Import the graph to Relay

# -------------------------

shape_dict = {"DecodeJpeg/contents": x.shape}

dtype_dict = {"DecodeJpeg/contents": "uint8"}

mod, params = relay.frontend.from_tensorflow(graph_def, layout=layout, shape=shape_dict)

print("Tensorflow protobuf imported to relay frontend.")

######################################################################

# Relay Build

# ----------

# my Addition

if not tvm.get_global_func("relay.ext.NNOp", True):

        print("skip because NNOp codegen is not available")

mod = transform.AnnotateTarget(["NNOp"])(mod)

mod = transform.MergeCompilerRegions()(mod)

mod = transform.PartitionGraph()(mod)

# end of my Addition

with tvm.transform.PassContext(opt_level=3):

    lib = relay.build(mod, target, params=params)