I get an error when loading the ONNX model. The code is as follows:

import onnx
import numpy as np
import onnxruntime as ort
import tvm
from tvm import relay
from tvm.relay.import_model import import_model_to_igie
# Load the model
model_path = '/gptsovits/sovits_infer/date/t2s_first_stage_decoder_simp.onnx'
model = onnx.load(model_path)
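
# Optional sanity check (standard onnx API): the checker flags structural
# problems in the graph before any runtime or compiler is involved.
onnx.checker.check_model(model)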
# Create an InferenceSession to validate the model
try:
    session = ort.InferenceSession(model_path, providers=['CPUExecutionProvider'])
    print("InferenceSession created successfully.")
except Exception as e:
    print(f"Failed to create InferenceSession: {e}")
    exit(1)
# Test inputs
input_data = {
    'all_phoneme_ids': np.zeros((1, 64), dtype=np.int64),
    'bert': np.zeros((1, 1024, 64), dtype=np.float32),
    'prompt': np.zeros((1, 64), dtype=np.int64),
    'top_k': np.array([5], dtype=np.int64),
    'temperature': np.array([1.0], dtype=np.float32)
}
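
# Hedged diagnostic (standard onnxruntime API): compare the model's declared
# inputs with the hand-built dict above, in case a name, shape, or dtype
# mismatch is what triggers the error.
for inp in session.get_inputs():
    print(inp.name, inp.shape, inp.type)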
# Run inference
try:
    outputs = session.run(None, input_data)
    print("ONNX inference succeeded, outputs:", outputs)
except Exception as e:
    print(f"Inference failed: {e}")
    exit(1)
# Fallback: use the standard ONNX import
print("Importing model (standard ONNX import)...")
try:
    shape_dict_standard = {
        'all_phoneme_ids': (1, 64),
        'bert': (1, 1024, 64),
        'prompt': (1, 64),
        'top_k': (1,),
        'temperature': (1,)
    }
    mod, params = relay.frontend.from_onnx(model, shape_dict_standard)
    print("✅ Model imported successfully (standard ONNX import).")