|
本帖最后由 xavi 于 2023-4-15 20:35 编辑
使用 rknn_toolkit_lite2 在 RK3588 上部署模型，调用 rknn_lite.inference 时出现如下错误：E RKNN: failed to submit! Op id:3 , op name: conv:inputs.3_Conv_pads_conv, flags: 0x5, task start:0,task number:218,run task number:218 , run task counter: 146 int status: 0，同时模型推理精度大幅下降。
请教一下这种问题该如何解决?感谢各位大佬不吝赐教
代码如下:
import numpy as np
import platform
from rknnlite.api import RKNNLite
# Device-tree node used to identify rk356x/rk3588 SoCs on ARM Linux.
DEVICE_COMPATIBLE_NODE = '/proc/device-tree/compatible'


def get_host():
    """Return an identifier for the host platform.

    On 64-bit ARM Linux ('Linux-aarch64') the device-tree compatible
    string is read to distinguish 'RK3588' from 'RK356x'. On any other
    platform the plain '<system>-<machine>' string is returned.

    Exits the process with status -1 if the device-tree node exists on
    this platform class but cannot be read.
    """
    system = platform.system()
    machine = platform.machine()
    os_machine = system + '-' + machine
    if os_machine == 'Linux-aarch64':
        try:
            with open(DEVICE_COMPATIBLE_NODE) as f:
                device_compatible_str = f.read()
        except IOError:
            print('Read device node {} failed.'.format(DEVICE_COMPATIBLE_NODE))
            exit(-1)
        # The membership test cannot raise IOError, so it lives outside the try.
        host = 'RK3588' if 'rk3588' in device_compatible_str else 'RK356x'
    else:
        host = os_machine
    return host
# RKNN model file pre-converted (and quantized) offline for the RK3588 target.
RK3588_RKNN_MODEL = '3588_saved_model_quantization_2.rknn'


if __name__ == '__main__':
    host_name = get_host()
    rknn_model = RK3588_RKNN_MODEL
    rknn_lite = RKNNLite()

    # Load the pre-converted RKNN model from disk.
    print('load RKNN model')
    ret = rknn_lite.load_rknn(rknn_model)
    if ret != 0:
        print('Load RKNN model failed')
        exit(ret)
    print('load over')

    # Initialise the NPU runtime pinned to a single core.
    print('init runtime environment')
    # Debian OS
    ret = rknn_lite.init_runtime(core_mask=RKNNLite.NPU_CORE_0)
    if ret != 0:
        print('Init runtime environment failed')
        exit(ret)
    print('init over')

    # Test inputs (NHWC layout per the filename) and one-hot labels.
    # NOTE(review): assumes test_Y is one-hot along the last axis — confirm.
    data_in = np.load(r'./test_data_X_nhwc.npy')
    test_Y = np.load(r'./test_data_Y.npy')

    print('running model')
    outputs = rknn_lite.inference(inputs=[data_in])
    np.save('./rk3588_result.npy', outputs[0])

    # Reload the saved result and score accuracy by class-index agreement.
    final_result = np.load(r'./rk3588_result.npy')
    preds = final_result.argmax(axis=-1)
    acc = np.mean(preds == test_Y.argmax(axis=-1))
    print(acc)
    print('run over')

    # Release NPU runtime resources (was missing in the original script).
    rknn_lite.release()
|
本帖子中包含更多资源
您需要 登录 才可以下载或查看,没有帐号?立即注册
x
|