
Commit 81d2824

pkhk-1 and lyuwenyu authored
[test_tipc] add paddle2onnx; fix onnxruntime infer_demo (#5857)
Co-authored-by: lyuwenyu <wenyu.lyu@gmail.com>
1 parent 4d59784 commit 81d2824

File tree

4 files changed: 168 additions, 10 deletions

deploy/third_engine/demo_onnxruntime/infer_demo.py

Lines changed: 23 additions & 9 deletions
@@ -38,8 +38,12 @@ def __init__(self,
         so = ort.SessionOptions()
         so.log_severity_level = 3
         self.net = ort.InferenceSession(model_pb_path, so)
-        self.input_shape = (self.net.get_inputs()[0].shape[2],
-                            self.net.get_inputs()[0].shape[3])
+        inputs_name = [a.name for a in self.net.get_inputs()]
+        inputs_shape = {
+            k: v.shape
+            for k, v in zip(inputs_name, self.net.get_inputs())
+        }
+        self.input_shape = inputs_shape['image'][2:]

     def _normalize(self, img):
         img = img.astype(np.float32)
@@ -51,6 +55,8 @@ def resize_image(self, srcimg, keep_ratio=False):
         origin_shape = srcimg.shape[:2]
         im_scale_y = newh / float(origin_shape[0])
         im_scale_x = neww / float(origin_shape[1])
+        img_shape = np.array([[float(origin_shape[0]), float(origin_shape[1])]
+                              ]).astype('float32')
         scale_factor = np.array([[im_scale_y, im_scale_x]]).astype('float32')

         if keep_ratio and srcimg.shape[0] != srcimg.shape[1]:
@@ -87,7 +93,7 @@ def resize_image(self, srcimg, keep_ratio=False):
             img = cv2.resize(
                 srcimg, self.input_shape, interpolation=cv2.INTER_AREA)

-        return img, scale_factor
+        return img, img_shape, scale_factor

     def get_color_map_list(self, num_classes):
         color_map = num_classes * [0, 0, 0]
@@ -104,15 +110,20 @@ def get_color_map_list(self, num_classes):
         return color_map

     def detect(self, srcimg):
-        img, scale_factor = self.resize_image(srcimg)
+        img, im_shape, scale_factor = self.resize_image(srcimg)
         img = self._normalize(img)

         blob = np.expand_dims(np.transpose(img, (2, 0, 1)), axis=0)

-        outs = self.net.run(None, {
-            self.net.get_inputs()[0].name: blob,
-            self.net.get_inputs()[1].name: scale_factor
-        })
+        inputs_dict = {
+            'im_shape': im_shape,
+            'image': blob,
+            'scale_factor': scale_factor
+        }
+        inputs_name = [a.name for a in self.net.get_inputs()]
+        net_inputs = {k: inputs_dict[k] for k in inputs_name}
+
+        outs = self.net.run(None, net_inputs)

         outs = np.array(outs[0])
         expect_boxes = (outs[:, 1] > 0.5) & (outs[:, 0] > -1)
@@ -181,7 +192,7 @@ def detect_folder(self, img_fold, result_path):
     parser.add_argument(
         "--img_fold", dest="img_fold", type=str, default="./imgs")
     parser.add_argument(
-        "--result_fold", dest="result_fold", type=str, default="./results")
+        "--result_fold", dest="result_fold", type=str, default="results")
     args = parser.parse_args()

     net = PicoDet(
@@ -191,3 +202,6 @@ def detect_folder(self, img_fold, result_path):
         iou_threshold=args.nmsThreshold)

     net.detect_folder(args.img_fold, args.result_fold)
+    print(
+        f'infer results in ./deploy/third_engine/demo_onnxruntime/{args.result_fold}'
+    )
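The key change above is that the demo now builds its ONNX Runtime feed dictionary from the session's own input names (im_shape, image, scale_factor) instead of positional input indices, and reads the network input resolution from the image tensor's shape. To exercise the fixed demo on its own, here is a minimal sketch using the flag values from the new TIPC config below; it assumes the ONNX model has already been exported to the configured onnx_file/ location.

# minimal sketch: run the ONNX Runtime PicoDet demo on a folder of images
# (paths follow the TIPC config added in this commit; adjust to your checkout)
cd deploy/third_engine/demo_onnxruntime
python infer_demo.py \
    --modelpath ./onnx_file/picodet_s_320_coco.onnx \
    --img_fold ./imgs \
    --result_fold results
# result images are written under deploy/third_engine/demo_onnxruntime/results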
New file: paddle2onnx TIPC params (picodet_s_320_coco_lcnet)

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+===========================paddle2onnx_params===========================
+model_name:picodet_s_320_coco_lcnet
+python:python3.7
+filename:null
+##
+--output_dir:./output_inference
+weights:https://paddledet.bj.bcebos.com/models/picodet_s_320_coco_lcnet.pdparams
+norm_export:tools/export_model.py -c configs/picodet/picodet_s_320_coco_lcnet.yml -o
+quant_export:tools/export_model.py -c configs/picodet/picodet_s_320_coco_lcnet.yml --slim_config configs/picodet/picodet_s_320_coco_lcnet.yml -o
+fpgm_export:tools/export_model.py -c configs/picodet/picodet_s_320_coco_lcnet.yml --slim_config configs/picodet/picodet_s_320_coco_lcnet.yml -o
+distill_export:null
+export1:null
+export2:null
+kl_quant_export:tools/post_quant.py -c configs/picodet/picodet_s_320_coco_lcnet.yml --slim_config configs/picodet/picodet_s_320_coco_lcnet.yml -o
+##
+2onnx: paddle2onnx
+--model_dir:./output_inference/picodet_s_320_coco_lcnet/
+--model_filename:model.pdmodel
+--params_filename:model.pdiparams
+--save_file:./deploy/third_engine/demo_onnxruntime/onnx_file/picodet_s_320_coco.onnx
+--opset_version:11
+##
+inference:infer_demo.py
+--modelpath:./onnx_file/picodet_s_320_coco.onnx
+--img_fold:./imgs
+--result_fold:results
+infer_mode:norm
+null:null
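For reference, the conversion block of this config (--model_dir, --model_filename, --params_filename, --save_file, --opset_version) is what test_tipc/test_paddle2onnx.sh stitches into the paddle2onnx call; assembled by hand it is roughly the command below. This is a sketch derived from the key/value pairs above, not a command printed by the script itself.

paddle2onnx \
    --model_dir ./output_inference/picodet_s_320_coco_lcnet/ \
    --model_filename model.pdmodel \
    --params_filename model.pdiparams \
    --save_file ./deploy/third_engine/demo_onnxruntime/onnx_file/picodet_s_320_coco.onnx \
    --opset_version 11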

test_tipc/prepare.sh

Lines changed: 5 additions & 1 deletion
@@ -4,7 +4,7 @@ source test_tipc/utils_func.sh
 FILENAME=$1
 # MODE be one of ['lite_train_lite_infer' 'lite_train_whole_infer'
 #                 'whole_train_whole_infer', 'whole_infer', 'klquant_whole_infer',
-#                 'cpp_infer', 'serving_infer', 'lite_infer']
+#                 'cpp_infer', 'serving_infer', 'lite_infer', 'paddle2onnx_infer']
 MODE=$2

 # parse params
@@ -67,6 +67,10 @@ elif [ ${MODE} = "benchmark_train" ];then
     wget -nc -P ./dataset/mot/ https://paddledet.bj.bcebos.com/data/mot_benchmark.tar
     cd ./dataset/mot/ && tar -xvf mot_benchmark.tar && mv -u mot_benchmark/* .
     rm -rf mot_benchmark/ && cd ../../
+elif [ ${MODE} = "paddle2onnx_infer" ];then
+    # set up the paddle2onnx_infer environment
+    ${python} -m pip install paddle2onnx
+    ${python} -m pip install onnxruntime==1.10.0
 else
     # download coco lite data
     wget -nc -P ./dataset/coco/ https://paddledet.bj.bcebos.com/data/tipc/coco_tipc.tar
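The new branch is selected by passing paddle2onnx_infer as the MODE argument. A minimal sketch of the preparation step follows; the config path is a placeholder for the new paddle2onnx params file added in this commit.

# install paddle2onnx and onnxruntime for the ONNX test path
bash test_tipc/prepare.sh ${PADDLE2ONNX_CONFIG_TXT} paddle2onnx_infer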

test_tipc/test_paddle2onnx.sh

Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
#!/bin/bash
2+
source test_tipc/utils_func.sh
3+
4+
FILENAME=$1
5+
6+
# parser model_name
7+
dataline=$(cat ${FILENAME})
8+
IFS=$'\n'
9+
lines=(${dataline})
10+
model_name=$(func_parser_value "${lines[1]}")
11+
echo "ppdet onnx_infer: ${model_name}"
12+
python=$(func_parser_value "${lines[2]}")
13+
filename_key=$(func_parser_key "${lines[3]}")
14+
filename_value=$(func_parser_value "${lines[3]}")
15+
16+
# export params
17+
save_export_key=$(func_parser_key "${lines[5]}")
18+
save_export_value=$(func_parser_value "${lines[5]}")
19+
export_weight_key=$(func_parser_key "${lines[6]}")
20+
export_weight_value=$(func_parser_value "${lines[6]}")
21+
norm_export=$(func_parser_value "${lines[7]}")
22+
pact_export=$(func_parser_value "${lines[8]}")
23+
fpgm_export=$(func_parser_value "${lines[9]}")
24+
distill_export=$(func_parser_value "${lines[10]}")
25+
export_key1=$(func_parser_key "${lines[11]}")
26+
export_value1=$(func_parser_value "${lines[11]}")
27+
export_key2=$(func_parser_key "${lines[12]}")
28+
export_value2=$(func_parser_value "${lines[12]}")
29+
kl_quant_export=$(func_parser_value "${lines[13]}")
30+
31+
# parser paddle2onnx
32+
padlle2onnx_cmd=$(func_parser_value "${lines[15]}")
33+
infer_model_dir_key=$(func_parser_key "${lines[16]}")
34+
infer_model_dir_value=$(func_parser_value "${lines[16]}")
35+
model_filename_key=$(func_parser_key "${lines[17]}")
36+
model_filename_value=$(func_parser_value "${lines[17]}")
37+
params_filename_key=$(func_parser_key "${lines[18]}")
38+
params_filename_value=$(func_parser_value "${lines[18]}")
39+
save_file_key=$(func_parser_key "${lines[19]}")
40+
save_file_value=$(func_parser_value "${lines[19]}")
41+
opset_version_key=$(func_parser_key "${lines[20]}")
42+
opset_version_value=$(func_parser_value "${lines[20]}")
43+
44+
# parser onnx inference
45+
inference_py=$(func_parser_value "${lines[22]}")
46+
model_file_key=$(func_parser_key "${lines[23]}")
47+
model_file_value=$(func_parser_value "${lines[23]}")
48+
img_fold_key=$(func_parser_key "${lines[24]}")
49+
img_fold_value=$(func_parser_value "${lines[24]}")
50+
results_fold_key=$(func_parser_key "${lines[25]}")
51+
results_fold_value=$(func_parser_value "${lines[25]}")
52+
onnx_infer_mode_list=$(func_parser_value "${lines[26]}")
53+
54+
LOG_PATH="./test_tipc/output"
55+
mkdir -p ${LOG_PATH}
56+
status_log="${LOG_PATH}/results_paddle2onnx.log"
57+
58+
function func_paddle2onnx(){
59+
IFS='|'
60+
_script=$1
61+
62+
# paddle2onnx
63+
echo "################### run onnx export ###################"
64+
_save_log_path="${LOG_PATH}/paddle2onnx_infer_cpu.log"
65+
set_dirname=$(func_set_params "${infer_model_dir_key}" "${infer_model_dir_value}")
66+
set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}")
67+
set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}")
68+
set_save_model=$(func_set_params "${save_file_key}" "${save_file_value}")
69+
set_opset_version=$(func_set_params "${opset_version_key}" "${opset_version_value}")
70+
trans_model_cmd="${padlle2onnx_cmd} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_save_model} ${set_opset_version}"
71+
eval $trans_model_cmd
72+
last_status=${PIPESTATUS[0]}
73+
status_check $last_status "${trans_model_cmd}" "${status_log}"
74+
# python inference
75+
echo "################### run infer ###################"
76+
cd ./deploy/third_engine/demo_onnxruntime/
77+
model_file=$(func_set_params "${model_file_key}" "${model_file_value}")
78+
img_fold=$(func_set_params "${img_fold_key}" "${img_fold_value}")
79+
results_fold=$(func_set_params "${results_fold_key}" "${results_fold_value}")
80+
infer_model_cmd="${python} ${inference_py} ${model_file} ${img_fold} ${results_fold}"
81+
eval $infer_model_cmd
82+
last_status=${PIPESTATUS[0]}
83+
status_check $last_status "${infer_model_cmd}" "${status_log}"
84+
}
85+
86+
export Count=0
87+
IFS="|"
88+
echo "################### run paddle export ###################"
89+
for infer_mode in ${onnx_infer_mode_list[*]}; do
90+
91+
# run export
92+
case ${infer_mode} in
93+
norm) run_export=${norm_export} ;;
94+
quant) run_export=${pact_export} ;;
95+
fpgm) run_export=${fpgm_export} ;;
96+
distill) run_export=${distill_export} ;;
97+
kl_quant) run_export=${kl_quant_export} ;;
98+
*) echo "Undefined infer_mode!"; exit 1;
99+
esac
100+
if [ ${run_export} = "null" ]; then
101+
continue
102+
fi
103+
set_export_weight=$(func_set_params "${export_weight_key}" "${export_weight_value}")
104+
set_save_export_dir=$(func_set_params "${save_export_key}" "${save_export_value}")
105+
set_filename=$(func_set_params "${filename_key}" "${model_name}")
106+
export_cmd="${python} ${run_export} ${set_export_weight} ${set_filename} ${set_save_export_dir} "
107+
echo $export_cmd
108+
eval $export_cmd
109+
status_export=$?
110+
status_check $status_export "${export_cmd}" "${status_log}"
111+
done
112+
func_paddle2onnx
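Putting it together, the new script chains Paddle export, paddle2onnx conversion, and ONNX Runtime inference. A hedged sketch of a full run, after the preparation step shown earlier and using the same placeholder config path:

bash test_tipc/test_paddle2onnx.sh ${PADDLE2ONNX_CONFIG_TXT}
# per-step status lines are appended to ./test_tipc/output/results_paddle2onnx.log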
