
Commit aa4f7e9

fix
1 parent d1a2606 commit aa4f7e9

12 files changed (+433 -413 lines)

test/ir/inference/test_trt_convert_activation.py (+1 -1)

@@ -52,7 +52,7 @@ def generate_input(attrs: list[dict[str, Any]]):
                 "stanh",
                 "thresholded_relu",
                 "celu",
-                # "logsigmoid",
+                "logsigmoid",
                 "tanh_shrink",
                 "softplus",
                 # "hard_swish",

test/ir/inference/test_trt_convert_einsum.py (+40 -37)

@@ -380,34 +380,35 @@ def generate_input_matrix(input_shape):
                 outputs=["einsum_output_data"],
             )
 
-            yield program_config
+            yield program_config
+
+    def generate_dynamic_shape(self):
+        min_xshape = self.x_shape[:]
+        max_xshape = self.x_shape[:]
+        min_yshape = self.y_shape[:]
+        max_yshape = self.y_shape[:]
+        if "b" in self.equation:
+            min_xshape[0] = 1
+            max_xshape[0] = 4
+            min_yshape[0] = 1
+            max_yshape[0] = 4
+        self.dynamic_shape.min_input_shape = {
+            "operands_data0": min_xshape,
+            "operands_data1": min_yshape,
+        }
+        self.dynamic_shape.max_input_shape = {
+            "operands_data0": max_xshape,
+            "operands_data1": max_yshape,
+        }
+        self.dynamic_shape.opt_input_shape = {
+            "operands_data0": self.x_shape,
+            "operands_data1": self.y_shape,
+        }
+        return self.dynamic_shape
 
     def sample_predictor_configs(
-        self, program_config
+        self, program_config, run_pir=False
     ) -> tuple[paddle_infer.Config, list[int], float]:
-        def generate_dynamic_shape(attrs):
-            min_xshape = self.x_shape[:]
-            max_xshape = self.x_shape[:]
-            min_yshape = self.y_shape[:]
-            max_yshape = self.y_shape[:]
-            if "b" in self.equation:
-                min_xshape[0] = 1
-                max_xshape[0] = 4
-                min_yshape[0] = 1
-                max_yshape[0] = 4
-            self.dynamic_shape.min_input_shape = {
-                "operands_data0": min_xshape,
-                "operands_data1": min_yshape,
-            }
-            self.dynamic_shape.max_input_shape = {
-                "operands_data0": max_xshape,
-                "operands_data1": max_yshape,
-            }
-            self.dynamic_shape.opt_input_shape = {
-                "operands_data0": self.x_shape,
-                "operands_data1": self.y_shape,
-            }
-
         def clear_dynamic_shape():
             self.dynamic_shape.min_input_shape = {}
             self.dynamic_shape.max_input_shape = {}
@@ -424,19 +425,20 @@ def generate_trt_nodes_num(attrs, dynamic_shape):
 
         # for static_shape
         clear_dynamic_shape()
-        self.trt_param.precision = paddle_infer.PrecisionType.Float32
-        program_config.set_input_type(np.float32)
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, False
-        ), 1e-5
-        self.trt_param.precision = paddle_infer.PrecisionType.Half
-        program_config.set_input_type(np.float16)
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, False
-        ), 1e-5
+        if not run_pir:
+            self.trt_param.precision = paddle_infer.PrecisionType.Float32
+            program_config.set_input_type(np.float32)
+            yield self.create_inference_config(), generate_trt_nodes_num(
+                attrs, False
+            ), 1e-5
+            self.trt_param.precision = paddle_infer.PrecisionType.Half
+            program_config.set_input_type(np.float16)
+            yield self.create_inference_config(), generate_trt_nodes_num(
+                attrs, False
+            ), 1e-5
 
         # for dynamic_shape
-        generate_dynamic_shape(attrs)
+        self.generate_dynamic_shape()
         self.trt_param.precision = paddle_infer.PrecisionType.Float32
         program_config.set_input_type(np.float32)
         yield self.create_inference_config(), generate_trt_nodes_num(
@@ -449,7 +451,8 @@ def generate_trt_nodes_num(attrs, dynamic_shape):
         ), 1e-5
 
     def test(self):
-        self.run_test()
+        # self.run_test()
+        self.run_test(run_pir=True)
 
 
 if __name__ == "__main__":
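The gather and group_norm diffs below apply the same refactor as this einsum change: generate_dynamic_shape is hoisted out of sample_predictor_configs into a class-level method, sample_predictor_configs gains a run_pir=False parameter that skips the static-shape configs, and test() calls self.run_test(run_pir=True). A minimal sketch of the resulting test shape follows; it assumes the TrtLayerAutoScanTest base class used by these suites, and the class name TrtConvertFooTest, the input name "foo_input", and the (1, 2) node counts are hypothetical placeholders, not taken from this commit.

# Sketch of the refactor pattern only -- not a copy of any one test in this
# commit. TrtConvertFooTest, "foo_input", and the (1, 2) node counts are
# hypothetical; the base class and helpers come from the existing test suite.
import numpy as np
import paddle.inference as paddle_infer
from trt_layer_auto_scan_test import TrtLayerAutoScanTest


class TrtConvertFooTest(TrtLayerAutoScanTest):
    # sample_program_configs(), generate_trt_nodes_num(), etc. omitted.

    def generate_dynamic_shape(self):
        # Dynamic-shape setup now lives on the class, so both the legacy
        # and the PIR-TRT paths can reuse it.
        self.dynamic_shape.min_input_shape = {"foo_input": [1, 16, 16, 16]}
        self.dynamic_shape.max_input_shape = {"foo_input": [4, 64, 128, 128]}
        self.dynamic_shape.opt_input_shape = {"foo_input": [1, 32, 64, 64]}
        return self.dynamic_shape

    def sample_predictor_configs(
        self, program_config, run_pir=False
    ) -> tuple[paddle_infer.Config, list[int], float]:
        def clear_dynamic_shape():
            self.dynamic_shape.min_input_shape = {}
            self.dynamic_shape.max_input_shape = {}
            self.dynamic_shape.opt_input_shape = {}

        # Static-shape configs are only generated on the legacy path.
        if not run_pir:
            clear_dynamic_shape()
            self.trt_param.precision = paddle_infer.PrecisionType.Float32
            program_config.set_input_type(np.float32)
            yield self.create_inference_config(), (1, 2), 1e-5

        # Dynamic-shape configs run on both paths.
        self.generate_dynamic_shape()
        self.trt_param.precision = paddle_infer.PrecisionType.Float32
        program_config.set_input_type(np.float32)
        yield self.create_inference_config(), (1, 2), 1e-5

    def test(self):
        self.run_test(run_pir=True)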

test/ir/inference/test_trt_convert_gather.py (+69 -67)

@@ -121,63 +121,64 @@ def generate_input3(axis):
 
         yield program_config
 
+    def generate_dynamic_shape(self):
+        if len(self.shape) == 1:
+            self.dynamic_shape.min_input_shape = {
+                "input_data": [4],
+                "index_data": [1],
+            }
+            self.dynamic_shape.max_input_shape = {
+                "input_data": [128],
+                "index_data": [4],
+            }
+            self.dynamic_shape.opt_input_shape = {
+                "input_data": [16],
+                "index_data": [2],
+            }
+        elif len(self.shape) == 2:
+            self.dynamic_shape.min_input_shape = {
+                "input_data": [2, 4],
+                "index_data": [1],
+            }
+            self.dynamic_shape.max_input_shape = {
+                "input_data": [256, 256],
+                "index_data": [4],
+            }
+            self.dynamic_shape.opt_input_shape = {
+                "input_data": [64, 32],
+                "index_data": [2],
+            }
+        elif len(self.shape) == 3:
+            self.dynamic_shape.min_input_shape = {
+                "input_data": [2, 4, 4],
+                "index_data": [1],
+            }
+            self.dynamic_shape.max_input_shape = {
+                "input_data": [128, 256, 256],
+                "index_data": [4],
+            }
+            self.dynamic_shape.opt_input_shape = {
+                "input_data": [16, 64, 32],
+                "index_data": [2],
+            }
+        elif len(self.shape) == 4:
+            self.dynamic_shape.min_input_shape = {
+                "input_data": [2, 4, 4, 2],
+                "index_data": [1],
+            }
+            self.dynamic_shape.max_input_shape = {
+                "input_data": [128, 256, 64, 128],
+                "index_data": [4],
+            }
+            self.dynamic_shape.opt_input_shape = {
+                "input_data": [16, 64, 16, 32],
+                "index_data": [2],
+            }
+        return self.dynamic_shape
+
     def sample_predictor_configs(
-        self, program_config
+        self, program_config, run_pir=False
     ) -> tuple[paddle_infer.Config, list[int], float]:
-        def generate_dynamic_shape(attrs):
-            if len(self.shape) == 1:
-                self.dynamic_shape.min_input_shape = {
-                    "input_data": [4],
-                    "index_data": [1],
-                }
-                self.dynamic_shape.max_input_shape = {
-                    "input_data": [128],
-                    "index_data": [4],
-                }
-                self.dynamic_shape.opt_input_shape = {
-                    "input_data": [16],
-                    "index_data": [2],
-                }
-            elif len(self.shape) == 2:
-                self.dynamic_shape.min_input_shape = {
-                    "input_data": [2, 4],
-                    "index_data": [1],
-                }
-                self.dynamic_shape.max_input_shape = {
-                    "input_data": [256, 256],
-                    "index_data": [4],
-                }
-                self.dynamic_shape.opt_input_shape = {
-                    "input_data": [64, 32],
-                    "index_data": [2],
-                }
-            elif len(self.shape) == 3:
-                self.dynamic_shape.min_input_shape = {
-                    "input_data": [2, 4, 4],
-                    "index_data": [1],
-                }
-                self.dynamic_shape.max_input_shape = {
-                    "input_data": [128, 256, 256],
-                    "index_data": [4],
-                }
-                self.dynamic_shape.opt_input_shape = {
-                    "input_data": [16, 64, 32],
-                    "index_data": [2],
-                }
-            elif len(self.shape) == 4:
-                self.dynamic_shape.min_input_shape = {
-                    "input_data": [2, 4, 4, 2],
-                    "index_data": [1],
-                }
-                self.dynamic_shape.max_input_shape = {
-                    "input_data": [128, 256, 64, 128],
-                    "index_data": [4],
-                }
-                self.dynamic_shape.opt_input_shape = {
-                    "input_data": [16, 64, 16, 32],
-                    "index_data": [2],
-                }
-
         def clear_dynamic_shape():
             self.dynamic_shape.max_input_shape = {}
             self.dynamic_shape.min_input_shape = {}
@@ -198,19 +199,20 @@ def generate_trt_nodes_num(dynamic_shape):
 
         # for static_shape
         clear_dynamic_shape()
-        self.trt_param.precision = paddle_infer.PrecisionType.Float32
-        program_config.set_input_type(np.float32)
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            False
-        ), 1e-5
-        self.trt_param.precision = paddle_infer.PrecisionType.Half
-        program_config.set_input_type(np.float16)
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            False
-        ), 1e-3
+        if not run_pir:
+            self.trt_param.precision = paddle_infer.PrecisionType.Float32
+            program_config.set_input_type(np.float32)
+            yield self.create_inference_config(), generate_trt_nodes_num(
+                False
+            ), 1e-5
+            self.trt_param.precision = paddle_infer.PrecisionType.Half
+            program_config.set_input_type(np.float16)
+            yield self.create_inference_config(), generate_trt_nodes_num(
+                False
+            ), 1e-3
 
         # for dynamic_shape
-        generate_dynamic_shape(attrs)
+        self.generate_dynamic_shape()
         self.trt_param.precision = paddle_infer.PrecisionType.Float32
         program_config.set_input_type(np.float32)
         yield self.create_inference_config(), generate_trt_nodes_num(True), 1e-5
@@ -240,7 +242,7 @@ def teller1(program_config, predictor_config):
 
     def test(self):
         self.add_skip_trt_case()
-        self.run_test()
+        self.run_test(run_pir=True)
 
 
 if __name__ == "__main__":

test/ir/inference/test_trt_convert_group_norm.py (+22 -21)

@@ -101,15 +101,15 @@ def generate_bias():
 
         yield program_config
 
+    def generate_dynamic_shape(self):
+        self.dynamic_shape.min_input_shape = {"input_data": [1, 16, 16, 16]}
+        self.dynamic_shape.max_input_shape = {"input_data": [4, 64, 128, 128]}
+        self.dynamic_shape.opt_input_shape = {"input_data": [1, 32, 64, 64]}
+        return self.dynamic_shape
+
     def sample_predictor_configs(
-        self, program_config
+        self, program_config, run_pir=False
     ) -> tuple[paddle_infer.Config, list[int], float]:
-        def generate_dynamic_shape(attrs):
-            self.dynamic_shape.min_input_shape = {"input_data": [1, 16, 16, 16]}
-            self.dynamic_shape.max_input_shape = {
-                "input_data": [4, 64, 128, 128]
-            }
-            self.dynamic_shape.opt_input_shape = {"input_data": [1, 32, 64, 64]}
 
         def clear_dynamic_shape():
             self.dynamic_shape.max_input_shape = {}
@@ -125,20 +125,21 @@ def generate_trt_nodes_num(attrs, dynamic_shape):
 
         # for static_shape
         clear_dynamic_shape()
-        self.trt_param.workspace_size = 2013265920
-        self.trt_param.precision = paddle_infer.PrecisionType.Half
-        program_config.set_input_type(np.float16)
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, False
-        ), 1e-2
-
-        self.trt_param.precision = paddle_infer.PrecisionType.Float32
-        program_config.set_input_type(np.float32)
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, False
-        ), 1e-5
+        if not run_pir:
+            self.trt_param.workspace_size = 2013265920
+            self.trt_param.precision = paddle_infer.PrecisionType.Half
+            program_config.set_input_type(np.float16)
+            yield self.create_inference_config(), generate_trt_nodes_num(
+                attrs, False
+            ), 1e-2
+
+            self.trt_param.precision = paddle_infer.PrecisionType.Float32
+            program_config.set_input_type(np.float32)
+            yield self.create_inference_config(), generate_trt_nodes_num(
+                attrs, False
+            ), 1e-5
         # for dynamic_shape
-        generate_dynamic_shape(attrs)
+        self.generate_dynamic_shape()
         self.trt_param.workspace_size = 2013265920
 
         self.trt_param.precision = paddle_infer.PrecisionType.Half
@@ -154,7 +155,7 @@ def generate_trt_nodes_num(attrs, dynamic_shape):
         ), 1e-5
 
     def test(self):
-        self.run_test()
+        self.run_test(run_pir=True)
 
 
 if __name__ == "__main__":
