@@ -517,7 +517,6 @@ def slice_converter(network, paddle_op, inputs):
                 idx,
                 name=[paddle_op.name(), f'starts_tensor_{idx}'],
             )
-    start_tensor = trt_concat(network, starts_tensor)
 
     ends = get_input_constant_value(paddle_op, inputs, 2)
     if ends is not None:
@@ -569,36 +568,21 @@ def slice_converter(network, paddle_op, inputs):
     else:
         ends = inputs[2]
         for idx in range(len(axes)):
-            axis = axes[idx]
-            input_dim = get_shape_tensor_element(
-                network,
-                input_shape_tensor,
-                axis,
-                name=[paddle_op.name(), f'input_dim_{idx}'],
-            )
-            end_element = get_shape_tensor_element(
+            ends_tensor[axes[idx]] = get_shape_tensor_element(
                 network,
                 ends,
                 idx,
-                name=[paddle_op.name(), f'end_element_{idx}'],
+                name=[paddle_op.name(), f'ends_tensor_{idx}'],
             )
 
-            ends_tensor[axes[idx]] = trt_min(
-                network,
-                trt_max(
-                    network,
-                    end_element,
-                    add_1D_constant_layer(
-                        network, 0, name=[paddle_op.name(), 'zero_tensor_{idx}']
-                    ),
-                    name=[paddle_op.name(), 'trt_max_{idx}'],
-                ),
-                input_dim,
-                name=[paddle_op.name(), 'trt_min_{idx}'],
-            )
-    end_tensor = trt_concat(
-        network, ends_tensor, name=[paddle_op.name(), 'end_tensor']
-    )
+    start_tensor_layer = network.add_concatenation(starts_tensor)
+    start_tensor_layer.axis = 0
+    set_layer_name(start_tensor_layer, paddle_op)
+    start_tensor = start_tensor_layer.get_output(0)
+    end_tensor_layer = network.add_concatenation(ends_tensor)
+    end_tensor_layer.axis = 0
+    set_layer_name(end_tensor_layer, paddle_op)
+    end_tensor = end_tensor_layer.get_output(0)
     size_tensor = trt_sub(
         network,
         end_tensor,
@@ -629,12 +613,7 @@ def slice_converter(network, paddle_op, inputs):
             shuffle_layer = network.add_shuffle(output_tensor)
             shuffle_layer.reshape_dims = ()
         else:
-            real_size_tensor = trt_gather(
-                network,
-                size_tensor,
-                gather_indices,
-                name=[paddle_op.name(), 'real_size_tensor'],
-            )
+            real_size_tensor = trt_gather(network, size_tensor, gather_indices)
             shuffle_layer = network.add_shuffle(output_tensor)
             shuffle_layer.set_input(1, real_size_tensor)
 
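For reference, the concatenation pattern introduced by the `+` lines above can be read as a small helper. This is a minimal sketch only, assuming `network` is a `tensorrt.INetworkDefinition` and `set_layer_name` is the converter module's own helper for attaching a debug name to a layer; `concat_shape_tensors` is a hypothetical wrapper name used purely for illustration:

```python
def concat_shape_tensors(network, shape_tensors, paddle_op):
    # Join the per-axis 1-D shape tensors along axis 0 into a single
    # start/end tensor, and name the layer after the Paddle op for debugging.
    concat_layer = network.add_concatenation(shape_tensors)
    concat_layer.axis = 0
    set_layer_name(concat_layer, paddle_op)  # assumed project helper
    return concat_layer.get_output(0)
```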