Skip to content

Commit 3d686fb

Browse files
authored
[fluid_ops] auto_parallel_fused_linear_promotion.py backward modify c_allreduce_sum (PaddlePaddle#72135)
1 parent f078ba9 commit 3d686fb

File tree

1 file changed: +8 / -8 lines

python/paddle/distributed/passes/auto_parallel_fused_linear_promotion.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@
6060
"forward": ["matmul_v2", "reduce_scatter", "elementwise_add"],
6161
"backward": [
6262
"elementwise_add_grad",
63-
"c_allreduce_sum",
63+
"all_reduce",
6464
"scale",
6565
"all_gather",
6666
"matmul_v2_grad",
@@ -71,7 +71,7 @@
7171
"forward": ["matmul_v2", "all_reduce", "elementwise_add"],
7272
"backward": [
7373
"elementwise_add_grad",
74-
"c_allreduce_sum",
74+
"all_reduce",
7575
"scale",
7676
"matmul_v2_grad",
7777
],
@@ -80,9 +80,9 @@
8080
"forward": ["matmul_v2", "reduce_scatter", "elementwise_add"],
8181
"backward": [
8282
"elementwise_add_grad",
83-
"c_allreduce_sum",
83+
"all_reduce",
8484
"scale",
85-
"c_allreduce_sum",
85+
"all_reduce",
8686
"scale",
8787
"all_gather",
8888
"matmul_v2_grad",
@@ -98,7 +98,7 @@
9898
"forward": ["matmul_v2", "reduce_scatter", "cast", "elementwise_add"],
9999
"backward": [
100100
"elementwise_add_grad",
101-
"c_allreduce_sum",
101+
"all_reduce",
102102
"scale",
103103
"all_gather",
104104
"all_gather",
@@ -109,7 +109,7 @@
109109
"forward": ["matmul_v2", "all_reduce", "cast", "elementwise_add"],
110110
"backward": [
111111
"elementwise_add_grad",
112-
"c_allreduce_sum",
112+
"all_reduce",
113113
"scale",
114114
"matmul_v2_grad",
115115
],
@@ -118,9 +118,9 @@
118118
"forward": ["matmul_v2", "reduce_scatter", "cast", "elementwise_add"],
119119
"backward": [
120120
"elementwise_add_grad",
121-
"c_allreduce_sum",
121+
"all_reduce",
122122
"scale",
123-
"c_allreduce_sum",
123+
"all_reduce",
124124
"scale",
125125
"all_gather",
126126
"matmul_v2_grad",

Comments (0)