Skip to content

Commit 0db6741

Browse files
committed
Fix
1 parent 0511272 commit 0db6741

File tree

3 files changed

+5
-15
lines changed

3 files changed

+5
-15
lines changed

python/paddle/distributed/auto_parallel/static/helper.py

+2 −6 lines changed

@@ -456,9 +456,7 @@ def init_pir(self, main_program, place):
         barrier_tensor = paddle.full([1], 1, dtype="int32")
         # barrier is not available in xpu for now
         if not paddle.framework.core.is_compiled_with_xpu():
-            paddle._legacy_C_ops.barrier(
-                barrier_tensor, barrier_tensor, 'ring_id', 0
-            )
+            paddle._C_ops.barrier(barrier_tensor, 0)
         paddle.enable_static()

@@ -600,9 +598,7 @@ def init(self, main_program, place, dist_context):
         barrier_tensor = paddle.full([1], 1, dtype="int32")
         # barrier is not available in xpu for now
         if not paddle.framework.core.is_compiled_with_xpu():
-            paddle._legacy_C_ops.barrier(
-                barrier_tensor, barrier_tensor, 'ring_id', 0
-            )
+            paddle._C_ops.barrier(barrier_tensor, 0)
         paddle.enable_static()

     def cache_whole_graph_dist_attr(self, all_params):

python/paddle/distributed/auto_parallel/static/process_group.py

+2 −6 lines changed

@@ -222,9 +222,7 @@ def instantiate(self):
             barrier_tensor = paddle.full([1], 1, dtype="int32")
             # barrier is not available in xpu for now
             if not paddle.framework.core.is_compiled_with_xpu():
-                paddle._legacy_C_ops.barrier(
-                    barrier_tensor, barrier_tensor, 'ring_id', ring_id
-                )
+                paddle._C_ops.barrier(barrier_tensor, ring_id)

             # NOTE(zhiqiu): to avoid send/recv hang in lazy init
             if self._group_type == 'p2p':
@@ -240,9 +238,7 @@ def instantiate(self):
             barrier_tensor = paddle.full([1], 1, dtype="int32")
             # barrier is not available in xpu for now
             if not paddle.framework.core.is_compiled_with_xpu():
-                paddle._legacy_C_ops.barrier(
-                    barrier_tensor, barrier_tensor, 'ring_id', 0
-                )
+                paddle._C_ops.barrier(barrier_tensor, 0)

         self._is_instantiate = True

python/paddle/distributed/communication/group.py

+1 −3 lines changed

@@ -347,9 +347,7 @@ def barrier(group: Group | None = None) -> None:
     if framework.in_dynamic_mode():
         # barrier is not available in xpu for now
         if not paddle.framework.core.is_compiled_with_xpu():
-            return paddle._legacy_C_ops.barrier(
-                barrier_tensor, barrier_tensor, 'ring_id', ring_id
-            )
+            return paddle._C_ops.barrier(barrier_tensor, ring_id)
     else:
         op_type = 'barrier'
         if not isinstance(ring_id, int):

0 commit comments

Comments
 (0)