Skip to content

Commit 09eaa7d

Browse files
authored
fix bugs of PR 35401 (#35746)
1 parent 7e18106 commit 09eaa7d

File tree

1 file changed

+1
-2
lines changed

1 file changed

+1
-2
lines changed

python/paddle/distributed/fleet/utils/hybrid_parallel_util.py

Lines changed: 1 addition & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -19,7 +19,6 @@
1919
from paddle import framework
2020
import paddle
2121
from paddle.fluid import core
22-
import paddle.distributed as dist
2322
from paddle.fluid.dygraph.parallel import _split_tensors, sync_params_buffers, build_groups
2423
from collections import OrderedDict
2524
from .log_util import logger
@@ -45,7 +44,7 @@ def _apply_collective_grads(parameters, comm_group):
4544

4645
for coalesced_grad, _, _ in coalesced_grads_and_vars:
4746
# need to div nranks
48-
nranks = dist.get_world_size(
47+
nranks = paddle.distributed.get_world_size(
4948
) if comm_group is None else comm_group.nranks
5049
div_factor = paddle.to_tensor(nranks, dtype=coalesced_grad.dtype)
5150
paddle.fluid.framework._dygraph_tracer().trace_op(

0 commit comments

Comments (0)