Skip to content

Commit d4a3472

Browse files
committed
Fix module issue
1 parent c76a357 commit d4a3472

File tree

2 files changed: +4 additions, -4 deletions

python/paddle/incubate/nn/functional/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -52,7 +52,7 @@
5252
from .fused_rms_norm_ext import fused_rms_norm_ext
5353
from .moe_gate_dispatch import moe_gate_dispatch
5454
from .moe_gate_dispatch_permute import moe_gate_dispatch_permute
55-
from .moe_ops_partial_nosoftmaxtopk import moe_gate_dispatch_partial_nosoftmaxtopk
55+
from .moe_gate_dispatch_partial_nosoftmaxtopk import moe_gate_dispatch_partial_nosoftmaxtopk
5656

5757
__all__ = [
5858
'fused_multi_head_attention',

python/paddle/incubate/nn/functional/moe_ops_partial_nosoftmaxtopk.py renamed to python/paddle/incubate/nn/functional/moe_gate_dispatch_partial_nosoftmaxtopk.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -8,7 +8,7 @@
88
if TYPE_CHECKING:
99
from paddle import Tensor
1010

11-
def moe_ops_partial_nosoftmaxtopk(
11+
def moe_gate_dispatch_partial_nosoftmaxtopk(
1212
x: Tensor,
1313
combine_weights: Tensor,
1414
expert_id: Tensor,
@@ -23,7 +23,7 @@ def moe_ops_partial_nosoftmaxtopk(
2323
) -> tuple[Tensor, Tensor, Tensor, Tensor, Tensor, Tensor]:
2424
if in_dynamic_or_pir_mode():
2525
return _C_ops.moe_gate_dispatch_partial_nosoftmaxtopk(x, combine_weights, expert_id, k, capacity, num_experts, use_pad, expert_start_index, expert_end_index, reverse_token_drop)
26-
helper = LayerHelper("moe_ops_partial_nosoftmaxtopk", **locals())
26+
helper = LayerHelper("moe_gate_dispatch_partial_nosoftmaxtopk", **locals())
2727
y = helper.create_variable_for_type_inference(dtype=x.dtype)
2828
combine_weights_out = helper.create_variable_for_type_inference(dtype=combine_weights.dtype)
2929
scatter_index = helper.create_variable_for_type_inference(dtype='int32')
@@ -53,7 +53,7 @@ def moe_ops_partial_nosoftmaxtopk(
5353
"reverse_token_drop": reverse_token_drop,
5454
}
5555
helper.append_op(
56-
type="moe_ops_partial_nosoftmaxtopk",
56+
type="moe_gate_dispatch_partial_nosoftmaxtopk",
5757
inputs=inputs,
5858
outputs=outputs,
5959
attrs=attrs,

0 commit comments

Comments
 (0)