
Commit fd5b8ee

Fix Python IndexError of case2-3 (#49986)
* add shape check for fused_multi_head_attention
* use raise for coverage test
* add unittest
* remove unnecessary pass
* add unittest
1 parent 3cf50f9 · commit fd5b8ee
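For context, a minimal reproduction sketch of the bug this commit guards against (not part of the diff; it assumes a Paddle build where the fused attention kernels are available, typically CUDA). Previously a rank-1 input reached shape-indexing code inside the wrapper and surfaced as a bare IndexError; with this change the same call fails fast with the descriptive ValueError added below:

import numpy as np
import paddle
from paddle.incubate.nn import FusedMultiHeadAttention

# A rank-1 tensor; valid inputs are rank 3:
# [batch_size, sequence_length, embed_dim].
layer = FusedMultiHeadAttention(embed_dim=1, num_heads=1)
x = paddle.to_tensor(np.array([1.9], dtype=np.float32))

try:
    layer(x)
except ValueError as e:
    print(e)  # "The rank of the x should be 3, but received 1."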

3 files changed: +43 -0

python/paddle/fluid/tests/unittests/test_fused_attention_no_dropout.py

Lines changed: 12 additions & 0 deletions
@@ -192,5 +192,17 @@ def set_configs(self):
         self.normalize_before = True


+class TestFusedAttentionAPIError(unittest.TestCase):
+    def test_invalid_x_rank(self):
+        def test_x_rank_1():
+            with paddle.fluid.dygraph.guard():
+                layer = FusedMultiHeadAttention(embed_dim=1, num_heads=1)
+                array = np.array([1.9], dtype=np.float32)
+                x = paddle.to_tensor(np.reshape(array, [1]), dtype='float32')
+                out = layer(x)
+
+        self.assertRaises(ValueError, test_x_rank_1)
+
+
 if __name__ == "__main__":
     unittest.main()
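The test wraps the failing call in a nested zero-argument function so assertRaises can invoke it. An equivalent sketch using the context-manager form of assertRaises (hypothetical class name; not part of the commit):

import unittest

import numpy as np
import paddle
from paddle.incubate.nn import FusedMultiHeadAttention


class TestFusedAttentionRankCheck(unittest.TestCase):  # hypothetical name
    def test_invalid_x_rank(self):
        layer = FusedMultiHeadAttention(embed_dim=1, num_heads=1)
        x = paddle.to_tensor(np.array([1.9], dtype=np.float32))  # rank 1
        # Same assertion as assertRaises(ValueError, test_x_rank_1) above.
        with self.assertRaises(ValueError):
            layer(x)


if __name__ == "__main__":
    unittest.main()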

python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py

Lines changed: 26 additions & 0 deletions
@@ -1051,5 +1051,31 @@ def test_fused_multi_transformer_op(self):
         )


+class TestFusedMultiAttentionAPIError(unittest.TestCase):
+    def test_errors(self):
+        def test_invalid_input_dim():
+            array = np.array([1.9], dtype=np.float32)
+            x = paddle.to_tensor(np.reshape(array, [1]), dtype='float32')
+            layer = paddle.incubate.nn.FusedMultiHeadAttention(
+                embed_dim=1, num_heads=1
+            )
+            out = layer(x)
+
+        self.assertRaises(ValueError, test_invalid_input_dim)
+
+
+class TestFusedMultiTransformerAPIError(unittest.TestCase):
+    def test_errors(self):
+        def test_invalid_input_dim():
+            array = np.array([], dtype=np.float32)
+            x = paddle.to_tensor(np.reshape(array, [0]), dtype='int32')
+            layer = paddle.incubate.nn.FusedTransformerEncoderLayer(
+                108, 108, 108, 0.0, 'relu'
+            )
+            out = layer(x)
+
+        self.assertRaises(ValueError, test_invalid_input_dim)
+
+
 if __name__ == "__main__":
     unittest.main()

python/paddle/incubate/nn/functional/fused_transformer.py

Lines changed: 5 additions & 0 deletions
@@ -615,6 +615,11 @@ def fused_multi_head_attention(
         'downgrade_in_infer' if mode == 'downscale_in_infer' else mode
     )  # semantic transfer

+    if x.ndim != 3:
+        raise ValueError(
+            f"The rank of the x should be 3, but received {x.ndim}."
+        )
+
     if _non_static_mode():
         if default_main_program().random_seed != 0:
             seed = default_main_program().random_seed
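For reference, the new check admits the documented layout: fused_multi_head_attention (and the FusedMultiHeadAttention layer built on it) expects x of rank 3, [batch_size, sequence_length, embed_dim]. A minimal sketch of a well-formed call (again assuming the fused kernels are available in the build):

import numpy as np
import paddle
from paddle.incubate.nn import FusedMultiHeadAttention

# Rank-3 input: [batch_size=2, sequence_length=4, embed_dim=8].
layer = FusedMultiHeadAttention(embed_dim=8, num_heads=2)
x = paddle.to_tensor(np.random.rand(2, 4, 8).astype(np.float32))
out = layer(x)  # passes the rank check; output shape [2, 4, 8]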
