Commit bc1e17e

Enhance error message of cross_entropy_op, sigmoid_cross_entropy_with_logits_op, test=release/1.8 (#24542)
1 parent 19d9846 commit bc1e17e

5 files changed: 155 additions, 79 deletions
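All five files apply one pattern: bare PADDLE_ENFORCE/PADDLE_ENFORCE_EQ checks with terse string messages are replaced by the OP_INOUT_CHECK macro for input/output existence checks, and by typed platform::errors::InvalidArgument objects for shape checks, so every failure now reports the operator, the variable, and the shapes actually received. In outline (both forms are taken verbatim from the hunks below):

    // Before: a boolean check with a short, context-free message.
    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should be not null.");

    // After: a macro that names the variable, its role, and the operator.
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropy");

    // Shape checks gain a typed error that prints the offending dimensions.
    PADDLE_ENFORCE_EQ(
        framework::slice_ddim(x_dims, 0, rank - 1),
        framework::slice_ddim(label_dims, 0, rank - 1),
        platform::errors::InvalidArgument(
            "Input(X) and Input(Label) shall have the same shape "
            "except the last dimension. But received: the shape of Input(X) "
            "is [%s], the shape of Input(Label) is [%s].",
            x_dims, label_dims));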

paddle/fluid/operators/cross_entropy_op.cc

Lines changed: 71 additions & 58 deletions
@@ -25,12 +25,9 @@ class CrossEntropyOpBase : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should be not null.");
-    PADDLE_ENFORCE_EQ(ctx->HasInput("Label"), true,
-                      "Input(Label) should be not null.");
-
-    PADDLE_ENFORCE_EQ(ctx->HasOutput("Y"), true,
-                      "Output(Y) should be not null.");
+    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropy");
+    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "CrossEntropy");
+    OP_INOUT_CHECK(ctx->HasOutput("Y"), "Output", "Y", "CrossEntropy");
 
     auto x_dims = ctx->GetInputDim("X");
     auto label_dims = ctx->GetInputDim("Label");
@@ -44,53 +41,61 @@ class CrossEntropyOpBase : public framework::OperatorWithKernel {
      PADDLE_ENFORCE_EQ(
          framework::slice_ddim(x_dims, 0, rank - 1),
          framework::slice_ddim(label_dims, 0, rank - 1),
-          "ShapeError: Input(X) and Input(Label) shall have the same shape "
-          "except the last dimension. But received: the shape of Input(X) is "
-          "[%s],"
-          "the shape of Input(Label) is [%s].",
-          x_dims, label_dims);
+          platform::errors::InvalidArgument(
+              "Input(X) and Input(Label) shall have the same shape "
+              "except the last dimension. But received: the shape of Input(X) "
+              "is "
+              "[%s], the shape of Input(Label) is [%s].",
+              x_dims, label_dims));
    }
 
    if (IsSoftLabel(ctx)) {
      PADDLE_ENFORCE_EQ(
          rank, label_dims.size(),
-          "ShapeError: If Attr(soft_label) == true, Input(X) and Input(Label) "
-          "shall have the same dimensions. But received: the dimensions of "
-          "Input(X) is [%d],"
-          "the shape of Input(X) is [%s], the dimensions of Input(Label) is "
-          "[%d], the shape of"
-          "Input(Label) is [%s]",
-          rank, x_dims, label_dims.size(), label_dims);
+          platform::errors::InvalidArgument(
+              "If Attr(soft_label) == true, Input(X) and Input(Label) "
+              "shall have the same dimensions. But received: the dimensions of "
+              "Input(X) is [%d],"
+              "the shape of Input(X) is [%s], the dimensions of Input(Label) "
+              "is "
+              "[%d], the shape of"
+              "Input(Label) is [%s]",
+              rank, x_dims, label_dims.size(), label_dims));
 
      if (check) {
        PADDLE_ENFORCE_EQ(
            x_dims[rank - 1], label_dims[rank - 1],
-            "ShapeError: If Attr(soft_label) == true, the last dimension of "
-            "Input(X) and Input(Label) should be equal. But received: the"
-            "last dimension of Input(X) is [%d], the shape of Input(X) is [%s],"
-            "the last dimension of Input(Label) is [%d], the shape of "
-            "Input(Label)"
-            "is [%s], the last dimension is [%d].",
-            x_dims[rank - 1], x_dims, label_dims[rank - 1], label_dims,
-            rank - 1);
+            platform::errors::InvalidArgument(
+                "If Attr(soft_label) == true, the last dimension of "
+                "Input(X) and Input(Label) should be equal. But received: the"
+                "last dimension of Input(X) is [%d], the shape of Input(X) is "
+                "[%s],"
+                "the last dimension of Input(Label) is [%d], the shape of "
+                "Input(Label)"
+                "is [%s], the last dimension is [%d].",
+                x_dims[rank - 1], x_dims, label_dims[rank - 1], label_dims,
+                rank - 1));
      }
    } else {
      if (rank == label_dims.size()) {
        PADDLE_ENFORCE_EQ(
            label_dims[rank - 1], 1UL,
-            "ShapeError: the last dimension of Input(Label) should be 1."
-            "But received: the last dimension of Input(Label) is [%d],"
-            "the last dimension is [%d]",
-            label_dims[rank - 1], rank - 1);
+            platform::errors::InvalidArgument(
+                "the last dimension of Input(Label) should be 1."
+                "But received: the last dimension of Input(Label) is [%d],"
+                "the last dimension is [%d]",
+                label_dims[rank - 1], rank - 1));
      } else {
-        PADDLE_ENFORCE_EQ(rank, label_dims.size() + 1,
-                          "ShapeError: The rank of Input(X) should be equal to "
-                          "Input(Label) plus 1."
-                          "But received: The dimension of Input(X) is [%d], "
-                          "the shape of Input(X) is [%s],"
-                          "the dimension of Input(Label) is [%d], the shape of "
-                          "Input(Label) is [%s]",
-                          rank, x_dims, label_dims.size(), label_dims);
+        PADDLE_ENFORCE_EQ(
+            rank, label_dims.size() + 1,
+            platform::errors::InvalidArgument(
+                "ShapeError: The rank of Input(X) should be equal to "
+                "Input(Label) plus 1."
+                "But received: The dimension of Input(X) is [%d], "
+                "the shape of Input(X) is [%s],"
+                "the dimension of Input(Label) is [%d], the shape of "
+                "Input(Label) is [%s]",
+                rank, x_dims, label_dims.size(), label_dims));
      }
    }
 
@@ -122,19 +127,23 @@ class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext* ctx) const {
-    PADDLE_ENFORCE_EQ(ctx->HasInput("Label"), true,
-                      "Input(Label) should be not null.");
-    PADDLE_ENFORCE_EQ(ctx->HasInput(framework::GradVarName("Y")), true,
-                      "Input(Y@GRAD) shoudl be not null.");
-    PADDLE_ENFORCE_EQ(ctx->HasOutput(framework::GradVarName("X")), true,
-                      "Output(X@GRAD) should be not null.");
+    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
+                   "CrossEntropyGradientOpBase");
+    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Y")), "Input",
+                   framework::GradVarName("Y"), "CrossEntropyGradientOpBase");
+    OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output",
+                   framework::GradVarName("X"), "CrossEntropyGradientOpBase");
 
     auto x_dims = GetXDim(ctx);
     auto label_dims = ctx->GetInputDim("Label");
     auto dy_dims = ctx->GetInputDim(framework::GradVarName("Y"));
     int rank = x_dims.size();
-    PADDLE_ENFORCE_EQ(dy_dims.size(), label_dims.size(),
-                      "Input(Y@Grad) and Input(Y) should have the same rank.");
+    PADDLE_ENFORCE_EQ(
+        dy_dims.size(), label_dims.size(),
+        platform::errors::InvalidArgument(
+            "Input(Y@Grad) and Input(Y) should have the same rank."
+            "But received: Y@Grad's rank is [%d], Y's rank is [%d]",
+            dy_dims.size(), label_dims.size()));
 
     bool check = true;
     if ((!ctx->IsRuntime()) &&
@@ -143,10 +152,15 @@ class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
    }
 
    if (check) {
-      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
-                        framework::slice_ddim(dy_dims, 0, rank - 1),
-                        "The Input(X) and Input(Y@Grad) should have the same "
-                        "shape except the last dimension.");
+      PADDLE_ENFORCE_EQ(
+          framework::slice_ddim(x_dims, 0, rank - 1),
+          framework::slice_ddim(dy_dims, 0, rank - 1),
+          platform::errors::InvalidArgument(
+              "The Input(X) and Input(Y@Grad) should have the same "
+              "shape except the last dimension. but received: "
+              "the shape of Input(X) is [%s], "
+              "the shape of Input(Y@Grad) is [%s].",
+              x_dims, dy_dims));
    }
 
    ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
@@ -253,7 +267,7 @@ class CrossEntropyGradientOp : public CrossEntropyGradientOpBase {
   using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
 
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should be not null.");
+    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropyGradientOp");
     CrossEntropyGradientOpBase::InferShape(ctx);
   }
 };
@@ -281,11 +295,10 @@ class CrossEntropyOp2 : public CrossEntropyOpBase {
   void InferShape(framework::InferShapeContext* ctx) const override {
     CrossEntropyOpBase::InferShape(ctx);
 
-    PADDLE_ENFORCE_EQ(ctx->HasOutput("XShape"), true,
-                      "Output(XShape) should be not null.");
-
-    PADDLE_ENFORCE_EQ(ctx->HasOutput("MatchX"), true,
-                      "Output(MatchX) should be not null.");
+    OP_INOUT_CHECK(ctx->HasOutput("XShape"), "Output", "XShape",
+                   "CrossEntropyOp2");
+    OP_INOUT_CHECK(ctx->HasOutput("MatchX"), "Output", "MatchX",
+                   "CrossEntropyOp2");
     auto x_dims = ctx->GetInputDim("X");
     auto x_dims_vec = framework::vectorize(x_dims);
     x_dims_vec.push_back(0);
@@ -305,8 +318,8 @@ class CrossEntropyGradientOp2 : public CrossEntropyGradientOpBase {
  public:
   using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE_EQ(ctx->HasInput("MatchX"), true,
-                      "Input(MatchX) must exist");
+    OP_INOUT_CHECK(ctx->HasInput("MatchX"), "Input", "MatchX",
+                   "CrossEntropyGradientOp2");
     CrossEntropyGradientOpBase::InferShape(ctx);
   }
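For readers unfamiliar with OP_INOUT_CHECK: as far as I can tell from the framework headers, it wraps the same boolean test in a typed NotFound error with a uniform message, roughly equivalent to the sketch below (an approximation, not the literal macro definition in Paddle):

    // Rough expansion of OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X",
    // "CrossEntropy"); the real macro lives in the framework headers and its
    // exact wording may differ.
    PADDLE_ENFORCE_EQ(
        ctx->HasInput("X"), true,
        platform::errors::NotFound(
            "No %s(%s) found for %s operator.", "Input", "X", "CrossEntropy"));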

paddle/fluid/operators/cross_entropy_op.h

Lines changed: 3 additions & 0 deletions
@@ -166,11 +166,14 @@ struct HardLabelCrossEntropyForwardFunctor {
   HOSTDEVICE void operator()(int64_t idx) const {
     auto label = label_[idx];
     if (label != ignore_index_) {
+      // don't update to PADDLE_ENFORCE_GE and PADDLE_ENFORCE_LT cause
+      // can't use platform::errors::InvalidArgument in HOSTDEVICE
       PADDLE_ENFORCE(label >= 0 && label < feature_size_,
                      "Variable value (label) of "
                      "OP(fluid.layers.cross_entropy) expected >= 0 "
                      "and < %ld, but got %ld. Please check label value.",
                      feature_size_, label);
+
       auto match_x = x_[idx * feature_size_ + label];
       y_[idx] = -math::TolerableValue<T>()(real_log(match_x));
       match_x_[idx] = match_x;

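The comment kept in this hunk marks the one place the old form must stay: the functor's operator() is HOSTDEVICE, i.e. compiled for both CPU and CUDA device code, and (as the comment notes) the platform::errors classes build string-based error objects that cannot be constructed inside a device function, so the printf-style PADDLE_ENFORCE remains. A minimal sketch of the HOSTDEVICE idea, assuming the usual definition in paddle/fluid/platform/hostdevice.h:

    // Under nvcc the functor body is compiled for the GPU as well, so any
    // code inside it must be legal in a __device__ context.
    #ifdef __CUDACC__
    #define HOSTDEVICE __host__ __device__
    #else
    #define HOSTDEVICE
    #endif
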
paddle/fluid/operators/sigmoid_cross_entropy_with_logits_op.cc

Lines changed: 43 additions & 18 deletions
@@ -28,27 +28,39 @@ class SigmoidCrossEntropyWithLogitsOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
-    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
-    PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should be not null.");
+    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X",
+                   "SigmoidCrossEntropyWithLogitsOp");
+    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
+                   "SigmoidCrossEntropyWithLogitsOp");
+    OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out",
+                   "SigmoidCrossEntropyWithLogitsOp");
 
     auto x_dims = ctx->GetInputDim("X");
     auto labels_dims = ctx->GetInputDim("Label");
 
     int rank = x_dims.size();
     PADDLE_ENFORCE_EQ(rank, labels_dims.size(),
-                      "Input(X) and Input(Label) shall have the same rank.");
+                      platform::errors::InvalidArgument(
+                          "Input(X) and Input(Label) shall have the same rank."
+                          "But received: the rank of Input(X) is [%d], "
+                          "the rank of Input(Label) is [%d].",
+                          rank, labels_dims.size()));
+
     bool check = true;
     if ((!ctx->IsRuntime()) && (framework::product(x_dims) <= 0 ||
                                 framework::product(labels_dims) <= 0)) {
       check = false;
     }
 
     if (check) {
-      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank),
-                        framework::slice_ddim(labels_dims, 0, rank),
-                        "Input(X) and Input(Label) shall have the same shape "
-                        "except the last dimension.");
+      PADDLE_ENFORCE_EQ(
+          framework::slice_ddim(x_dims, 0, rank),
+          framework::slice_ddim(labels_dims, 0, rank),
+          platform::errors::InvalidArgument(
+              "Input(X) and Input(Label) shall have the same shape "
+              "except the last dimension. But received: the shape of "
+              "Input(X) is [%s], the shape of Input(Label) is [%s].",
+              x_dims, labels_dims));
     }
 
     ctx->ShareDim("X", /*->*/ "Out");
@@ -62,12 +74,16 @@ class SigmoidCrossEntropyWithLogitsGradOp
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
-    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
-    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
-                   "Input(Out@GRAD) shoudl be not null.");
-    PADDLE_ENFORCE(ctx->HasOutput(framework::GradVarName("X")),
-                   "Output(X@GRAD) should be not null.");
+    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X",
+                   "SigmoidCrossEntropyWithLogitsGradOp");
+    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
+                   "SigmoidCrossEntropyWithLogitsGradOp");
+    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
+                   framework::GradVarName("Out"),
+                   "SigmoidCrossEntropyWithLogitsGradOp");
+    OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output",
+                   framework::GradVarName("X"),
+                   "SigmoidCrossEntropyWithLogitsGradOp");
 
     auto x_dims = ctx->GetInputDim("X");
     auto labels_dims = ctx->GetInputDim("Label");
@@ -81,14 +97,23 @@ class SigmoidCrossEntropyWithLogitsGradOp
    }
 
    if (check) {
-      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank),
-                        framework::slice_ddim(labels_dims, 0, rank),
-                        "Input(X) and Input(Label) shall have the same shape.");
+      PADDLE_ENFORCE_EQ(
+          framework::slice_ddim(x_dims, 0, rank),
+          framework::slice_ddim(labels_dims, 0, rank),
+          platform::errors::InvalidArgument(
+              "Input(X) and Input(Label) shall have the same shape "
+              "except the last dimension. But received: the shape of "
+              "Input(X) is [%s], the shape of Input(Label) is [%s].",
+              x_dims, labels_dims));
 
      PADDLE_ENFORCE_EQ(
          framework::slice_ddim(x_dims, 0, rank),
          framework::slice_ddim(dout_dims, 0, rank),
-          "Input(X) and Input(Out@Grad) shall have the same shape.");
+          platform::errors::InvalidArgument(
+              "Input(X) and Input(Out@Grad) shall have the same shape "
+              "except the last dimension. But received: the shape of "
+              "Input(X) is [%s], the shape of Input(Out@Grad) is [%s].",
+              x_dims, dout_dims));
    }
 
    ctx->SetOutputDim(framework::GradVarName("X"), x_dims);

python/paddle/fluid/layers/loss.py

Lines changed: 10 additions & 3 deletions
@@ -1404,9 +1404,14 @@ def sigmoid_cross_entropy_with_logits(x,
     ${comment}
 
     Args:
-        x(${x_type}): ${x_comment}
-        label(${label_type}): ${label_comment}
-        ignore_index(int): ${ignore_index_comment}
+        x(Variable): a 2-D tensor with shape N x D, where N is the batch size and
+            D is the number of classes. This input is a tensor of logits computed
+            by the previous operator. Logits are unscaled log probabilities given
+            as log(p/(1-p)) The data type should be float32 or float64.
+        label (Variable): a 2-D tensor of the same type and shape as X.
+            This input is a tensor of probabalistic labels for each logit.
+        ignore_index(int): Specifies a target value that is ignored and
+            does not contribute to the input gradient.
         name(str|None): The default value is None. Normally there is
             no need for user to set this property. For more information,
             please refer to :ref:`api_guide_Name`
@@ -1431,6 +1436,8 @@ def sigmoid_cross_entropy_with_logits(x,
                 normalize=True) # or False
             # loss = fluid.layers.reduce_sum(loss) # summation of loss
     """
+    check_variable_and_dtype(x, 'input', ['float16', 'float32', 'float64'],
+                             'sigmoid_cross_entropy_with_logits')
 
     helper = LayerHelper("sigmoid_cross_entropy_with_logits", **locals())
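The new docstring's phrase "unscaled log probabilities given as log(p/(1-p))" means x holds logits: applying sigmoid to x recovers the probability p. For reference, the numerically stable per-element form this kind of loss is usually computed with is max(x, 0) - x*z + log(1 + exp(-|x|)); a small illustrative sketch (not the operator's actual kernel):

    #include <algorithm>
    #include <cmath>

    // Stable sigmoid cross entropy for one logit x and one label z in [0, 1].
    // Algebraically equal to -z*log(sigmoid(x)) - (1-z)*log(1-sigmoid(x)),
    // but avoids overflow in exp() for large |x|.
    double sigmoid_cross_entropy(double x, double z) {
      return std::max(x, 0.0) - x * z + std::log1p(std::exp(-std::abs(x)));
    }
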
python/paddle/fluid/tests/unittests/test_sigmoid_cross_entropy_with_logits_op.py

Lines changed: 28 additions & 0 deletions
@@ -20,6 +20,8 @@
 from scipy.special import expit
 import paddle.fluid.core as core
 import unittest
+from paddle.fluid import compiler, Program, program_guard
+import paddle.fluid as fluid
 
 
 class TestSigmoidCrossEntropyWithLogitsOp1(OpTest):
@@ -242,5 +244,31 @@ def test_check_grad(self):
         self.check_grad(['X'], 'Out')
 
 
+class TestSigmoidCrossEntropyWithLogitsOpError(unittest.TestCase):
+    def test_errors(self):
+        with program_guard(Program(), Program()):
+
+            def test_Variable():
+                # the input of sigmoid_cross_entropy_with_logits must be Variable.
+                x1 = fluid.create_lod_tensor(
+                    np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace())
+                lab1 = fluid.create_lod_tensor(
+                    np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace())
+                fluid.layers.sigmoid_cross_entropy_with_logits(x1, lab1)
+
+            self.assertRaises(TypeError, test_Variable)
+
+            def test_dtype():
+                # the input dtype of sigmoid_cross_entropy_with_logits must be float16 or float32 or float64
+                # float16 only can be set on GPU place
+                x2 = fluid.layers.data(
+                    name='x2', shape=[3, 4, 5, 6], dtype="int32")
+                lab2 = fluid.layers.data(
+                    name='lab2', shape=[3, 4, 5, 6], dtype="int32")
+                fluid.layers.sigmoid_cross_entropy_with_logits(x2, lab2)
+
+            self.assertRaises(TypeError, test_dtype)
+
+
 if __name__ == '__main__':
     unittest.main()
