
Commit 7c370e4

Author: chengduo
Fix test_recurrent_op (PaddlePaddle#17001)
* fix random fail test=develop
1 parent 73a360b · commit 7c370e4

File tree

2 files changed: +19 −12 lines

  python/paddle/fluid/tests/unittests/test_recurrent_op.py
  python/paddle/fluid/tests/unittests/test_weight_decay.py


python/paddle/fluid/tests/unittests/test_recurrent_op.py

Lines changed: 8 additions & 2 deletions

@@ -182,7 +182,7 @@ def backward(self):
                       fetch_list=fetch_list,
                       return_numpy=False)
 
-    def test_backward(self):
+    def test_backward(self, rtol=0.1):
         self.check_forward()
 
         with fluid.program_guard(self.main_program, self.startup_program):

@@ -195,7 +195,10 @@ def test_backward(self):
             self.assertEqual(num_grad[idx].shape, ana_grad[idx].shape)
             self.assertTrue(
                 np.isclose(
-                    num_grad[idx], ana_grad[idx], rtol=0.1).all())
+                    num_grad[idx], ana_grad[idx], rtol=rtol).all(),
+                "num_grad (" + name + ") has diff at " + str(self.place) +
+                "\nExpect " + str(num_grad[idx]) + "\n" + "But Got" +
+                str(ana_grad[idx]) + " in class " + self.__class__.__name__)
 
     def check_forward(self):
         pd_output = self.forward()

@@ -287,6 +290,9 @@ def create_rnn_op(self):
 
         return rnn()
 
+    def test_backward(self):
+        super(RecurrentOpTest2, self).test_backward(rtol=0.2)
+
 
 class RecurrentOpMultipleMemoryTest(RecurrentOpTest1):
     '''
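
Taken together, the test_recurrent_op.py hunks make the gradient tolerance configurable: the base test still compares the numerical gradient against the analytical one with np.isclose, but the default rtol=0.1 can now be overridden, RecurrentOpTest2 loosens it to 0.2, and the assertion message reports the values that disagreed. Below is a minimal, self-contained sketch of the same base-class/override pattern; the class names and gradient values are invented for illustration and are not part of the commit.

import unittest

import numpy as np


class GradientCheckBase(unittest.TestCase):
    """Illustrative stand-in for RecurrentOpTest1's tolerance pattern."""

    def gradients(self):
        # Hypothetical values standing in for the numerical/analytical
        # gradient pair that the real test computes from the recurrent op.
        num_grad = np.array([1.00, 2.00, 3.00])
        ana_grad = np.array([1.05, 1.90, 3.10])
        return num_grad, ana_grad

    def test_backward(self, rtol=0.1):
        num_grad, ana_grad = self.gradients()
        self.assertEqual(num_grad.shape, ana_grad.shape)
        # Element-wise relative comparison; .all() demands that every
        # element is within rtol, and the message prints only on failure.
        self.assertTrue(
            np.isclose(num_grad, ana_grad, rtol=rtol).all(),
            "num_grad has diff\nExpect " + str(num_grad) + "\nBut Got " +
            str(ana_grad) + " in class " + self.__class__.__name__)


class NoisierCase(GradientCheckBase):
    """Mirrors RecurrentOpTest2: the same check, looser tolerance."""

    def test_backward(self):
        super(NoisierCase, self).test_backward(rtol=0.2)


if __name__ == '__main__':
    unittest.main()

Because the test runner invokes test methods without arguments, giving test_backward a defaulted rtol parameter leaves discovery and execution unchanged; a subclass simply re-enters the base implementation with a different tolerance.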

python/paddle/fluid/tests/unittests/test_weight_decay.py

Lines changed: 11 additions & 10 deletions

@@ -95,7 +95,6 @@ def run_parallel_exe(self,
                          place,
                          feed_list,
                          loss,
-                         use_cuda=True,
                          use_reduce=False,
                          use_fast_executor=False,
                          use_ir_memory_optimize=False):

@@ -136,19 +135,16 @@ def check_weight_decay(self,
         startup_prog = fluid.framework.Program()
         startup_prog.random_seed = 1
         with prog_scope_guard(main_prog=main_prog, startup_prog=startup_prog):
-
             data = fluid.layers.data(
                 name="words", shape=[1], dtype="int64", lod_level=1)
             label = fluid.layers.data(name="label", shape=[1], dtype="int64")
-
             avg_cost = model(data, label, len(self.word_dict))
 
             param_list = [(var, var * self.learning_rate)
                           for var in main_prog.block(0).all_parameters()]
 
             optimizer = fluid.optimizer.Adagrad(
                 learning_rate=self.learning_rate)
-
             optimizer.minimize(avg_cost)
 
             for params in param_list:

@@ -158,10 +154,7 @@ def check_weight_decay(self,
 
         if use_parallel_exe:
             loss = self.run_parallel_exe(
-                place, [data, label],
-                loss=avg_cost,
-                use_cuda=True,
-                use_reduce=use_reduce)
+                place, [data, label], loss=avg_cost, use_reduce=use_reduce)
         else:
             loss = self.run_executor(place, [data, label], loss=avg_cost)
 

@@ -176,13 +169,21 @@ def test_weight_decay(self):
             place, model, use_parallel_exe=True, use_reduce=False)
 
         for i in range(len(loss)):
-            assert np.isclose(a=loss[i], b=loss2[i], rtol=5e-5)
+            self.assertTrue(
+                np.isclose(
+                    a=loss[i], b=loss2[i], rtol=5e-5),
+                "Expect " + str(loss[i]) + "\n" + "But Got" + str(loss2[i])
+                + " in class " + self.__class__.__name__)
 
         loss3 = self.check_weight_decay(
             place, model, use_parallel_exe=True, use_reduce=True)
 
         for i in range(len(loss)):
-            assert np.isclose(a=loss[i], b=loss3[i], rtol=5e-5)
+            self.assertTrue(
+                np.isclose(
+                    a=loss[i], b=loss3[i], rtol=5e-5),
+                "Expect " + str(loss[i]) + "\n" + "But Got" + str(loss2[i])
+                + " in class " + self.__class__.__name__)
 
 
 if __name__ == '__main__':
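
The test_weight_decay.py hunks apply the same idea to the loss comparison: a bare assert aborts with no context when the two executor strategies diverge, while self.assertTrue with a message string reports both values. A small sketch of the pattern, with made-up loss values that are not from the commit:

import unittest

import numpy as np


class WeightDecayStyleTest(unittest.TestCase):
    """Illustrative only; the real test compares losses produced by
    different executor strategies in test_weight_decay.py."""

    def test_losses_match(self):
        loss = [0.69315, 0.69212]   # hypothetical baseline losses
        loss2 = [0.69315, 0.69213]  # hypothetical parallel-executor losses
        for i in range(len(loss)):
            # A bare `assert np.isclose(...)` would only report that the
            # assertion failed; assertTrue's second argument shows both
            # values whenever the comparison is False.
            self.assertTrue(
                np.isclose(a=loss[i], b=loss2[i], rtol=5e-5),
                "Expect " + str(loss[i]) + "\nBut Got " + str(loss2[i]) +
                " in class " + self.__class__.__name__)


if __name__ == '__main__':
    unittest.main()

On failure, unittest appends this message to the usual "False is not true" output, which is enough to see how far apart the two runs drifted.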

0 commit comments