@@ -95,7 +95,6 @@ def run_parallel_exe(self,
                          place,
                          feed_list,
                          loss,
-                         use_cuda=True,
                          use_reduce=False,
                          use_fast_executor=False,
                          use_ir_memory_optimize=False):
@@ -136,19 +135,16 @@ def check_weight_decay(self,
         startup_prog = fluid.framework.Program()
         startup_prog.random_seed = 1
         with prog_scope_guard(main_prog=main_prog, startup_prog=startup_prog):
-
             data = fluid.layers.data(
                 name="words", shape=[1], dtype="int64", lod_level=1)
             label = fluid.layers.data(name="label", shape=[1], dtype="int64")
-
             avg_cost = model(data, label, len(self.word_dict))

             param_list = [(var, var * self.learning_rate)
                           for var in main_prog.block(0).all_parameters()]

             optimizer = fluid.optimizer.Adagrad(
                 learning_rate=self.learning_rate)
-
             optimizer.minimize(avg_cost)

             for params in param_list:
@@ -158,10 +154,7 @@ def check_weight_decay(self,

         if use_parallel_exe:
             loss = self.run_parallel_exe(
-                place, [data, label],
-                loss=avg_cost,
-                use_cuda=True,
-                use_reduce=use_reduce)
+                place, [data, label], loss=avg_cost, use_reduce=use_reduce)
         else:
             loss = self.run_executor(place, [data, label], loss=avg_cost)

@@ -176,13 +169,21 @@ def test_weight_decay(self):
             place, model, use_parallel_exe=True, use_reduce=False)

         for i in range(len(loss)):
-            assert np.isclose(a=loss[i], b=loss2[i], rtol=5e-5)
+            self.assertTrue(
+                np.isclose(
+                    a=loss[i], b=loss2[i], rtol=5e-5),
+                "Expect " + str(loss[i]) + "\n" + "But Got " + str(loss2[i])
+                + " in class " + self.__class__.__name__)

         loss3 = self.check_weight_decay(
             place, model, use_parallel_exe=True, use_reduce=True)

         for i in range(len(loss)):
-            assert np.isclose(a=loss[i], b=loss3[i], rtol=5e-5)
+            self.assertTrue(
+                np.isclose(
+                    a=loss[i], b=loss3[i], rtol=5e-5),
+                "Expect " + str(loss[i]) + "\n" + "But Got " + str(loss3[i])
+                + " in class " + self.__class__.__name__)


 if __name__ == '__main__':
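The main behavioral change above is replacing the bare `assert np.isclose(...)` checks with `self.assertTrue(..., msg)`, so a failing comparison reports the two values and the test class instead of an anonymous AssertionError. Below is a minimal standalone sketch of that pattern, assuming a hypothetical test class and made-up loss values (not taken from the PaddlePaddle test itself):

```python
import unittest

import numpy as np


class DummyLossCompareTest(unittest.TestCase):
    # Hypothetical stand-in values; the real test compares losses from the
    # plain Executor against losses from the ParallelExecutor.
    def test_losses_close(self):
        loss = [1.00000, 0.50000]
        loss2 = [1.00001, 0.50002]
        for i in range(len(loss)):
            # A bare `assert` would raise AssertionError with no context;
            # assertTrue with a message shows both values and the class name.
            self.assertTrue(
                np.isclose(
                    a=loss[i], b=loss2[i], rtol=5e-5),
                "Expect " + str(loss[i]) + "\n" + "But Got " + str(loss2[i])
                + " in class " + self.__class__.__name__)


if __name__ == '__main__':
    unittest.main()
```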