Skip to content

Commit 040bada

Browse files
committed
merge develop and update fill_constant api
2 parents 54925d0 + b7d1701 commit 040bada

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

46 files changed

+437
-461
lines changed

backends/custom_cpu/tests/unittests/test_reshape_op.py

Lines changed: 29 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -17,16 +17,14 @@
1717
import unittest
1818
import numpy as np
1919

20-
from op_test import OpTest, convert_float_to_uint16
20+
from op_test import OpTest
2121
import paddle
2222
import paddle.fluid as fluid
23-
from paddle.fluid import compiler
2423
from paddle.static import Program, program_guard
25-
import paddle.fluid.core as core
2624

2725

2826
def get_places(self):
29-
return [paddle.CustomPlace('custom_cpu', 0)]
27+
return [paddle.CustomPlace("custom_cpu", 0)]
3028

3129

3230
OpTest._get_places = get_places
@@ -41,7 +39,7 @@ def setUp(self):
4139
self.attrs = {"shape": self.new_shape}
4240
self.outputs = {
4341
"Out": self.inputs["X"].reshape(self.infered_shape),
44-
'XShape': np.random.random(self.ori_shape).astype("float32")
42+
"XShape": np.random.random(self.ori_shape).astype("float32"),
4543
}
4644

4745
def init_data(self):
@@ -50,7 +48,7 @@ def init_data(self):
5048
self.infered_shape = (12, 10)
5149

5250
def test_check_output(self):
53-
self.check_output(no_check_set=['XShape'])
51+
self.check_output(no_check_set=["XShape"])
5452

5553
def test_check_grad(self):
5654
self.check_grad(["X"], "Out")
@@ -78,13 +76,12 @@ def setUp(self):
7876

7977
self.inputs = {
8078
"X": np.random.random(self.ori_shape).astype("float32"),
81-
"Shape": np.array(
82-
self.actual_shape, dtype="int32")
79+
"Shape": np.array(self.actual_shape, dtype="int32"),
8380
}
8481
self.attrs = {"shape": self.new_shape}
8582
self.outputs = {
8683
"Out": self.inputs["X"].reshape(self.actual_shape),
87-
'XShape': np.random.random(self.ori_shape).astype("float32")
84+
"XShape": np.random.random(self.ori_shape).astype("float32"),
8885
}
8986

9087
def init_data(self):
@@ -93,7 +90,7 @@ def init_data(self):
9390
self.actual_shape = (2, 3, 20)
9491

9592
def test_check_output(self):
96-
self.check_output(no_check_set=['XShape'])
93+
self.check_output(no_check_set=["XShape"])
9794

9895
def test_check_grad(self):
9996
self.check_grad(["X"], "Out")
@@ -107,17 +104,16 @@ def setUp(self):
107104

108105
shape_tensor = []
109106
for index, ele in enumerate(self.new_shape):
110-
shape_tensor.append(("x" + str(index), np.ones(
111-
(1)).astype('int32') * ele))
107+
shape_tensor.append(("x" + str(index), np.ones((1)).astype("int32") * ele))
112108

113109
self.inputs = {
114110
"X": np.random.random(self.ori_shape).astype("float32"),
115-
'ShapeTensor': shape_tensor
111+
"ShapeTensor": shape_tensor,
116112
}
117-
self.attrs = {'shape': self.shape}
113+
self.attrs = {"shape": self.shape}
118114
self.outputs = {
119115
"Out": self.inputs["X"].reshape(self.infered_shape),
120-
'XShape': np.random.random(self.ori_shape).astype("float32")
116+
"XShape": np.random.random(self.ori_shape).astype("float32"),
121117
}
122118

123119
def init_data(self):
@@ -127,7 +123,7 @@ def init_data(self):
127123
self.shape = (-1, -1)
128124

129125
def test_check_output(self):
130-
self.check_output(no_check_set=['XShape'])
126+
self.check_output(no_check_set=["XShape"])
131127

132128
def test_check_grad(self):
133129
self.check_grad(["X"], "Out")
@@ -157,13 +153,12 @@ def setUp(self):
157153

158154
self.inputs = {
159155
"X": np.random.random(self.ori_shape).astype("float32"),
160-
"Shape": np.array(
161-
self.new_shape, dtype="int32")
156+
"Shape": np.array(self.new_shape, dtype="int32"),
162157
}
163158
self.attrs = {}
164159
self.outputs = {
165160
"Out": self.inputs["X"].reshape(self.infered_shape),
166-
'XShape': np.random.random(self.ori_shape).astype("float32")
161+
"XShape": np.random.random(self.ori_shape).astype("float32"),
167162
}
168163

169164
def init_data(self):
@@ -172,7 +167,7 @@ def init_data(self):
172167
self.infered_shape = (10, 10)
173168

174169
def test_check_output(self):
175-
self.check_output(no_check_set=['XShape'])
170+
self.check_output(no_check_set=["XShape"])
176171

177172
def test_check_grad(self):
178173
self.check_grad(["X"], "Out")
@@ -198,14 +193,11 @@ class TestReshapeOpBool(TestReshapeOp):
198193
def setUp(self):
199194
self.init_data()
200195
self.op_type = "reshape2"
201-
self.inputs = {
202-
"X": np.random.choice(
203-
[True, False], size=self.ori_shape)
204-
}
196+
self.inputs = {"X": np.random.choice([True, False], size=self.ori_shape)}
205197
self.attrs = {"shape": self.new_shape}
206198
self.outputs = {
207199
"Out": self.inputs["X"].reshape(self.infered_shape),
208-
'XShape': np.random.random(self.ori_shape).astype("float32")
200+
"XShape": np.random.random(self.ori_shape).astype("float32"),
209201
}
210202

211203
def test_check_grad(self):
@@ -215,7 +207,7 @@ def test_check_grad(self):
215207
# Test python API
216208
class TestReshapeAPI(unittest.TestCase):
217209
def _set_paddle_api(self):
218-
self.fill_constant = paddle.fluid.layers.fill_constant
210+
self.fill_constant = paddle.tensor.fill_constant
219211
self.data = paddle.static.data
220212
self.to_tensor = paddle.to_tensor
221213
self._executed_api()
@@ -224,7 +216,7 @@ def _executed_api(self):
224216
self.reshape = paddle.reshape
225217

226218
def _set_fluid_api(self):
227-
self.fill_constant = fluid.layers.fill_constant
219+
self.fill_constant = paddle.tensor.fill_constant
228220
self.data = paddle.static.data
229221
self.reshape = fluid.layers.reshape
230222

@@ -243,21 +235,20 @@ def _test_api(self):
243235
out_1 = self.reshape(x, shape)
244236

245237
# situation 2: have shape(list, no tensor), have actual shape(Tensor)
246-
out_2 = fluid.layers.reshape(
247-
x, shape=shape, actual_shape=actual_shape)
238+
out_2 = fluid.layers.reshape(x, shape=shape, actual_shape=actual_shape)
248239

249240
# Situation 3: have shape(list, have tensor), no actual shape(Tensor)
250241
out_3 = self.reshape(x, shape=[positive_five, 10])
251242

252243
# Situation 4: have shape(Tensor), no actual shape(Tensor)
253244
out_4 = self.reshape(x, shape=actual_shape)
254245

255-
exe = paddle.static.Executor(place=paddle.CustomPlace('custom_cpu', 0))
246+
exe = paddle.static.Executor(place=paddle.CustomPlace("custom_cpu", 0))
256247
res_1, res_2, res_3, res_4 = exe.run(
257248
main_prog,
258-
feed={"x": input,
259-
"shape": np.array([2, 5, 5]).astype("int32")},
260-
fetch_list=[out_1, out_2, out_3, out_4])
249+
feed={"x": input, "shape": np.array([2, 5, 5]).astype("int32")},
250+
fetch_list=[out_1, out_2, out_3, out_4],
251+
)
261252

262253
assert np.array_equal(res_1, input.reshape(shape))
263254
assert np.array_equal(res_2, input.reshape(shape))
@@ -276,7 +267,7 @@ def test_imperative(self):
276267
self._set_paddle_api()
277268
input = np.random.random([2, 25]).astype("float32")
278269
shape = [2, 5, 5]
279-
with fluid.dygraph.guard(paddle.CustomPlace('custom_cpu', 0)):
270+
with fluid.dygraph.guard(paddle.CustomPlace("custom_cpu", 0)):
280271
x = self.to_tensor(input)
281272
positive_five = self.fill_constant([1], "int32", 5)
282273

@@ -300,7 +291,7 @@ def test_imperative(self):
300291
self._set_paddle_api()
301292
input = np.random.random([2, 25]).astype("float32")
302293
shape = [2, 5, 5]
303-
with fluid.dygraph.guard(paddle.CustomPlace('custom_cpu', 0)):
294+
with fluid.dygraph.guard(paddle.CustomPlace("custom_cpu", 0)):
304295
x = self.to_tensor(input)
305296
positive_five = self.fill_constant([1], "int32", 5)
306297

@@ -324,7 +315,7 @@ def executed_api(self):
324315
self.reshape = paddle.reshape
325316

326317
def test_out(self):
327-
paddle.disable_static(paddle.CustomPlace('custom_cpu', 0))
318+
paddle.disable_static(paddle.CustomPlace("custom_cpu", 0))
328319
input_1 = np.random.random([5, 1, 10]).astype("int32")
329320
input = paddle.to_tensor(input_1)
330321
output = self.reshape(x=input, shape=[5, 10])
@@ -333,7 +324,7 @@ def test_out(self):
333324
self.assertTrue(np.allclose(expected_out, out_np))
334325

335326
def test_out_uint8(self):
336-
paddle.disable_static(paddle.CustomPlace('custom_cpu', 0))
327+
paddle.disable_static(paddle.CustomPlace("custom_cpu", 0))
337328
input_1 = np.random.random([5, 1, 10]).astype("uint8")
338329
input = paddle.to_tensor(input_1)
339330
output = self.reshape(x=input, shape=[5, 10])
@@ -342,7 +333,7 @@ def test_out_uint8(self):
342333
self.assertTrue(np.allclose(expected_out, out_np))
343334

344335
def test_out_float32(self):
345-
paddle.disable_static(paddle.CustomPlace('custom_cpu', 0))
336+
paddle.disable_static(paddle.CustomPlace("custom_cpu", 0))
346337
input_1 = np.random.random([5, 1, 10]).astype("float32")
347338
input = paddle.to_tensor(input_1)
348339
output = self.reshape(x=input, shape=[5, 10])

0 commit comments

Comments (0)