Skip to content

Commit aa7ba3e

Browse files
authored
Merge pull request #437 from SunAhong1993/develop
add tf optimizer
2 parents f469f49 + 3ca4aed commit aa7ba3e

File tree

5 files changed

+150
-17
lines changed

5 files changed

+150
-17
lines changed

tools/README.md

-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ python tools/check_for_lite.py paddle_model/inference_model/__model__
66
```
77
> 附:check_for_lite工具并不能完全判断模型是否被支持,PaddleLite详细支持的算子请参考[PaddleLite支持算子集](https://github.com/PaddlePaddle/Paddle-Lite/blob/develop/docs/introduction/support_operation_list.md)
88
9-
109
### 二、模型参数合并
1110
X2Paddle转换后产出的路径下包括两个目录,
1211
1. `model_with_code`: 包含保存的参数文件和模型python代码文件,供用户debug

x2paddle/convert.py

+3
Original file line numberDiff line numberDiff line change
@@ -117,6 +117,7 @@ def tf2paddle(model_path,
117117
from x2paddle.optimizer.tensorflow.bias import BiasOpt
118118
from x2paddle.optimizer.tensorflow.transpose import TransposeOpt
119119
from x2paddle.optimizer.tensorflow.batch_norm import BatchNormOpt
120+
from x2paddle.optimizer.tensorflow.prelu import PReLUOpt
120121

121122
print("Now translating model from tensorflow to paddle.")
122123
model = TFDecoder(model_path, define_input_shape=define_input_shape)
@@ -125,8 +126,10 @@ def tf2paddle(model_path,
125126
bias_opt = BiasOpt()
126127
transpose_opt = TransposeOpt()
127128
batch_norm_opt = BatchNormOpt()
129+
prelu_opt = PReLUOpt()
128130
bias_opt.run(program)
129131
batch_norm_opt.run(program)
132+
prelu_opt.run(program)
130133
transpose_opt.run(program)
131134
program.gen_model(save_dir)
132135

x2paddle/op_mapper/tf_op_mapper.py

+20-10
Original file line numberDiff line numberDiff line change
@@ -882,7 +882,7 @@ def Slice(self, node):
882882
begin = self.graph.get_node(node.layer.input[1])
883883
size = self.graph.get_node(node.layer.input[2])
884884

885-
inputs = {"x": input.name}
885+
inputs = {"input": input.name}
886886
attrs = {}
887887
if begin.layer_type == "Const":
888888
begin = begin.value.tolist()
@@ -901,20 +901,30 @@ def Slice(self, node):
901901
if size.layer_type == "Const":
902902
size = size.value.tolist()
903903
attrs['shape'] = size
904+
shape = size
904905
else:
905906
shape = size.out_shapes[0]
906-
reshape_name = gen_name("slice", "reshape")
907-
program.add_layer(
908-
kernel="fluid.layers.reshape",
909-
inputs={"x": size.name},
910-
outputs=[reshape_name],
911-
shape=shape)
912-
inputs['shape'] = reshape_name
907+
# reshape_name = gen_name("slice", "reshape")
908+
# program.add_layer(
909+
# kernel="fluid.layers.reshape",
910+
# inputs={"x": size.name},
911+
# outputs=[reshape_name],
912+
# shape=shape)
913+
# inputs['shape'] = reshape_name
914+
915+
# inputs.pop('shape')
913916
program.add_layer(
914-
kernel="fluid.layers.crop_tensor",
917+
kernel="fluid.layers.slice",
915918
inputs=inputs,
916919
outputs=[node.name],
917-
**attrs)
920+
axes=list(range(len(attrs['offsets']))),
921+
starts=attrs['offsets'],
922+
ends=[attrs['offsets'][i] + shape[i] for i in range(len(shape))])
923+
# program.add_layer(
924+
# kernel="fluid.layers.crop_tensor",
925+
# inputs=inputs,
926+
# outputs=[node.name],
927+
# **attrs)
918928

919929
def ResizeNearestNeighbor(self, node):
920930
input = self.graph.get_node(node.layer.input[0])

x2paddle/optimizer/tensorflow/batch_norm.py

+4-6
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,12 @@ def run(self, graph):
2020
input_ids0 = graph.edges_in[layer_id]
2121
mul_layer0 = graph.layers[input_ids0[0]]
2222
sub_layer0 = graph.layers[input_ids0[1]]
23+
2324
if mul_layer0.kernel != "fluid.layers.elementwise_mul":
2425
continue
2526
if sub_layer0.kernel != "fluid.layers.elementwise_sub":
2627
continue
28+
2729
axis = mul_layer0.attrs.get('axis', -1)
2830
if axis != -1 and axis != 3:
2931
continue
@@ -116,7 +118,7 @@ def run(self, graph):
116118
other = graph.layers[input_ids6[1]]
117119
if variance.kernel != "fluid.layers.create_parameter":
118120
continue
119-
if other.kernel != "fluid.layers.create_parameter":
121+
if other.kernel != "fluid.layers.fill_constant":
120122
continue
121123
if len(graph.edges_out.get(input_ids6[0], [])) != 1:
122124
continue
@@ -127,10 +129,6 @@ def run(self, graph):
127129
variance_shape = graph.parameters[variance.outputs[0]].shape
128130
if variance_shape != beta_shape:
129131
continue
130-
if other.outputs[0] not in graph.parameters:
131-
continue
132-
if graph.parameters[other.outputs[0]].size != 1:
133-
continue
134132

135133
ids = set([
136134
layer_id, mul_layer0.id, sub_layer0.id, mul_layer1.id, beta.id,
@@ -163,7 +161,7 @@ def run(self, graph):
163161
kernel="fluid.layers.batch_norm",
164162
inputs={"input": "transpose_for_bn"},
165163
outputs=layer.outputs,
166-
epsilon=graph.parameters[other.outputs[0]],
164+
epsilon=other.attrs["value"],
167165
param_attr="'{}'".format(gamma.outputs[0]),
168166
bias_attr="'{}'".format(beta.outputs[0]),
169167
moving_mean_name="'{}'".format(mean.outputs[0]),
+123
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
import copy
2+
import numpy as np
3+
from collections import OrderedDict
4+
from x2paddle.core.program import PaddleLayer
5+
from x2paddle.core.util import *
6+
7+
8+
class PReLUOpt:
    """Graph optimizer that fuses a decomposed PReLU subgraph into one op.

    TensorFlow exports PReLU as a cluster of primitive ops. This pass looks
    for an ``elementwise_add`` fed by a ``relu`` branch and a scaled
    ``alpha * (x - abs(x))`` branch (relu / elementwise_mul / fill_constant /
    elementwise_sub / abs / create_parameter), deletes the matched layers,
    and replaces them with transpose -> ``fluid.layers.prelu`` -> transpose.

    NOTE(review): the transposes use perm [0, 3, 1, 2] / [0, 2, 3, 1], which
    looks like NHWC<->NCHW conversion around channel-mode prelu — confirm
    against the decoder's layout convention.
    """

    def __init__(self):
        # Stateless pass; all work happens in run().
        pass

    def run(self, graph):
        """Scan ``graph`` once and fuse every matched PReLU pattern in place.

        graph: the intermediate program graph (provides ``layers``,
        ``edges_in``, ``edges_out``, ``parameters``, ``build()``).
        """
        print("Optimize: PReLUOpt...")
        # Iterate over a deep copy so layers can be deleted from the live
        # graph while scanning.
        layers = copy.deepcopy(graph.layers)
        for layer_id, layer in layers.items():
            # Root of the pattern: the final elementwise_add.
            if layer.kernel != "fluid.layers.elementwise_add":
                continue
            axis = layer.attrs.get('axis', -1)
            if axis != -1 and axis != 3:
                continue

            # The add must combine a relu branch and a mul branch.
            # NOTE(review): input order input_ids0[0]=relu, [1]=mul is
            # assumed fixed by the decoder — verify.
            input_ids0 = graph.edges_in[layer_id]
            relu_layer0 = graph.layers[input_ids0[0]]
            mul_layer0 = graph.layers[input_ids0[1]]

            if relu_layer0.kernel != "fluid.layers.relu":
                continue
            if mul_layer0.kernel != "fluid.layers.elementwise_mul":
                continue

            axis = mul_layer0.attrs.get('axis', -1)
            if axis != -1 and axis != 3:
                continue
            # Both branches must feed only the add; otherwise fusing would
            # drop outputs other consumers still need.
            if len(graph.edges_out.get(input_ids0[0], [])) != 1:
                continue
            if len(graph.edges_out.get(input_ids0[1], [])) != 1:
                continue

            # mul branch = fill_constant (scale) * inner mul.
            input_ids1_0 = graph.edges_in[input_ids0[0]]
            input_ids1_1 = graph.edges_in[input_ids0[1]]
            fill_layer = graph.layers[input_ids1_1[1]]
            mul_layer1 = graph.layers[input_ids1_1[0]]
            if fill_layer.kernel != "fluid.layers.fill_constant":
                continue
            if mul_layer1.kernel != "fluid.layers.elementwise_mul":
                continue
            axis = mul_layer1.attrs.get('axis', -1)
            if axis != -1 and axis != 0:
                continue
            if len(graph.edges_out.get(input_ids1_1[1], [])) != 1:
                continue
            # The source tensor feeds relu, sub and abs — exactly 3 uses.
            if len(graph.edges_out.get(input_ids1_0[0], [])) != 3:
                continue

            # Inner mul = alpha parameter * (x - abs(x)).
            input_ids2 = graph.edges_in[input_ids1_1[0]]
            alpha = graph.layers[input_ids2[0]]
            sub_layer = graph.layers[input_ids2[1]]
            if alpha.kernel != "fluid.layers.create_parameter":
                continue
            if sub_layer.kernel != "fluid.layers.elementwise_sub":
                continue
            axis = sub_layer.attrs.get('axis', -1)
            if axis != -1 and axis != 3:
                continue
            if len(graph.edges_out.get(input_ids2[0], [])) != 1:
                continue
            if len(graph.edges_out.get(input_ids2[1], [])) != 1:
                continue
            # The alpha weights must already be materialized.
            if alpha.outputs[0] not in graph.parameters:
                continue

            # sub = add_layer_output - abs(add_layer_output).
            input_ids3 = graph.edges_in[input_ids2[1]]
            add_layer = graph.layers[input_ids3[0]]
            abs_layer = graph.layers[input_ids3[1]]
            if abs_layer.kernel != "fluid.layers.abs":
                continue
            if len(graph.edges_out.get(input_ids3[1], [])) != 1:
                continue

            # Pattern fully matched: remove every layer in the cluster
            # (the producer add_layer itself is kept as the fusion anchor).
            ids = set([
                layer.id, relu_layer0.id, mul_layer0.id, fill_layer.id, mul_layer1.id, alpha.id,
                sub_layer.id, abs_layer.id])

            # NOTE(review): loop variable `id` shadows the builtin.
            for id in ids:
                del graph.layers[id]
                if id in graph.edges_in:
                    del graph.edges_in[id]
                if id in graph.edges_out:
                    del graph.edges_out[id]

            # Rebuild the layer map, splicing transpose/prelu/transpose
            # immediately after the anchor layer to preserve ordering.
            copy_layers = copy.deepcopy(graph.layers)
            graph.layers = OrderedDict()
            for k, v in copy_layers.items():
                if k != add_layer.id:
                    graph.layers[k] = v
                    continue
                graph.layers[k] = v
                # Presumably NHWC -> NCHW so channel-mode prelu sees
                # channels on axis 1 — confirm layout convention.
                transpose0 = PaddleLayer(
                    id='{}_1'.format(k),
                    kernel="fluid.layers.transpose",
                    inputs={"x": v.outputs[0]},
                    outputs=["transpose_for_prelu"],
                    perm=[0, 3, 1, 2])
                prelu = PaddleLayer(
                    id='{}_2'.format(k),
                    kernel="fluid.layers.prelu",
                    inputs={"x": "transpose_for_prelu"},
                    outputs=layer.outputs,
                    mode=string("channel"),
                    param_attr="'{}'".format(alpha.outputs[0]))
                # Transpose back; reuses the fused add's original id and
                # output name so downstream consumers stay wired up.
                transpose1 = PaddleLayer(
                    id=layer_id,
                    kernel="fluid.layers.transpose",
                    inputs={"x": layer.outputs[0]},
                    outputs=layer.outputs,
                    perm=[0, 2, 3, 1])
                graph.layers[transpose0.id] = transpose0
                graph.layers[prelu.id] = prelu
                graph.layers[transpose1.id] = transpose1
            # Give alpha a leading axis (prelu channel-mode weight shape).
            graph.parameters[alpha.outputs[0]] = np.expand_dims(graph.parameters[alpha.outputs[0]], 0)
        # Recompute edges_in/edges_out after the surgery above.
        graph.build()
123+

0 commit comments

Comments
 (0)