Commit 06984e8

Version Problem (#708)
* add SiLU op
* fixed bugs
* version warning
* fixed bugs
* fixed bugs
* deal with comments
* fixed code format
* deal with comments
1 parent b2a78a5 commit 06984e8

File tree

2 files changed (+41 -30 lines)

x2paddle/convert.py (+41 -29)
@@ -16,6 +16,7 @@
 from x2paddle import program
 import argparse
 import sys
+import logging


 def arg_parser():
@@ -132,20 +133,20 @@ def tf2paddle(model_path,
         import tensorflow as tf
         version = tf.__version__
         if version >= '2.0.0' or version < '1.0.0':
-            print(
+            logging.info(
                 "[ERROR] 1.0.0<=tensorflow<2.0.0 is required, and v1.14.0 is recommended"
             )
             return
     except:
-        print(
+        logging.info(
             "[ERROR] Tensorflow is not installed, use \"pip install tensorflow\"."
         )
         return

     from x2paddle.decoder.tf_decoder import TFDecoder
     from x2paddle.op_mapper.tf2paddle.tf_op_mapper import TFOpMapper

-    print("Now translating model from tensorflow to paddle.")
+    logging.info("Now translating model from tensorflow to paddle.")
     model = TFDecoder(model_path, define_input_shape=define_input_shape)
     mapper = TFOpMapper(model)
     mapper.paddle_graph.build()
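
Note (editor's illustration, not part of the commit): the guard above compares version strings lexicographically rather than numerically, which works for the 1.x/2.x boundary it targets. A tiny sketch of that behavior:

# Illustration only: Python compares strings character by character, so the
# tf2paddle guard "version >= '2.0.0' or version < '1.0.0'" behaves as intended
# for ordinary 1.x and 2.x release strings.
assert '1.14.0' >= '1.0.0' and '1.14.0' < '2.0.0'   # accepted by the guard
assert '2.3.0' >= '2.0.0'                            # rejected by the guard
assert '0.12.1' < '1.0.0'                            # rejected by the guard

String comparison would misorder a hypothetical two-digit major release, but that case does not arise in the 1.x/2.x range checked here.
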
@@ -173,15 +174,15 @@ def caffe2paddle(proto_file,
             or (int(ver_part[0]) > 3):
         version_satisfy = True
     assert version_satisfy, '[ERROR] google.protobuf >= 3.6.0 is required'
-    print("Now translating model from caffe to paddle.")
+    logging.info("Now translating model from caffe to paddle.")
     model = CaffeDecoder(proto_file, weight_file, caffe_proto)
     mapper = CaffeOpMapper(model)
     mapper.paddle_graph.build()
-    print("Model optimizing ...")
+    logging.info("Model optimizing ...")
     from x2paddle.optimizer.optimizer import GraphOptimizer
     graph_opt = GraphOptimizer(source_frame="caffe")
     graph_opt.optimize(mapper.paddle_graph)
-    print("Model optimized.")
+    logging.info("Model optimized.")
     mapper.paddle_graph.gen_model(save_dir)
     if convert_to_lite:
         convert2lite(save_dir, lite_valid_places, lite_model_type)
@@ -199,12 +200,13 @@ def onnx2paddle(model_path,
         v0, v1, v2 = version.split('.')
         version_sum = int(v0) * 100 + int(v1) * 10 + int(v2)
         if version_sum < 160:
-            print("[ERROR] onnx>=1.6.0 is required")
+            logging.info("[ERROR] onnx>=1.6.0 is required")
             return
     except:
-        print("[ERROR] onnx is not installed, use \"pip install onnx==1.6.0\".")
+        logging.info(
+            "[ERROR] onnx is not installed, use \"pip install onnx==1.6.0\".")
         return
-    print("Now translating model from onnx to paddle.")
+    logging.info("Now translating model from onnx to paddle.")

     from x2paddle.decoder.onnx_decoder import ONNXDecoder
     from x2paddle.op_mapper.onnx2paddle.onnx_op_mapper import ONNXOpMapper
@@ -227,17 +229,24 @@ def pytorch2paddle(module,
     try:
         import torch
         version = torch.__version__
-        ver_part = version.split('.')
-        print(ver_part)
-        if int(ver_part[1]) < 5:
-            print("[ERROR] pytorch>=1.5.0 is required")
+        v0, v1, v2 = version.split('.')
+        # Avoid the situation where the version is equal to 1.7.0+cu101
+        if '+' in v2:
+            v2 = v2.split('+')[0]
+        version_sum = int(v0) * 100 + int(v1) * 10 + int(v2)
+        if version_sum < 150:
+            logging.info(
+                "[ERROR] pytorch>=1.5.0 is required, 1.6.0 is the most recommended"
+            )
             return
+        if version_sum > 160:
+            logging.info("[WARNING] pytorch==1.6.0 is recommended")
     except:
-        print(
-            "[ERROR] Pytorch is not installed, use \"pip install torch==1.5.0 torchvision\"."
+        logging.info(
+            "[ERROR] Pytorch is not installed, use \"pip install torch==1.6.0 torchvision\"."
         )
         return
-    print("Now translating model from pytorch to paddle.")
+    logging.info("Now translating model from pytorch to paddle.")

     from x2paddle.decoder.pytorch_decoder import ScriptDecoder, TraceDecoder
     from x2paddle.op_mapper.pytorch2paddle.pytorch_op_mapper import PyTorchOpMapper
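
Editor's note: the hunk above replaces the old minor-version-only check with a single-integer encoding and strips local build tags such as "+cu101". A minimal standalone sketch of that arithmetic; the helper name _torch_version_sum is illustrative and not part of the commit:

def _torch_version_sum(version):
    # Illustrative helper mirroring the diff: collapse "major.minor.patch"
    # into a single integer for easy threshold comparison.
    v0, v1, v2 = version.split('.')
    # Strip local build tags such as "1.7.0+cu101" before int() conversion.
    if '+' in v2:
        v2 = v2.split('+')[0]
    return int(v0) * 100 + int(v1) * 10 + int(v2)

assert _torch_version_sum('1.5.0') == 150
assert _torch_version_sum('1.7.0+cu101') == 170   # build tag ignored

The encoding assumes single-digit minor and patch components, which holds for the torch releases the 150/160 thresholds target.
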
@@ -248,30 +257,32 @@ def pytorch2paddle(module,
         model = ScriptDecoder(module, input_examples)
     mapper = PyTorchOpMapper(model)
     mapper.paddle_graph.build()
-    print("Model optimizing ...")
+    logging.info("Model optimizing ...")
     from x2paddle.optimizer.optimizer import GraphOptimizer
     graph_opt = GraphOptimizer(source_frame="pytorch", jit_type=jit_type)
     graph_opt.optimize(mapper.paddle_graph)
-    print("Model optimized.")
+    logging.info("Model optimized.")
     mapper.paddle_graph.gen_model(save_dir, jit_type=jit_type)
     if convert_to_lite:
         convert2lite(save_dir, lite_valid_places, lite_model_type)


 def main():
+    logging.basicConfig(level=logging.INFO)
     if len(sys.argv) < 2:
-        print("Use \"x2paddle -h\" to print the help information")
-        print("For more information, please follow our github repo below:)")
-        print("\nGithub: https://github.com/PaddlePaddle/X2Paddle.git\n")
+        logging.info("Use \"x2paddle -h\" to print the help information")
+        logging.info(
+            "For more information, please follow our github repo below:)")
+        logging.info("\nGithub: https://github.com/PaddlePaddle/X2Paddle.git\n")
         return

     parser = arg_parser()
     args = parser.parse_args()

     if args.version:
         import x2paddle
-        print("x2paddle-{} with python>=3.5, paddlepaddle>=1.6.0\n".format(
-            x2paddle.__version__))
+        logging.info("x2paddle-{} with python>=3.5, paddlepaddle>=1.6.0\n".
+                     format(x2paddle.__version__))
         return

     if not args.convert_torch_project:
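
Editor's note: the logging.basicConfig call added at the top of main() is what makes the new logging.info messages visible; a small sketch (outside the commit) of the behavior:

import logging

# With no handler configured, the root logger's effective level is WARNING,
# so INFO records are not printed.
logging.info("not shown")

# basicConfig attaches a stderr StreamHandler and lowers the level to INFO,
# matching the call added in main().
logging.basicConfig(level=logging.INFO)
logging.info("Now translating model from pytorch to paddle.")  # shown
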
@@ -282,18 +293,19 @@ def main():
             import platform
             v0, v1, v2 = platform.python_version().split('.')
             if not (int(v0) >= 3 and int(v1) >= 5):
-                print("[ERROR] python>=3.5 is required")
+                logging.info("[ERROR] python>=3.5 is required")
                 return
             import paddle
             v0, v1, v2 = paddle.__version__.split('.')
-            print("paddle.__version__ = {}".format(paddle.__version__))
+            logging.info("paddle.__version__ = {}".format(paddle.__version__))
             if v0 == '0' and v1 == '0' and v2 == '0':
-                print("[WARNING] You are use develop version of paddlepaddle")
+                logging.info(
+                    "[WARNING] You are use develop version of paddlepaddle")
             elif int(v0) != 2 or int(v1) < 0:
-                print("[ERROR] paddlepaddle>=2.0.0 is required")
+                logging.info("[ERROR] paddlepaddle>=2.0.0 is required")
                 return
         except:
-            print(
+            logging.info(
                 "[ERROR] paddlepaddle not installed, use \"pip install paddlepaddle\""
             )

@@ -334,7 +346,7 @@ def main():
             lite_valid_places=args.lite_valid_places,
             lite_model_type=args.lite_model_type)
     elif args.framework == "paddle2onnx":
-        print(
+        logging.info(
             "Paddle to ONNX tool has been migrated to the new github: https://github.com/PaddlePaddle/paddle2onnx"
         )

x2paddle/decoder/onnx_decoder.py (-1)
@@ -16,7 +16,6 @@
 from x2paddle.decoder.onnx_shape_inference import SymbolicShapeInference
 from onnx.checker import ValidationError
 from onnx.checker import check_model
-from onnx.utils import polish_model
 from onnx import helper, shape_inference
 from onnx.helper import get_attribute_value, make_attribute
 from onnx.shape_inference import infer_shapes
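
Editor's note: the commit drops the polish_model import; the imports this file keeps still cover model validation and shape inference. A minimal sketch of those two operations, assuming a model at a placeholder path:

import onnx
from onnx.checker import check_model
from onnx.shape_inference import infer_shapes

# Placeholder path; substitute a real ONNX file.
model = onnx.load("model.onnx")

# Validate the graph, then annotate it with inferred shapes -- the two
# operations provided by the retained imports.
check_model(model)
model = infer_shapes(model)
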
