65
65
}
66
66
67
67
68
def predict_sentiment_analysis(module, input_text, batch_size, extra=None):
    """Run a hub sentiment-analysis module on parsed request data.

    Args:
        module: loaded hub module; its default signature name is read from
            ``module.desc.attr.map.data['default_signature'].s`` and the
            matching bound method is used for prediction.
        input_text: two-element sequence of dicts that are merged into the
            ``data`` payload for the predict method.
        batch_size: forwarded to the predict method as ``batch_size``.
        extra: unused; kept for signature parity with the other
            ``predict_*`` dispatch targets.

    Returns:
        The module's prediction results, or
        ``{"result": "Please check data format!"}`` when prediction raises.
    """
    global use_gpu
    method_name = module.desc.attr.map.data['default_signature'].s
    predict_method = getattr(module, method_name)
    try:
        # Copy before merging so the caller's first dict is not mutated
        # (the original updated input_text[0] in place).
        data = dict(input_text[0])
        data.update(input_text[1])
        results = predict_method(
            data=data, use_gpu=use_gpu, batch_size=batch_size)
    except Exception as err:
        # Best-effort endpoint: log a timestamped line and return a
        # client-readable error instead of propagating.
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
        return {"result": "Please check data format!"}
    return results
81
82
82
83
83
def predict_pretrained_model(module, input_text, batch_size, extra=None):
    """Invoke a pretrained hub module on raw text input.

    Wraps ``input_text`` as ``{"text": input_text}`` and calls the module's
    default-signature predict method; on any failure a timestamped line is
    printed and a generic error payload is returned instead of raising.
    """
    global use_gpu
    signature = module.desc.attr.map.data['default_signature'].s
    runner = getattr(module, signature)
    try:
        results = runner(
            data={"text": input_text},
            use_gpu=use_gpu,
            batch_size=batch_size)
    except Exception as err:
        stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(stamp, " - ", err)
        return {"result": "Please check data format!"}
    return results
95
97
96
98
97
- def predict_lexical_analysis (module , input_text , extra = []):
99
+ def predict_lexical_analysis (module , input_text , batch_size , extra = []):
98
100
global use_gpu
99
101
method_name = module .desc .attr .map .data ['default_signature' ].s
100
102
predict_method = getattr (module , method_name )
101
103
data = {"text" : input_text }
102
104
try :
103
105
if extra == []:
104
- results = predict_method (data = data , use_gpu = use_gpu )
106
+ results = predict_method (
107
+ data = data , use_gpu = use_gpu , batch_size = batch_size )
105
108
else :
106
109
user_dict = extra [0 ]
107
110
results = predict_method (
108
- data = data , user_dict = user_dict , use_gpu = use_gpu )
111
+ data = data ,
112
+ user_dict = user_dict ,
113
+ use_gpu = use_gpu ,
114
+ batch_size = batch_size )
109
115
for path in extra :
110
116
os .remove (path )
111
117
except Exception as err :
@@ -115,29 +121,31 @@ def predict_lexical_analysis(module, input_text, extra=[]):
115
121
return results
116
122
117
123
118
def predict_classification(module, input_img, batch_size):
    """Run an image-classification hub module on a list of image paths.

    Wraps ``input_img`` as ``{"image": input_img}`` and calls the module's
    default-signature predict method; any failure is logged with a
    timestamp and reported back as a generic error payload.
    """
    global use_gpu
    signature = module.desc.attr.map.data['default_signature'].s
    runner = getattr(module, signature)
    try:
        payload = {"image": input_img}
        results = runner(
            data=payload,
            use_gpu=use_gpu,
            batch_size=batch_size)
    except Exception as err:
        stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(stamp, " - ", err)
        return {"result": "Please check data format!"}
    return results
130
137
131
138
132
- def predict_gan (module , input_img , extra = {}):
139
+ def predict_gan (module , input_img , id , batch_size , extra = {}):
133
140
# special
134
141
output_folder = module .name .split ("_" )[0 ] + "_" + "output"
135
142
global use_gpu
136
143
method_name = module .desc .attr .map .data ['default_signature' ].s
137
144
predict_method = getattr (module , method_name )
138
145
try :
139
146
input_img = {"image" : input_img }
140
- results = predict_method (data = input_img , use_gpu = use_gpu )
147
+ results = predict_method (
148
+ data = input_img , use_gpu = use_gpu , batch_size = batch_size )
141
149
except Exception as err :
142
150
curr = time .strftime ("%Y-%m-%d %H:%M:%S" , time .localtime (time .time ()))
143
151
print (curr , " - " , err )
@@ -155,6 +163,7 @@ def predict_gan(module, input_img, extra={}):
155
163
b_body = str (b_body ).replace ("b'" , "" ).replace ("'" , "" )
156
164
b_img = b_head + "," + b_body
157
165
base64_list .append (b_img )
166
+ results [index ] = results [index ].replace (id + "_" , "" )
158
167
results [index ] = {"path" : results [index ]}
159
168
results [index ].update ({"base64" : b_img })
160
169
results_pack .append (results [index ])
@@ -163,14 +172,15 @@ def predict_gan(module, input_img, extra={}):
163
172
return results_pack
164
173
165
174
166
- def predict_object_detection (module , input_img ):
175
+ def predict_object_detection (module , input_img , id , batch_size ):
167
176
output_folder = "output"
168
177
global use_gpu
169
178
method_name = module .desc .attr .map .data ['default_signature' ].s
170
179
predict_method = getattr (module , method_name )
171
180
try :
172
181
input_img = {"image" : input_img }
173
- results = predict_method (data = input_img , use_gpu = use_gpu )
182
+ results = predict_method (
183
+ data = input_img , use_gpu = use_gpu , batch_size = batch_size )
174
184
except Exception as err :
175
185
curr = time .strftime ("%Y-%m-%d %H:%M:%S" , time .localtime (time .time ()))
176
186
print (curr , " - " , err )
@@ -186,22 +196,25 @@ def predict_object_detection(module, input_img):
186
196
b_body = str (b_body ).replace ("b'" , "" ).replace ("'" , "" )
187
197
b_img = b_head + "," + b_body
188
198
base64_list .append (b_img )
199
+ results [index ]["path" ] = results [index ]["path" ].replace (
200
+ id + "_" , "" )
189
201
results [index ].update ({"base64" : b_img })
190
202
results_pack .append (results [index ])
191
203
os .remove (item )
192
204
os .remove (os .path .join (output_folder , item ))
193
205
return results_pack
194
206
195
207
196
- def predict_semantic_segmentation (module , input_img ):
208
+ def predict_semantic_segmentation (module , input_img , id , batch_size ):
197
209
# special
198
210
output_folder = module .name .split ("_" )[- 1 ] + "_" + "output"
199
211
global use_gpu
200
212
method_name = module .desc .attr .map .data ['default_signature' ].s
201
213
predict_method = getattr (module , method_name )
202
214
try :
203
215
input_img = {"image" : input_img }
204
- results = predict_method (data = input_img , use_gpu = use_gpu )
216
+ results = predict_method (
217
+ data = input_img , use_gpu = use_gpu , batch_size = batch_size )
205
218
except Exception as err :
206
219
curr = time .strftime ("%Y-%m-%d %H:%M:%S" , time .localtime (time .time ()))
207
220
print (curr , " - " , err )
@@ -219,6 +232,10 @@ def predict_semantic_segmentation(module, input_img):
219
232
b_body = str (b_body ).replace ("b'" , "" ).replace ("'" , "" )
220
233
b_img = b_head + "," + b_body
221
234
base64_list .append (b_img )
235
+ results [index ]["origin" ] = results [index ]["origin" ].replace (
236
+ id + "_" , "" )
237
+ results [index ]["processed" ] = results [index ]["processed" ].replace (
238
+ id + "_" , "" )
222
239
results [index ].update ({"base64" : b_img })
223
240
results_pack .append (results [index ])
224
241
os .remove (item )
@@ -260,7 +277,7 @@ def get_modules_info():
260
277
@app_instance .route ("/predict/image/<module_name>" , methods = ["POST" ])
261
278
def predict_image (module_name ):
262
279
req_id = request .data .get ("id" )
263
- global use_gpu
280
+ global use_gpu , batch_size_dict
264
281
img_base64 = request .form .getlist ("image" )
265
282
file_name_list = []
266
283
if img_base64 != []:
@@ -289,7 +306,8 @@ def predict_image(module_name):
289
306
else :
290
307
module_type = module .type .split ("/" )[- 1 ].replace ("-" , "_" ).lower ()
291
308
predict_func = eval ("predict_" + module_type )
292
- results = predict_func (module , file_name_list )
309
+ batch_size = batch_size_dict .get (module_name , 1 )
310
+ results = predict_func (module , file_name_list , req_id , batch_size )
293
311
r = {"results" : str (results )}
294
312
return r
295
313
@@ -316,22 +334,25 @@ def predict_text(module_name):
316
334
file_path = req_id + "_" + item .filename
317
335
file_list .append (file_path )
318
336
item .save (file_path )
319
- results = predict_func (module , data , file_list )
337
+ batch_size = batch_size_dict .get (module_name , 1 )
338
+ results = predict_func (module , data , batch_size , file_list )
320
339
return {"results" : results }
321
340
322
341
return app_instance
323
342
324
343
325
344
def config_with_file(configs):
    """Load serving config entries into the module-level registries.

    Each entry in ``configs`` is a dict with at least ``category``
    (``"CV"`` or ``"NLP"``), ``module`` (the hub module name) and
    ``batch_size``. Module names are appended to the matching global
    category list, and every module's batch size — regardless of
    category — is recorded in the global ``batch_size_dict`` used by
    the predict endpoints.

    Args:
        configs: iterable of config-entry dicts.

    Returns:
        None; results are published via the module-level globals.
    """
    global nlp_module, cv_module, batch_size_dict
    nlp_module = []
    cv_module = []
    batch_size_dict = {}
    for item in configs:
        print(item)  # echo each config entry at startup (original behavior)
        if item["category"] == "CV":
            cv_module.append(item["module"])
        elif item["category"] == "NLP":
            nlp_module.append(item["module"])
        # Direct assignment instead of dict.update with a throwaway
        # single-item dict.
        batch_size_dict[item["module"]] = item["batch_size"]
335
356
336
357
337
358
def run (is_use_gpu = False , configs = None , port = 8866 , timeout = 60 ):
0 commit comments