@@ -1,3 +1,9 @@
+use std::collections::HashMap;
+use std::path::Path;
+use std::sync::Arc;
+use std::thread::{spawn, JoinHandle};
+use std::time::SystemTime;
+
 use crossbeam::channel::{Receiver, Sender};
 use ffmpeg::util::frame::video::Video;
 use ffmpeg_next as ffmpeg;
@@ -10,13 +16,8 @@ use parking_lot::Mutex;
 use pyo3::exceptions::PyBrokenPipeError;
 use pyo3::prelude::*;
 use pyo3::types::PyBytes;
-use std::collections::HashMap;
-use std::path::Path;
-use std::sync::Arc;
-use std::thread::{spawn, JoinHandle};
-use std::time::SystemTime;
 
-const DECODING_FORMAT: Pixel = Pixel::BGR24;
+const DECODING_FORMAT: Pixel = Pixel::RGB24;
 const DECODED_PIX_BYTES: u32 = 3;
 
 fn is_stream_key_framed(id: Id) -> Result<bool, String> {
@@ -108,8 +109,12 @@ impl VideoFrameEnvelope {
         self.__repr__()
     }
 
-    fn payload_as_bytes(&self, py: Python) -> PyObject {
-        PyBytes::new(py, &self.payload).into()
+    fn payload_as_bytes(&self, py: Python) -> PyResult<PyObject> {
+        let res = PyBytes::new_with(py, self.payload.len(), |b: &mut [u8]| {
+            b.copy_from_slice(&self.payload);
+            Ok(())
+        })?;
+        Ok(res.into())
     }
 }
 
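For context, a minimal standalone sketch of the same pattern, assuming the pyo3 version used by this crate (where `PyBytes::new_with` is available); `make_bytes` is an illustrative name, not part of the crate. The Python `bytes` buffer is allocated first and filled in place, and the fallible initializer closure is why the method now returns `PyResult`:

```rust
use pyo3::prelude::*;
use pyo3::types::PyBytes;

// Illustrative only; mirrors the pattern used by `payload_as_bytes` above.
#[pyfunction]
fn make_bytes(py: Python, payload: Vec<u8>) -> PyResult<PyObject> {
    // Allocate the Python-owned buffer up front and copy the payload into it.
    let bytes = PyBytes::new_with(py, payload.len(), |buf: &mut [u8]| {
        buf.copy_from_slice(&payload);
        Ok(())
    })?;
    Ok(bytes.into())
}
```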
@@ -139,6 +144,7 @@ fn handle(
     tx: Sender<VideoFrameEnvelope>,
     signal: Arc<Mutex<bool>>,
     decode: bool,
+    autoconvert_raw_formats_to_rgb24: bool,
     log_level: Arc<Mutex<Option<Level>>>,
 ) {
     let mut queue_full_skipped_count = 0;
@@ -181,7 +187,7 @@ fn handle(
     //     video_decoder.format(),
     //     video_decoder.width(),
     //     video_decoder.height(),
-    //     Pixel::BGR24,
+    //     Pixel::RGB24,
     //     video_decoder.width(),
     //     video_decoder.height(),
     //     Flags::FAST_BILINEAR,
@@ -256,6 +262,10 @@ fn handle(
             continue;
         }
 
+        let decode = decode
+            || (autoconvert_raw_formats_to_rgb24
+                && video_decoder.codec().map(|c| c.id()) == Some(Id::RAWVIDEO));
+
         let raw_frames = if decode {
             let mut raw_frames = Vec::new();
             video_decoder
@@ -364,12 +374,18 @@ fn assign_log_level(ffmpeg_log_level: FFmpegLogLevel) -> Level {
 #[pymethods]
 impl FFMpegSource {
     #[new]
-    #[pyo3(signature = (uri, params, queue_len = 32, decode = false, ffmpeg_log_level = FFmpegLogLevel::Info))]
+    #[pyo3(signature = (uri, params,
+        queue_len = 32,
+        decode = false,
+        autoconvert_raw_formats_to_rgb24 = false,
+        ffmpeg_log_level = FFmpegLogLevel::Info)
+    )]
     pub fn new(
         uri: String,
         params: HashMap<String, String>,
         queue_len: i64,
         decode: bool,
+        autoconvert_raw_formats_to_rgb24: bool,
         ffmpeg_log_level: FFmpegLogLevel,
     ) -> Self {
         assert!(queue_len > 0, "Queue length must be a positive number");
@@ -383,7 +399,15 @@ impl FFMpegSource {
         let thread_exit_signal = exit_signal.clone();
         let thread_ll = log_level.clone();
         let thread = Some(spawn(move || {
-            handle(uri, params, tx, thread_exit_signal, decode, thread_ll)
+            handle(
+                uri,
+                params,
+                tx,
+                thread_exit_signal,
+                decode,
+                autoconvert_raw_formats_to_rgb24,
+                thread_ll,
+            )
         }));
 
         Self {
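Because the new keyword argument defaults to `false` in the `#[pyo3(signature = ...)]` attribute, existing Python callers of `FFMpegSource` keep working unchanged. A reduced sketch of that pattern, with illustrative class and field names rather than the crate's:

```rust
use pyo3::prelude::*;

#[pyclass]
struct Source {
    decode: bool,
    autoconvert_raw_formats_to_rgb24: bool,
}

#[pymethods]
impl Source {
    #[new]
    // Defaults in the signature keep older call sites such as `Source()` valid.
    #[pyo3(signature = (decode = false, autoconvert_raw_formats_to_rgb24 = false))]
    fn new(decode: bool, autoconvert_raw_formats_to_rgb24: bool) -> Self {
        Self {
            decode,
            autoconvert_raw_formats_to_rgb24,
        }
    }
}
```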