Commit 2a2011a

Merge pull request #27 from GetStream/chore/sync_0.13.2
chore: sync with flutter_webrtc v0.13.2
2 parents 25cea96 + dc317b6

35 files changed: 851 additions, 66 deletions

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
@@ -1,6 +1,12 @@
 
 # Changelog
 
+[1.0.4] - 2025-04-29
+* Synced flutter-webrtc v0.13.2
+* [iOS/Android] feat: Media Recorder implementation Android and iOS (#1810)
+* [Windows] fix: Pickup registrar for plugin by plugin registrar manager (#1752)
+* [Linux] fix: add task runner for linux. (#1821)
+
 [1.0.3] - 2025-04-11
 * Reverted to using `onSurfaceDestroyed` in `SurfaceTextureRenderer` for compatibility with Flutter 3.27.

android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java

Lines changed: 62 additions & 14 deletions
@@ -4,6 +4,7 @@
 import android.app.Activity;
 import android.app.Fragment;
 import android.app.FragmentTransaction;
+import android.content.ContentResolver;
 import android.content.ContentValues;
 import android.content.Context;
 import android.content.Intent;
@@ -13,12 +14,14 @@
 import android.media.AudioDeviceInfo;
 import android.media.projection.MediaProjection;
 import android.media.projection.MediaProjectionManager;
+import android.net.Uri;
 import android.os.Build;
 import android.os.Build.VERSION;
 import android.os.Build.VERSION_CODES;
 import android.os.Bundle;
 import android.os.Handler;
 import android.os.Looper;
+import android.os.ParcelFileDescriptor;
 import android.os.ResultReceiver;
 import android.provider.MediaStore;
 import android.util.Log;
@@ -72,6 +75,9 @@
 import org.webrtc.audio.JavaAudioDeviceModule;
 
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -1046,22 +1052,64 @@ void startRecordingToFile(
     mediaRecorders.append(id, mediaRecorder);
   }
 
-  void stopRecording(Integer id) {
-    MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
-    if (mediaRecorder != null) {
-      mediaRecorder.stopRecording();
-      mediaRecorders.remove(id);
-      File file = mediaRecorder.getRecordFile();
-      if (file != null) {
-        ContentValues values = new ContentValues(3);
-        values.put(MediaStore.Video.Media.TITLE, file.getName());
-        values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
-        values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
-        applicationContext
-            .getContentResolver()
-            .insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
+  void stopRecording(Integer id, String albumName) {
+    try {
+      MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
+      if (mediaRecorder != null) {
+        mediaRecorder.stopRecording();
+        mediaRecorders.remove(id);
+        File file = mediaRecorder.getRecordFile();
+        Uri collection;
+
+        if (file != null) {
+          ContentValues values = new ContentValues();
+          values.put(MediaStore.Video.Media.TITLE, file.getName());
+          values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName());
+          values.put(MediaStore.Video.Media.ALBUM, albumName);
+          values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
+          values.put(MediaStore.Video.Media.DATE_ADDED, System.currentTimeMillis() / 1000);
+          values.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis());
+
+          // Android 10 (API 29) and above: MediaStore uses RELATIVE_PATH
+          if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+            values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName);
+            values.put(MediaStore.Video.Media.IS_PENDING, 1);
+
+            collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY);
+          } else {
+            // Android 9 and below: MediaStore uses DATA
+            values.put(MediaStore.Video.Media.DATA, "/storage/emulated/0/Movies/" + albumName + "/" + file.getName());
+
+            collection = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
+          }
+
+          ContentResolver resolver = applicationContext.getContentResolver();
+          Uri uriSavedMedia = resolver.insert(collection, values);
+
+          assert uriSavedMedia != null;
+          ParcelFileDescriptor pfd = resolver.openFileDescriptor(uriSavedMedia, "w");
+          assert pfd != null;
+          FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor());
+
+          InputStream in = new FileInputStream(file);
+
+          byte[] buf = new byte[8192];
+          int len;
+
+          while ((len = in.read(buf)) > 0) {
+            out.write(buf, 0, len);
+          }
+
+          out.close();
+          in.close();
+          pfd.close();
+          values.clear();
+        }
       }
+    } catch (Exception e) {
+    }
   }
 
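Note on the Android path above: on API 29+ the commit inserts the MediaStore row with IS_PENDING set to 1 but never clears it, so the finished recording can remain invisible to other apps, and the final catch block silently swallows any copy failure. A minimal follow-up sketch, with a hypothetical helper name that is not part of the commit, of how the pending entry could be finalized once the stream copy completes:

import android.content.ContentResolver;
import android.content.ContentValues;
import android.net.Uri;
import android.os.Build;
import android.provider.MediaStore;

final class PendingMediaFinalizer {
  // Hypothetical helper (not in the commit): clear IS_PENDING so the
  // saved video becomes visible to other apps once the copy has completed.
  static void markComplete(ContentResolver resolver, Uri savedMedia) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
      ContentValues values = new ContentValues();
      values.put(MediaStore.Video.Media.IS_PENDING, 0);
      resolver.update(savedMedia, values, null, null);
    }
  }
}

Calling markComplete(resolver, uriSavedMedia) after the copy loop (and ideally closing the streams in a finally block) would match the pending-media pattern Android's scoped-storage documentation describes.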

android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java

Lines changed: 2 additions & 1 deletion
@@ -793,7 +793,8 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
         break;
       case "stopRecordToFile":
         Integer recorderId = call.argument("recorderId");
-        getUserMediaImpl.stopRecording(recorderId);
+        String albumName = call.argument("albumName");
+        getUserMediaImpl.stopRecording(recorderId, albumName);
         result.success(null);
         break;
       case "captureFrame": {
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+void RTCAudioSinkCallback (void *object,
+                           const void *audio_data,
+                           int bits_per_sample,
+                           int sample_rate,
+                           size_t number_of_channels,
+                           size_t number_of_frames);
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+#import <Foundation/Foundation.h>
+#import <CoreMedia/CoreMedia.h>
+#import <WebRTC/WebRTC.h>
+
+@interface FlutterRTCAudioSink : NSObject
+
+@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef);
+@property (nonatomic) CMAudioFormatDescriptionRef format;
+
+- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio;
+
+- (void) close;
+
+@end
Lines changed: 67 additions & 0 deletions
@@ -0,0 +1,67 @@
+#import <AVFoundation/AVFoundation.h>
+#import "FlutterRTCAudioSink.h"
+#import "RTCAudioSource+Private.h"
+#include "media_stream_interface.h"
+#include "audio_sink_bridge.cpp"
+
+@implementation FlutterRTCAudioSink {
+    AudioSinkBridge *_bridge;
+    webrtc::AudioSourceInterface* _audioSource;
+}
+
+- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio {
+    self = [super init];
+    rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSourcePtr = audio.source.nativeAudioSource;
+    _audioSource = audioSourcePtr.get();
+    _bridge = new AudioSinkBridge((void*)CFBridgingRetain(self));
+    _audioSource->AddSink(_bridge);
+    return self;
+}
+
+- (void) close {
+    _audioSource->RemoveSink(_bridge);
+    delete _bridge;
+    _bridge = nil;
+    _audioSource = nil;
+}
+
+void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames)
+{
+    AudioBufferList audioBufferList;
+    AudioBuffer audioBuffer;
+    audioBuffer.mData = (void*) audio_data;
+    audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames;
+    audioBuffer.mNumberChannels = number_of_channels;
+    audioBufferList.mNumberBuffers = 1;
+    audioBufferList.mBuffers[0] = audioBuffer;
+    AudioStreamBasicDescription audioDescription;
+    audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels;
+    audioDescription.mBitsPerChannel = bits_per_sample;
+    audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels;
+    audioDescription.mChannelsPerFrame = number_of_channels;
+    audioDescription.mFormatID = kAudioFormatLinearPCM;
+    audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
+    audioDescription.mFramesPerPacket = 1;
+    audioDescription.mReserved = 0;
+    audioDescription.mSampleRate = sample_rate;
+    CMAudioFormatDescriptionRef formatDesc;
+    CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc);
+    CMSampleBufferRef buffer;
+    CMSampleTimingInfo timing;
+    timing.decodeTimeStamp = kCMTimeInvalid;
+    timing.presentationTimeStamp = CMTimeMake(0, sample_rate);
+    timing.duration = CMTimeMake(1, sample_rate);
+    CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer);
+    CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
+    @autoreleasepool {
+        FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object);
+        sink.format = formatDesc;
+        if (sink.bufferCallback != nil) {
+            sink.bufferCallback(buffer);
+        } else {
+            NSLog(@"Buffer callback is nil");
+        }
+    }
+}
+
+@end
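For reference, the byte-size arithmetic the callback performs when packing raw interleaved signed-integer PCM into the AudioBufferList (mDataByteSize, mBytesPerFrame, mBytesPerPacket), restated as a runnable sketch; Java is used for consistency with the Android snippets above, and all names are illustrative:

public class PcmSizeMath {
  // Mirrors mBytesPerFrame in RTCAudioSinkCallback: one sample per channel.
  static int bytesPerFrame(int bitsPerSample, int channels) {
    return (bitsPerSample / 8) * channels;
  }

  // Mirrors audioBuffer.mDataByteSize: the whole buffer delivered per callback.
  static int bufferByteSize(int bitsPerSample, int channels, int frames) {
    return bytesPerFrame(bitsPerSample, channels) * frames;
  }

  public static void main(String[] args) {
    // 16-bit stereo, 441 frames (10 ms at 44.1 kHz) -> 1764 bytes
    System.out.println(bufferByteSize(16, 2, 441));
  }
}

Because mFramesPerPacket is 1 for uncompressed PCM, mBytesPerPacket equals mBytesPerFrame, which is why the two fields are assigned the same expression in the callback.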

common/darwin/Classes/FlutterRTCFrameCapturer.h

Lines changed: 2 additions & 0 deletions
@@ -12,4 +12,6 @@
                  toPath:(NSString*)path
                  result:(FlutterResult)result;
 
++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame;
+
 @end

common/darwin/Classes/FlutterRTCFrameCapturer.m

Lines changed: 2 additions & 2 deletions
@@ -41,7 +41,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame {
   CVPixelBufferRef pixelBufferRef;
   bool shouldRelease;
   if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
-    pixelBufferRef = [self convertToCVPixelBuffer:frame];
+    pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
     shouldRelease = true;
   } else {
     pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer;
@@ -108,7 +108,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame {
   });
 }
 
-- (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
   id<RTCI420Buffer> i420Buffer = [frame.buffer toI420];
   CVPixelBufferRef outputPixelBuffer;
   size_t w = (size_t)roundf(i420Buffer.width);
Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+#if TARGET_OS_IPHONE
+#import <Flutter/Flutter.h>
+#elif TARGET_OS_OSX
+#import <FlutterMacOS/FlutterMacOS.h>
+#endif
+#import <WebRTC/WebRTC.h>
+
+@import Foundation;
+@import AVFoundation;
+
+@interface FlutterRTCMediaRecorder : NSObject <RTCVideoRenderer>
+
+@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack;
+@property(nonatomic, strong) NSURL* _Nonnull output;
+@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter;
+@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput;
+
+- (instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video
+                                 audioTrack:(RTCAudioTrack* _Nullable)audio
+                                 outputFile:(NSURL* _Nonnull)out;
+
+- (void)stop:(_Nonnull FlutterResult)result;
+
+@end
