239 changes: 170 additions & 69 deletions src/PluginMediaStreamRenderer.swift
@@ -1,17 +1,86 @@
import Foundation
import AVFoundation

class PluginRTCVideoView : UIView {

var mirror: Bool = false

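// Use the Metal-backed renderer when the WebRTC build supports it; fall back to the OpenGL ES view otherwise.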
#if RTC_SUPPORTS_METAL
var rtcVideoView: RTCMTLVideoView?
#else
var rtcVideoView: RTCEAGLVideoView?
#endif

/**
* The width and height of the video (frames) rendered by {@link #rtcVideoView}.
*/
var _videoSize : CGSize = CGSize(width: 0, height: 0);

public convenience init(){
self.init(frame: .zero)
}

public override init(frame: CGRect) {
super.init(frame: frame)

#if RTC_SUPPORTS_METAL
self.rtcVideoView = RTCMTLVideoView.init(frame: frame)
#else
self.rtcVideoView = RTCEAGLVideoView.init(frame: frame)
#endif

self.rtcVideoView!.isUserInteractionEnabled = false
self.rtcVideoView!.frame = self.bounds
self.addSubview(self.rtcVideoView!)
}

public required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)

#if RTC_SUPPORTS_METAL
self.rtcVideoView = RTCMTLVideoView.init(coder: aDecoder)
#else
self.rtcVideoView = RTCEAGLVideoView.init(coder: aDecoder)
#endif

self.rtcVideoView!.isUserInteractionEnabled = false
self.rtcVideoView!.frame = self.bounds
self.addSubview(self.rtcVideoView!)
}

// TODO
public func setSize(size: CGSize) {
//self._videoSize = size;
//self.rtcVideoView?.setSize(size)
}

// TODO
public override func layoutSubviews() {
//if(self.rtcVideoView != nil){
//self.rtcVideoView!.frame = self.bounds
//}
}

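// Flip the view horizontally when mirroring is requested and keep the stored flag in sync.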
public func setMirror(mirror: Bool){
if (mirror) {
self.mirror = true
self.transform = CGAffineTransform(scaleX: -1.0, y: 1.0);
} else {
self.transform = CGAffineTransform(scaleX: 1.0, y: 1.0);
self.mirror = false;
}
}
}

class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {

var id: String
var eventListener: (_ data: NSDictionary) -> Void
var closed: Bool

var webView: UIView
var elementView: UIView
var videoView: PluginRTCVideoView
var pluginMediaStream: PluginMediaStream?

var rtcAudioTrack: RTCAudioTrack?
var rtcVideoTrack: RTCVideoTrack?
var pluginVideoTrack: PluginMediaStreamTrack?
@@ -33,14 +102,23 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
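// Optional Info.plist flag: when enabled, the element view is sized manually from the web view's safe-area layout guide.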
let useManualLayoutRenderer = Bundle.main.object(forInfoDictionaryKey: "UseManualLayoutRenderer") as? Bool ?? false

// The video element view.
let guide = webView.safeAreaLayoutGuide;
self.elementView = useManualLayoutRenderer
? UIView(frame: CGRect(x: 0.0, y: guide.layoutFrame.minY, width: guide.layoutFrame.width, height: guide.layoutFrame.height))
: UIView()
self.videoView = PluginRTCVideoView()


self.videoView.isUserInteractionEnabled = false
self.videoView.isHidden = true
self.videoView.backgroundColor = UIColor.black
self.videoView.layer.masksToBounds = true

// Place the video element view inside the WebView's superview
self.webView.addSubview(self.videoView)
self.webView.isOpaque = false
self.webView.backgroundColor = UIColor.clear

/*
// The effective video view in which the video stream is shown.
// It's placed over the elementView.
self.videoView = RTCEAGLVideoView()
self.videoView = RTCEAGLVideoView(frame: CGRect.zero)
self.videoView.isUserInteractionEnabled = false

self.elementView.isUserInteractionEnabled = false
@@ -54,7 +132,9 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
self.webView.addSubview(self.elementView)
self.webView.isOpaque = false
self.webView.backgroundColor = UIColor.clear

*/

/*
// https://stackoverflow.com/questions/46317061/use-safe-area-layout-programmatically
// https://developer.apple.com/documentation/uikit/uiview/2891102-safearealayoutguide
// https://developer.apple.com/documentation/uikit/
@@ -72,6 +152,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
NSLayoutConstraint(item: view, attribute: .trailing, relatedBy: .equal, toItem: webView, attribute: .trailing, multiplier: 1.0, constant: 0).isActive = true
}
}
*/
}

deinit {
@@ -80,8 +161,8 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {

func run() {
NSLog("PluginMediaStreamRenderer#run()")

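// Listen for video size changes reported by the underlying renderer.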
self.videoView.rtcVideoView!.delegate = self
}

func render(_ pluginMediaStream: PluginMediaStream) {
@@ -92,7 +173,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
}

self.pluginMediaStream = pluginMediaStream

// Take the first audio track.
for (_, track) in pluginMediaStream.audioTracks {
self.rtcAudioTrack = track.rtcMediaStreamTrack as? RTCAudioTrack
@@ -101,18 +182,20 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {

// Take the first video track.
for (_, track) in pluginMediaStream.videoTracks {
self.pluginVideoTrack = track
if (self.rtcVideoTrack == track.rtcMediaStreamTrack) {
return;
}
self.rtcVideoTrack = track.rtcMediaStreamTrack as? RTCVideoTrack
break
}



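// Attach the native renderer to the selected video track.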
if self.rtcVideoTrack != nil {
self.rtcVideoTrack!.add(self.videoView.rtcVideoView!)
self.pluginVideoTrack?.registerRender(render: self)
}
}

func mediaStreamChanged() {
NSLog("PluginMediaStreamRenderer#mediaStreamChanged()")

@@ -150,27 +233,29 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
else if oldRtcVideoTrack != nil && self.rtcVideoTrack != nil &&
oldRtcVideoTrack!.trackId != self.rtcVideoTrack!.trackId {
NSLog("PluginMediaStreamRenderer#mediaStreamChanged() | has a new video track")
oldPluginVideoTrack?.unregisterRender(render: self)
oldRtcVideoTrack!.remove(self.videoView.rtcVideoView!)
self.pluginVideoTrack?.registerRender(render: self)
self.rtcVideoTrack!.add(self.videoView.rtcVideoView!)
}

// Did not have video but now it has.
else if oldRtcVideoTrack == nil && self.rtcVideoTrack != nil {
NSLog("PluginMediaStreamRenderer#mediaStreamChanged() | video track added")
if oldPluginVideoTrack != nil{
oldPluginVideoTrack?.unregisterRender(render: self)
}
self.pluginVideoTrack?.registerRender(render: self)
self.rtcVideoTrack!.add(self.videoView.rtcVideoView!)
}

// Had video but now it has not.
else if oldRtcVideoTrack != nil && self.rtcVideoTrack == nil {
NSLog("PluginMediaStreamRenderer#mediaStreamChanged() | video track removed")
oldPluginVideoTrack?.unregisterRender(render: self)
oldRtcVideoTrack!.remove(self.videoView.rtcVideoView!)
}
}

@@ -180,8 +265,8 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
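// Element geometry and style values provided in the refresh data dictionary.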
let elementTop = data.object(forKey: "elementTop") as? Double ?? 0
let elementWidth = data.object(forKey: "elementWidth") as? Double ?? 0
let elementHeight = data.object(forKey: "elementHeight") as? Double ?? 0
let videoViewWidth = data.object(forKey: "videoViewWidth") as? Double ?? 0
let videoViewHeight = data.object(forKey: "videoViewHeight") as? Double ?? 0
let visible = data.object(forKey: "visible") as? Bool ?? true
let opacity = data.object(forKey: "opacity") as? Double ?? 1
let zIndex = data.object(forKey: "zIndex") as? Double ?? 0
@@ -197,63 +282,79 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {

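// Center the video renderer inside the element box.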
let videoViewLeft: Double = (elementWidth - videoViewWidth) / 2
let videoViewTop: Double = (elementHeight - videoViewHeight) / 2

var oldValue : CGRect,
newValue : CGRect;

// Resize videoView only if needed
if (elementWidth <= 0 || elementHeight <= 0) {
newValue = videoView.bounds;
} else {
newValue = CGRect(
x: CGFloat(elementLeft),
y: CGFloat(elementTop),
width: CGFloat(elementWidth),
height: CGFloat(elementHeight)
)
}

oldValue = videoView.frame;
if (
newValue.origin.x != oldValue.origin.x
|| newValue.origin.y != oldValue.origin.y
|| newValue.size.width != oldValue.size.width
|| newValue.size.height != oldValue.size.height
) {

NSLog("PluginMediaStreamRenderer#refresh() videoView frame changed")
videoView.frame = newValue;
}

// Resize rtcVideoView only if needed to apply aspect ratio
newValue = CGRect(
x: CGFloat(videoViewLeft),
y: CGFloat(videoViewTop),
width: CGFloat(videoViewWidth),
height: CGFloat(videoViewHeight)
)
oldValue = videoView.rtcVideoView!.frame;
if (
newValue.origin.x != oldValue.origin.x
|| newValue.origin.y != oldValue.origin.y
|| newValue.size.width != oldValue.size.width
|| newValue.size.height != oldValue.size.height
) {

NSLog("PluginMediaStreamRenderer#refresh() rtcVideoView frame changed")
videoView.rtcVideoView!.frame = newValue;
}

if (visible && (videoViewWidth != 0 && videoViewHeight != 0)) {
videoView.isHidden = false
} else {
videoView.isHidden = true
}

videoView.alpha = CGFloat(opacity)
videoView.layer.zPosition = CGFloat(zIndex)

// if the zIndex is 0 (the default) bring the view to the top, last one wins
if zIndex == 0 {
webView.bringSubviewToFront(videoView)
//self.webView?.bringSubview(toFront: self.videoView)
}

videoView.setMirror(mirror: mirrored)

if clip {
videoView.clipsToBounds = true
} else {
videoView.clipsToBounds = false
}

let rgb = backgroundColor.components(separatedBy: ",").map{ CGFloat(($0 as NSString).floatValue) / 256.0 }
let color = UIColor(red: rgb[0], green: rgb[1], blue: rgb[2], alpha: 1)
self.elementView.backgroundColor = color
videoView.layer.cornerRadius = CGFloat(borderRadius)
}

func save(callback: (_ data: String) -> Void,
errback: (_ error: String) -> Void) {
//NSLog("PluginMediaStreamRenderer#save()")
@@ -279,7 +380,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
NSLog("PluginMediaStreamRenderer#close()")
self.closed = true
self.reset()
self.videoView.removeFromSuperview()
}

/**
@@ -290,7 +391,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate {
NSLog("PluginMediaStreamRenderer#reset()")

if self.rtcVideoTrack != nil {
self.rtcVideoTrack!.remove(self.videoView.rtcVideoView!)
}
if self.pluginVideoTrack != nil {
self.pluginVideoTrack?.unregisterRender(render: self)