1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -6,6 +6,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

### 🐞 Fixed
- An issue that was causing the local participant's audio waveform visualization to stop working. [#912](https://github.com/GetStream/stream-video-swift/pull/912)
- Proximity policies weren't updating `CallSettings` correctly, which could leave the speaker not re-enabled or the video not stopped/restarted when proximity changed. [#913](https://github.com/GetStream/stream-video-swift/pull/913)

# [1.30.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.30.0)
_August 08, 2025_
6 changes: 5 additions & 1 deletion Sources/StreamVideo/Models/CallSettings.swift
@@ -6,7 +6,7 @@ import Combine
import Foundation

/// Represents the settings for a call.
public final class CallSettings: ObservableObject, Sendable, Equatable, ReflectiveStringConvertible {
public final class CallSettings: ObservableObject, Sendable, Equatable, CustomStringConvertible {
/// Whether the audio is on for the current user.
public let audioOn: Bool
/// Whether the video is on for the current user.
@@ -90,6 +90,10 @@ public final class CallSettings: ObservableObject, Sendable, Equatable, Reflecti
public var shouldPublish: Bool {
audioOn || videoOn
}

public var description: String {
"<CallSettings audioOn:\(audioOn) videoOn:\(videoOn) speakerOn:\(speakerOn) audioOutputOn:\(audioOutputOn) cameraPosition:\(cameraPosition)/>"
}
}

/// The camera position.
@@ -90,23 +90,22 @@ extension RTCAudioStore {
""",
subsystems: .audioSession
)
delegate?.audioSessionAdapterDidUpdateCallSettings(
callSettings: activeCallSettings
.withUpdatedSpeakerState(session.currentRoute.isSpeaker)
delegate?.audioSessionAdapterDidUpdateSpeakerOn(
session.currentRoute.isSpeaker
)
}
return
}

switch (activeCallSettings.speakerOn, session.currentRoute.isSpeaker) {
case (true, false):
delegate?.audioSessionAdapterDidUpdateCallSettings(
callSettings: activeCallSettings.withUpdatedSpeakerState(false)
delegate?.audioSessionAdapterDidUpdateSpeakerOn(
false
)

case (false, true) where session.category == AVAudioSession.Category.playAndRecord.rawValue:
delegate?.audioSessionAdapterDidUpdateCallSettings(
callSettings: activeCallSettings.withUpdatedSpeakerState(true)
delegate?.audioSessionAdapterDidUpdateSpeakerOn(
true
)

default:
@@ -11,7 +11,7 @@ protocol StreamAudioSessionAdapterDelegate: AnyObject {
/// - Parameters:
/// - audioSession: The `AudioSession` instance that made the update.
/// - callSettings: The updated `CallSettings`.
func audioSessionAdapterDidUpdateCallSettings(
callSettings: CallSettings
func audioSessionAdapterDidUpdateSpeakerOn(
_ speakerOn: Bool
)
}
@@ -267,13 +267,13 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable {
transceiverStorage
.forEach { $0.value.track.isEnabled = false }

Task(disposableBag: disposableBag) { @MainActor [weak self] in
_ = await Task(disposableBag: disposableBag) { @MainActor [weak self] in
do {
try await self?.stopVideoCapturingSession()
} catch {
log.error(error, subsystems: .webRTC)
}
}
}.result

log.debug(
"""
@@ -119,10 +119,6 @@ extension WebRTCCoordinator.StateMachine.Stage {

try Task.checkCancellation()

await observeCallSettingsUpdates()
Contributor: why do we remove this?

Contributor (author): This method was here to forward CallSettings to the publisher PeerConnection. However, because it relied on the Combine chain for updates, it always lagged slightly behind.

Now we remove it and move the publisher PeerConnection update directly onto the WebRTCStateAdapter, next to where it updates its own CallSettings.
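A minimal sketch of the pattern this reply describes, using illustrative stand-in types (`Settings`, `PublisherConnection`, `CallSettingsCoordinator` are assumptions, not the SDK's API); the actual implementation is the `enqueueCallSettings` method added to `WebRTCStateAdapter.swift` further down in this diff:

```swift
import Foundation

// Illustrative stand-ins for the SDK types; names and shapes are assumptions.
struct Settings: Equatable, Sendable {
    var audioOn = false
    var videoOn = false
    var speakerOn = true
}

protocol PublisherConnection: Sendable {
    func didUpdateCallSettings(_ settings: Settings) async throws
}

/// Applies call-settings updates and forwards each accepted change to the
/// publisher peer connection in the same step that mutates the stored state,
/// instead of relying on a downstream Combine observer.
actor CallSettingsCoordinator {
    private var settings = Settings()
    private var publisher: (any PublisherConnection)?

    func set(publisher: any PublisherConnection) {
        self.publisher = publisher
    }

    func enqueue(_ transform: @Sendable (Settings) -> Settings) async {
        let updated = transform(settings)
        guard updated != settings else { return } // skip no-op updates
        settings = updated
        do {
            // Forward immediately, from the same place the state changed.
            try await publisher?.didUpdateCallSettings(updated)
        } catch {
            print("Failed to forward call settings to publisher: \(error)")
        }
    }
}
```

Callers express changes as transformations, e.g. `await coordinator.enqueue { var s = $0; s.speakerOn = false; return s }`. The real adapter goes further: it runs each update on a serial `OperationQueue` so even the asynchronous forwarding step cannot interleave; this sketch only shows the shape of the change.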


try Task.checkCancellation()

await observePeerConnectionState()

try Task.checkCancellation()
@@ -386,42 +382,6 @@ extension WebRTCCoordinator.StateMachine.Stage {
.store(in: disposableBag)
}

/// Observes updates to the `callSettings` and ensures that any changes are
/// reflected in the publisher. This ensures that updates to audio, video, and
/// audio output settings are applied correctly during a WebRTC session.
private func observeCallSettingsUpdates() async {
Contributor: I think I haven't seen a replacement for this? How do we listen to updates now?

await context
.coordinator?
.stateAdapter
.$callSettings
.compactMap { $0 }
.removeDuplicates()
.sinkTask(storeIn: disposableBag) { [weak self] callSettings in
guard let self else { return }
do {
guard
let publisher = await context.coordinator?.stateAdapter.publisher
else {
log.warning(
"PeerConnection hasn't been set up for publishing.",
subsystems: .webRTC
)
return
}

try await publisher.didUpdateCallSettings(callSettings)
log.debug("Publisher callSettings updated.", subsystems: .webRTC)
} catch {
log.warning(
"Will disconnect because failed to update callSettings on Publisher.[Error:\(error)]",
subsystems: .webRTC
)
transitionDisconnectOrError(error)
}
}
.store(in: disposableBag) // Store the Combine subscription in the disposable bag.
}

/// Observes the connection state of both the publisher and subscriber peer
/// connections. If a disconnection is detected, the method attempts to restart
/// ICE (Interactive Connectivity Establishment) for both the publisher and
24 changes: 14 additions & 10 deletions Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift
@@ -88,16 +88,20 @@ struct WebRTCAuthenticator: WebRTCAuthenticating {
/// - Finally, applies the determined call settings to the state adapter.
let initialCallSettings = await coordinator.stateAdapter.initialCallSettings
let remoteCallSettings = CallSettings(response.call.settings)
var callSettings = initialCallSettings ?? remoteCallSettings
if
coordinator.stateAdapter.audioSession.currentRoute.isExternal,
callSettings.speakerOn
{
callSettings = callSettings.withUpdatedSpeakerState(false)
}
await coordinator.stateAdapter.set(
callSettings: callSettings
)
let callSettings = {
var result = initialCallSettings ?? remoteCallSettings
if
coordinator.stateAdapter.audioSession.currentRoute.isExternal,
result.speakerOn
{
result = result.withUpdatedSpeakerState(false)
}
return result
}()

await coordinator
.stateAdapter
.enqueueCallSettings { _ in callSettings }

await coordinator.stateAdapter.set(
videoOptions: .init(preferredCameraPosition: {
35 changes: 10 additions & 25 deletions Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
@@ -131,11 +131,8 @@ final class WebRTCCoordinator: @unchecked Sendable {
func changeCameraMode(
position: CameraPosition
) async throws {
await stateAdapter.set(
callSettings: stateAdapter
.callSettings
.withUpdatedCameraPosition(position)
)
await stateAdapter
.enqueueCallSettings { $0.withUpdatedCameraPosition(position) }
try await stateAdapter.publisher?.didUpdateCameraPosition(
position == .front ? .front : .back
)
@@ -145,44 +142,32 @@
///
/// - Parameter isEnabled: Whether the audio should be enabled.
func changeAudioState(isEnabled: Bool) async {
await stateAdapter.set(
callSettings: stateAdapter
.callSettings
.withUpdatedAudioState(isEnabled)
)
await stateAdapter
.enqueueCallSettings { $0.withUpdatedAudioState(isEnabled) }
}

/// Changes the video state (enabled/disabled) for the call.
///
/// - Parameter isEnabled: Whether the video should be enabled.
func changeVideoState(isEnabled: Bool) async {
await stateAdapter.set(
callSettings: stateAdapter
.callSettings
.withUpdatedVideoState(isEnabled)
)
await stateAdapter
.enqueueCallSettings { $0.withUpdatedVideoState(isEnabled) }
}

/// Changes the audio output state (e.g., speaker or headphones).
///
/// - Parameter isEnabled: Whether the output should be enabled.
func changeSoundState(isEnabled: Bool) async {
await stateAdapter.set(
callSettings: stateAdapter
.callSettings
.withUpdatedAudioOutputState(isEnabled)
)
await stateAdapter
.enqueueCallSettings { $0.withUpdatedAudioOutputState(isEnabled) }
}

/// Changes the speaker state (enabled/disabled) for the call.
///
/// - Parameter isEnabled: Whether the speaker should be enabled.
func changeSpeakerState(isEnabled: Bool) async {
await stateAdapter.set(
callSettings: stateAdapter
.callSettings
.withUpdatedSpeakerState(isEnabled)
)
await stateAdapter
.enqueueCallSettings { $0.withUpdatedSpeakerState(isEnabled) }
}

/// Updates the visibility of a participant's track.
95 changes: 54 additions & 41 deletions Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
@@ -90,6 +90,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
private let peerConnectionsDisposableBag = DisposableBag()

private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
private let callSettingsProcessingQueue = OperationQueue(maxConcurrentOperationCount: 1)
private var queuedTraces: ConsumableBucket<WebRTCTrace> = .init()

/// Initializes the WebRTC state adapter with user details and connection
@@ -133,28 +134,12 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
}

/// Sets the call settings.
func set(
private func set(
callSettings value: CallSettings,
file: StaticString = #file,
function: StaticString = #function,
line: UInt = #line
) {
guard value != callSettings else {
return
}
log.debug(
"""
Updating CallSettings
From: \(callSettings)
To: \(value)
""",
subsystems: .webRTC,
functionName: function,
fileName: file,
lineNumber: line
)
self.callSettings = value
}
) { self.callSettings = value }

/// Sets the initial call settings.
func set(initialCallSettings value: CallSettings?) { self.initialCallSettings = value }
@@ -491,6 +476,40 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
}
}

func enqueueCallSettings(
functionName: StaticString = #function,
fileName: StaticString = #fileID,
lineNumber: UInt = #line,
_ operation: @Sendable @escaping (CallSettings) -> CallSettings
) {
callSettingsProcessingQueue.addTaskOperation { [weak self] in
guard
let self
else {
return
}

let currentCallSettings = await callSettings
let updatedCallSettings = operation(currentCallSettings)
guard
updatedCallSettings != currentCallSettings
else {
return
}

await set(callSettings: updatedCallSettings)

guard
let publisher = await self.publisher
else {
return
}

try await publisher.didUpdateCallSettings(updatedCallSettings)
log.debug("Publisher callSettings updated: \(updatedCallSettings).", subsystems: .webRTC)
}
}

func trace(_ trace: WebRTCTrace) {
if let statsAdapter {
statsAdapter.trace(trace)
@@ -561,25 +580,19 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
return
}

let currentCallSettings = self.callSettings
let possibleNewCallSettings = {
switch event.type {
case .audio:
return currentCallSettings.withUpdatedAudioState(false)
case .video:
return currentCallSettings.withUpdatedVideoState(false)
default:
return currentCallSettings
}
}()

guard
currentCallSettings != possibleNewCallSettings
else {
return
enqueueCallSettings { currentCallSettings in
let possibleNewCallSettings = {
switch event.type {
case .audio:
return currentCallSettings.withUpdatedAudioState(false)
case .video:
return currentCallSettings.withUpdatedVideoState(false)
default:
return currentCallSettings
}
}()
return possibleNewCallSettings
}

set(callSettings: possibleNewCallSettings)
}

// MARK: - Private Helpers
@@ -649,16 +662,16 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {

// MARK: - AudioSessionDelegate

nonisolated func audioSessionAdapterDidUpdateCallSettings(
callSettings: CallSettings
) {
nonisolated func audioSessionAdapterDidUpdateSpeakerOn(_ speakerOn: Bool) {
Task(disposableBag: disposableBag) { [weak self] in
guard let self else {
return
}
await self.set(callSettings: callSettings)
await self.enqueueCallSettings {
$0.withUpdatedSpeakerState(speakerOn)
}
log.debug(
"AudioSession delegated updated call settings: \(callSettings)",
"AudioSession delegated updated speakerOn:\(speakerOn).",
subsystems: .audioSession
)
}