diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift
index 3b3074a98..073d37cb1 100644
--- a/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift
@@ -33,6 +33,9 @@ public protocol AudioSessionProtocol: AnyObject {
     /// A Boolean value indicating whether audio is enabled for the session.
     var isAudioEnabled: Bool { get set }
 
+    /// A Boolean value indicating whether the device has a built-in earpiece.
+    var hasEarpiece: Bool { get }
+
     /// Adds a delegate to receive updates about audio session events.
     /// - Parameter delegate: The delegate conforming to `RTCAudioSessionDelegate`.
     func add(_ delegate: RTCAudioSessionDelegate)
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/CallSettings+RTCAudioSessionConfiguration.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/CallSettings+RTCAudioSessionConfiguration.swift
new file mode 100644
index 000000000..3151ab369
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/CallSettings+RTCAudioSessionConfiguration.swift
@@ -0,0 +1,50 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import StreamWebRTC
+
+extension CallSettings {
+
+    /// The `RTCAudioSessionConfiguration` that corresponds to these settings.
+    var audioSessionConfiguration: RTCAudioSessionConfiguration {
+        let category: AVAudioSession.Category = audioOn == true
+            || speakerOn == true
+            || videoOn == true
+            ? .playAndRecord
+            : .playback
+
+        let mode: AVAudioSession.Mode = category == .playAndRecord
+            ? speakerOn == true ? .videoChat : .voiceChat
+            : .default
+
+        let categoryOptions: AVAudioSession.CategoryOptions = category == .playAndRecord
+            ? .playAndRecord
+            : .playback
+
+        let result = RTCAudioSessionConfiguration.webRTC()
+        result.category = category.rawValue
+        result.mode = mode.rawValue
+        result.categoryOptions = categoryOptions
+
+        return result
+    }
+}
+
+extension RTCAudioSessionConfiguration: @unchecked Sendable {
+
+    override open var description: String {
+        [
+            "RTCAudioSessionConfiguration",
+            "(",
+            [
+                "category:\(AVAudioSession.Category(rawValue: category))",
+                "mode:\(AVAudioSession.Mode(rawValue: mode))",
+                "categoryOptions:\(categoryOptions)"
+            ].joined(separator: ", "),
+            ")"
+        ].joined()
+    }
+}
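Note: the new `CallSettings.audioSessionConfiguration` derives the entire session configuration from three flags. A self-contained sketch of the same decision table using only AVFoundation types (the function name is illustrative, not part of the SDK):

```swift
import AVFoundation

/// Illustrative only: mirrors the CallSettings → category/mode mapping above.
func audioConfiguration(
    audioOn: Bool,
    videoOn: Bool,
    speakerOn: Bool
) -> (category: AVAudioSession.Category, mode: AVAudioSession.Mode) {
    // Any flag that implies capturing or routing live call audio needs
    // record capability; a listen-only session can stay on plain playback.
    let category: AVAudioSession.Category = (audioOn || videoOn || speakerOn)
        ? .playAndRecord
        : .playback

    // .videoChat favors the loudspeaker, .voiceChat favors the earpiece;
    // playback-only sessions keep the system default mode.
    let mode: AVAudioSession.Mode = category == .playAndRecord
        ? (speakerOn ? .videoChat : .voiceChat)
        : .default

    return (category, mode)
}

// An audio-only call stays earpiece-friendly: playAndRecord / voiceChat.
print(audioConfiguration(audioOn: true, videoOn: false, speakerOn: false))
```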
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift
index 57c25bec3..e96587d26 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift
@@ -4,33 +4,6 @@
 
 import StreamWebRTC
 
-extension RTCAudioSessionConfiguration {
-    /// Provides a default configuration for `RTCAudioSessionConfiguration`
-    /// tailored for WebRTC audio sessions, setting it to be suitable for
-    /// both playback and recording.
-    static let `default`: RTCAudioSessionConfiguration = {
-        // Creates a new WebRTC-specific audio session configuration instance.
-        let configuration = RTCAudioSessionConfiguration.webRTC()
-
-        // Sets the audio mode to the default system mode. This typically
-        // configures the session to use system default settings for
-        // playback and recording.
-        configuration.mode = AVAudioSession.Mode.videoChat.rawValue
-
-        // Sets the audio category to `.playAndRecord`, enabling the session
-        // to handle both audio playback and recording simultaneously.
-        // This category is commonly used in applications that require
-        // two-way audio, like video calls.
-        configuration.category = AVAudioSession.Category.playAndRecord.rawValue
-
-        configuration.categoryOptions = .playAndRecord
-
-        // Returns the fully configured default WebRTC audio session
-        // configuration.
-        return configuration
-    }()
-}
-
 extension AVAudioSession.CategoryOptions {
 
     static var playAndRecord: AVAudioSession.CategoryOptions = [
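Note: with `RTCAudioSessionConfiguration.default` gone, the adapter no longer applies a fixed configuration at construction time. Instead, `StreamAudioSessionAdapter` (below) derives the configuration from the active `CallSettings` and applies it lazily on the first update, tracked by its `hasBeenConfigured` flag. A self-contained sketch of that pattern, with hypothetical names:

```swift
/// Hypothetical illustration of the configure-on-first-update pattern used
/// by StreamAudioSessionAdapter below; not the SDK's actual API.
final class DeferredConfigurator {
    private var hasBeenConfigured = false

    /// Applies the full configuration exactly once. Later calls fall
    /// through so regular updates can take the cheaper incremental path.
    func configureIfNeeded(_ apply: () throws -> Void) rethrows -> Bool {
        guard !hasBeenConfigured else { return false }
        try apply()
        hasBeenConfigured = true
        return true
    }
}

let configurator = DeferredConfigurator()
_ = try? configurator.configureIfNeeded {
    // e.g. try session.setConfiguration(settings.audioSessionConfiguration)
    print("configured once")
}
```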
diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift
index c852fcaf7..a9a5a2d24 100644
--- a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift
@@ -18,9 +18,10 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
     /// that manages WebRTC audio settings.
     private let audioSession: AudioSessionProtocol
     private let serialQueue = SerialActorQueue()
+    private var hasBeenConfigured = false
 
-    /// The current active call settings, or `nil` if no active call is in session.
-    @Atomic private(set) var activeCallSettings: CallSettings?
+    /// The current active call settings.
+    @Atomic private(set) var activeCallSettings: CallSettings
 
     private let canRecordSubject = PassthroughSubject<Bool, Never>()
     var canRecordPublisher: AnyPublisher<Bool, Never> { canRecordSubject.eraseToAnyPublisher() }
@@ -34,8 +35,12 @@
-    /// for WebRTC.w
+    /// for WebRTC.
     /// - Parameter audioSession: An `AudioSessionProtocol` instance. Defaults
     ///   to `StreamRTCAudioSession`.
-    required init(_ audioSession: AudioSessionProtocol = StreamRTCAudioSession()) {
+    required init(
+        _ audioSession: AudioSessionProtocol = StreamRTCAudioSession(),
+        callSettings: CallSettings
+    ) {
         self.audioSession = audioSession
+        activeCallSettings = callSettings
         super.init()
 
-        /// Update the active call's `audioSession` to make available to other components.
+        /// Update the active call's `audioSession` to make it available to other components.
@@ -44,24 +49,9 @@
         audioSession.add(self)
         audioSession.useManualAudio = true
         audioSession.isAudioEnabled = true
-
-        let configuration = RTCAudioSessionConfiguration.default
-        serialQueue.async {
-            await audioSession.updateConfiguration(
-                functionName: #function,
-                file: #fileID,
-                line: #line
-            ) {
-                try $0.setConfiguration(configuration)
-                log.debug(
-                    "AudioSession updated \(configuration)",
-                    subsystems: .audioSession
-                )
-            }
-        }
     }
 
-    func dismantle() {
+    nonisolated func dismantle() {
         if StreamActiveCallAudioSessionKey.currentValue === self {
             // Reset activeCall audioSession.
             StreamActiveCallAudioSessionKey.currentValue = nil
@@ -77,19 +67,19 @@
     ) {
         let oldValue = activeCallSettings
         activeCallSettings = settings
-        didUpdate(settings, oldValue: activeCallSettings)
+        didUpdate(settings, oldValue: oldValue)
     }
 
     func prepareForRecording() {
-        guard let activeCallSettings, !activeCallSettings.audioOn else {
+        guard !activeCallSettings.audioOn else {
             return
         }
 
         let settings = activeCallSettings
            .withUpdatedAudioState(true)
         let oldValue = activeCallSettings
-        self.activeCallSettings = settings
-        didUpdate(settings, oldValue: activeCallSettings)
+        activeCallSettings = settings
+        didUpdate(settings, oldValue: oldValue)
     }
 
     func requestRecordPermission() async -> Bool {
@@ -127,10 +117,6 @@
             subsystems: .audioSession
         )
 
-        guard let activeCallSettings else {
-            return
-        }
-
         guard session.hasEarpiece else {
             if activeCallSettings.speakerOn != session.currentRoute.isSpeaker {
                 delegate?.audioSessionAdapterDidUpdateCallSettings(
@@ -162,8 +148,28 @@
 
     // MARK: - Private helpers
 
+    private func configureAudioSession(settings: CallSettings) async {
+        let configuration = settings.audioSessionConfiguration
+        await audioSession.updateConfiguration(
+            functionName: #function,
+            file: #fileID,
+            line: #line
+        ) { [weak self] in
+            guard let self else {
+                return
+            }
+
+            try $0.setConfiguration(configuration)
+            hasBeenConfigured = true
+            log.debug(
+                "AudioSession was configured with \(configuration)",
+                subsystems: .audioSession
+            )
+        }
+    }
+
     private func didUpdate(
-        _ callSettings: CallSettings?,
+        _ callSettings: CallSettings,
         oldValue: CallSettings?,
         file: StaticString = #file,
         functionName: StaticString = #function,
@@ -174,7 +180,12 @@
             return
         }
 
-        if callSettings?.audioOn == false, oldValue?.audioOn == true {
+        guard hasBeenConfigured else {
+            await configureAudioSession(settings: callSettings)
+            return
+        }
+
+        if callSettings.audioOn == false, oldValue?.audioOn == true {
             log.debug(
                 "Will defer execution until recording has stopped.",
                 subsystems: .audioSession,
@@ -185,13 +196,14 @@
             await deferExecutionUntilRecordingIsStopped()
         }
 
-        let category: AVAudioSession.Category = callSettings?.audioOn == true || callSettings?
-            .speakerOn == true || callSettings?.videoOn == true
+        let category: AVAudioSession.Category = callSettings.audioOn
+            || callSettings.speakerOn
+            || callSettings.videoOn
             ? .playAndRecord
             : .playback
 
         let mode: AVAudioSession.Mode = category == .playAndRecord
-            ? callSettings?.speakerOn == true ? .videoChat : .voiceChat
+            ? callSettings.speakerOn == true ? .videoChat : .voiceChat
             : .default
 
         let categoryOptions: AVAudioSession.CategoryOptions = category == .playAndRecord
@@ -199,7 +211,7 @@
             : .playback
 
         let overridePort: AVAudioSession.PortOverride? = category == .playAndRecord
-            ? callSettings?.speakerOn == true ? .speaker : AVAudioSession.PortOverride.none
+            ? callSettings.speakerOn == true ? .speaker : AVAudioSession.PortOverride.none
             : nil
 
         await audioSession.updateConfiguration(
@@ -234,7 +246,7 @@
         }
 
         log.debug(
-            "AudioSession updated with callSettings: \(callSettings?.description ?? "nil")",
+            "AudioSession updated with callSettings: \(callSettings.description)",
             subsystems: .audioSession,
             functionName: functionName,
             fileName: file,
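Note: `didUpdate` now receives a non-optional `CallSettings`, passes the correctly captured `oldValue` (previously it passed the already-updated `activeCallSettings`), and derives category, mode, options, and a speaker override from the settings. For reference, a hedged sketch of how those four values map onto the raw AVAudioSession API; the literal options below are illustrative (the SDK uses its own `AVAudioSession.CategoryOptions.playAndRecord` set), and the adapter itself goes through `audioSession.updateConfiguration` rather than the shared instance:

```swift
import AVFoundation

/// Illustrative only: applies a derived configuration to the shared session.
func apply(
    category: AVAudioSession.Category,
    mode: AVAudioSession.Mode,
    options: AVAudioSession.CategoryOptions,
    overridePort: AVAudioSession.PortOverride?
) throws {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(category, mode: mode, options: options)
    if let overridePort {
        // .speaker forces the loudspeaker; .none falls back to the
        // category's default route (the earpiece for .playAndRecord).
        try session.overrideOutputAudioPort(overridePort)
    }
}

// Example: speaker enabled during a video call.
try? apply(
    category: .playAndRecord,
    mode: .videoChat,
    options: [.allowBluetooth, .defaultToSpeaker],
    overridePort: .speaker
)
```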
diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift
index 4d55fe310..9d04641bb 100644
--- a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift
@@ -46,6 +46,9 @@ final class StreamRTCAudioSession: AudioSessionProtocol {
     /// an external output, like Bluetooth or headphones.
     var isUsingExternalOutput: Bool { currentRoute.isExternal }
 
+    /// A Boolean value indicating whether the device has a built-in earpiece.
+    var hasEarpiece: Bool { source.hasEarpiece }
+
     /// A Boolean value indicating whether the audio session uses manual
     /// audio routing.
     var useManualAudio: Bool {
diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift
index 0d7c93d21..dbd62a988 100644
--- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift
+++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift
@@ -370,12 +370,6 @@ extension WebRTCCoordinator.StateMachine.Stage {
                         return
                     }
 
-                    context
-                        .coordinator?
-                        .stateAdapter
-                        .audioSession
-                        .didUpdateCallSettings(callSettings)
-
                     try await publisher.didUpdateCallSettings(callSettings)
                     log.debug("Publisher and AudioSession callSettings updated.", subsystems: .webRTC)
                 } catch {
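Note: `hasEarpiece` is implemented in `AVAudioSession+HasEarpiece.swift` (registered in the project file below, but not shown in this diff). A plausible stand-in, purely illustrative and not necessarily the SDK's implementation:

```swift
import AVFoundation
import UIKit

extension AVAudioSession {
    /// Hypothetical: among iOS devices, only phones ship a built-in
    /// receiver (earpiece), so the interface idiom is a reasonable proxy.
    var hasEarpieceSketch: Bool {
        UIDevice.current.userInterfaceIdiom == .phone
    }
}
```

The guard in the adapter's route-change handling uses this to skip earpiece-specific logic on devices such as iPads, where `speakerOn` is instead reconciled with the actual output route.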
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
index 9549bc9c8..33b62a721 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
@@ -42,7 +42,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
     let peerConnectionFactory: PeerConnectionFactory
     let videoCaptureSessionProvider: VideoCaptureSessionProvider
     let screenShareSessionProvider: ScreenShareSessionProvider
-    let audioSession: StreamAudioSessionAdapter = .init()
+    let audioSession: StreamAudioSessionAdapter = .init(callSettings: .init())
 
     /// Published properties that represent different parts of the WebRTC state.
     @Published private(set) var sessionID: String = UUID().uuidString
@@ -119,6 +119,12 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
         self.screenShareSessionProvider = screenShareSessionProvider
 
         audioSession.delegate = self
+        Task {
+            await $callSettings
+                .removeDuplicates()
+                .sink { [weak audioSession] in audioSession?.didUpdateCallSettings($0) }
+                .store(in: disposableBag)
+        }
     }
 
     deinit {
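Note: this reactive pipeline replaces the imperative push that was removed from the Joined stage above. The state adapter observes its own published `callSettings`, drops consecutive duplicates, and forwards changes to the audio session through a weak reference so the subscription cannot retain it. A self-contained sketch of the same wiring (types are stand-ins, not SDK API):

```swift
import Combine

struct Settings: Equatable {
    var speakerOn = false
}

final class AudioAdapter {
    func didUpdateCallSettings(_ settings: Settings) {
        print("audio session updated: \(settings)")
    }
}

final class StateAdapter {
    @Published private(set) var callSettings = Settings()
    let audioSession = AudioAdapter()
    private var cancellables = Set<AnyCancellable>()

    init() {
        $callSettings
            .removeDuplicates() // skip redundant session reconfiguration
            .sink { [weak audioSession] in audioSession?.didUpdateCallSettings($0) }
            .store(in: &cancellables)
    }

    func toggleSpeaker() { callSettings.speakerOn.toggle() }
}

// Prints once for the initial value at subscription, then once per change;
// setting an equal value again would be filtered by removeDuplicates().
let adapter = StateAdapter()
adapter.toggleSpeaker()
```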
= ""; @@ -7209,6 +7212,7 @@ 84DC38B829ADFCFD00946713 /* UpdateUserPermissionsResponse.swift in Sources */, 84DC38C029ADFCFD00946713 /* UserRequest.swift in Sources */, 84DC389629ADFCFD00946713 /* EndCallResponse.swift in Sources */, + 404AD5222D53807B00A820A4 /* CallSettings+RTCAudioSessionConfiguration.swift in Sources */, 847BE09C29DADE0100B55D21 /* Call.swift in Sources */, 848CCCEF2AB8ED8F002E83A2 /* ThumbnailsSettings.swift in Sources */, 40C4DF4D2C1C2CD80035DBC2 /* DefaultParticipantAutoLeavePolicy.swift in Sources */,