diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f57ea775..97f7cfe26 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). # Upcoming +### ✅ Added +- You can now configure the policy used by the SDK's AudioSession. `DefaultAudioSessionPolicy` is meant to be used for active participants in a call (1:1, group calls) and `OwnCapabilitiesAudioSessionPolicy` was designed to be used by call participants who don't actively participate in the call, but may do so in the future (e.g. Livestream viewers, Twitter Space listeners, etc.) + ### 🐞 Fixed - When a call is being created from another device than the one starting the call, if you don't provide any members, the SDK will get the information from the backend [#660](https://github.com/GetStream/stream-video-swift/pull/660) - The `OutgoingCallView` provided by the default `ViewFactory` implementation won't show the current user in the ringing member bubbles [#660](https://github.com/GetStream/stream-video-swift/pull/660) diff --git a/DemoApp/Sources/Components/AppEnvironment.swift b/DemoApp/Sources/Components/AppEnvironment.swift index 36bb3e385..5079d67f1 100644 --- a/DemoApp/Sources/Components/AppEnvironment.swift +++ b/DemoApp/Sources/Components/AppEnvironment.swift @@ -534,6 +534,35 @@ extension AppEnvironment { }() } +extension AppEnvironment { + + enum AudioSessionPolicyDebugConfiguration: Hashable, Debuggable, Sendable { + case `default`, ownCapabilities + + var title: String { + switch self { + case .default: + return "Default" + case .ownCapabilities: + return "OwnCapabilities" + } + } + + var value: AudioSessionPolicy { + switch self { + case .default: + return DefaultAudioSessionPolicy() + case .ownCapabilities: + return OwnCapabilitiesAudioSessionPolicy() + } + } + } + + static var audioSessionPolicy: AudioSessionPolicyDebugConfiguration = { + .default + }() +} + extension AppEnvironment { static var availableCallTypes: [String]
= [ diff --git a/DemoApp/Sources/Views/CallView/CallingView/SimpleCallingView.swift b/DemoApp/Sources/Views/CallView/CallingView/SimpleCallingView.swift index 153fabcf9..654d81245 100644 --- a/DemoApp/Sources/Views/CallView/CallingView/SimpleCallingView.swift +++ b/DemoApp/Sources/Views/CallView/CallingView/SimpleCallingView.swift @@ -182,6 +182,11 @@ struct SimpleCallingView: View { ) } + private func setAudioSessionPolicyOverride(for callId: String) async throws { + let call = streamVideo.call(callType: callType, callId: callId) + try await call.updateAudioSessionPolicy(AppEnvironment.audioSessionPolicy.value) + } + private func parseURLIfRequired(_ text: String) { let adapter = DeeplinkAdapter() guard @@ -215,6 +220,7 @@ struct SimpleCallingView: View { switch action { case .lobby: await setPreferredVideoCodec(for: text) + try? await setAudioSessionPolicyOverride(for: text) viewModel.enterLobby( callType: callType, callId: text, @@ -222,9 +228,11 @@ struct SimpleCallingView: View { ) case .join: await setPreferredVideoCodec(for: text) + try? await setAudioSessionPolicyOverride(for: text) viewModel.joinCall(callType: callType, callId: text) case let .start(callId): await setPreferredVideoCodec(for: callId) + try? 
await setAudioSessionPolicyOverride(for: callId) viewModel.startCall( callType: callType, callId: callId, diff --git a/DemoApp/Sources/Views/Login/DebugMenu.swift b/DemoApp/Sources/Views/Login/DebugMenu.swift index 852198748..a808de345 100644 --- a/DemoApp/Sources/Views/Login/DebugMenu.swift +++ b/DemoApp/Sources/Views/Login/DebugMenu.swift @@ -118,6 +118,10 @@ struct DebugMenu: View { } } + @State private var audioSessionPolicy = AppEnvironment.audioSessionPolicy { + didSet { AppEnvironment.audioSessionPolicy = audioSessionPolicy } + } + var body: some View { Menu { makeMenu( @@ -177,6 +181,12 @@ struct DebugMenu: View { label: "ClosedCaptions Integration" ) { self.closedCaptionsIntegration = $0 } + makeMenu( + for: [.default, .ownCapabilities], + currentValue: audioSessionPolicy, + label: "AudioSession policy" + ) { self.audioSessionPolicy = $0 } + makeMenu( for: [.default, .lastParticipant], currentValue: autoLeavePolicy, diff --git a/Sources/StreamVideo/Call.swift b/Sources/StreamVideo/Call.swift index df00f89ed..45b56d833 100644 --- a/Sources/StreamVideo/Call.swift +++ b/Sources/StreamVideo/Call.swift @@ -1335,6 +1335,17 @@ public class Call: @unchecked Sendable, WSEventsSubscriber { } } + // MARK: - AudioSession + + /// Updates the current audio session policy for the call. + /// + /// - Parameter policy: A conforming `AudioSessionPolicy` that defines + /// the audio session configuration to be applied. + /// - Throws: An error if the update fails. 
+ public func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async throws { + try await callController.updateAudioSessionPolicy(policy) + } + // MARK: - Internal internal func update(reconnectionStatus: ReconnectionStatus) { diff --git a/Sources/StreamVideo/Controllers/CallController.swift b/Sources/StreamVideo/Controllers/CallController.swift index 6d9f16e51..a5c3e426d 100644 --- a/Sources/StreamVideo/Controllers/CallController.swift +++ b/Sources/StreamVideo/Controllers/CallController.swift @@ -468,6 +468,10 @@ class CallController: @unchecked Sendable { ) } + func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async throws { + try await webRTCCoordinator.updateAudioSessionPolicy(policy) + } + // MARK: - private private func handleParticipantsUpdated() { diff --git a/Sources/StreamVideo/Models/CallSettings.swift b/Sources/StreamVideo/Models/CallSettings.swift index 1bd252b1c..6211f3dca 100644 --- a/Sources/StreamVideo/Models/CallSettings.swift +++ b/Sources/StreamVideo/Models/CallSettings.swift @@ -5,7 +5,7 @@ import Combine /// Represents the settings for a call. -public final class CallSettings: ObservableObject, Sendable, Equatable, CustomStringConvertible { +public final class CallSettings: ObservableObject, Sendable, Equatable, ReflectiveStringConvertible { /// Whether the audio is on for the current user. public let audioOn: Bool /// Whether the video is on for the current user. @@ -46,17 +46,6 @@ public final class CallSettings: ObservableObject, Sendable, Equatable, CustomSt public var shouldPublish: Bool { audioOn || videoOn } - - public var description: String { - """ - CallSettings - - audioOn: \(audioOn) - - videoOn: \(videoOn) - - speakerOn: \(speakerOn) - - audioOutputOn: \(audioOutputOn) - - cameraPosition: \(cameraPosition == .front ? "front" : "back") - """ - } } /// The camera position. 
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift index e8500ec95..00a24e8ba 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift @@ -12,7 +12,7 @@ import StreamWebRTC /// publishing the average power of the audio signal. Additionally, it adjusts its behavior based on the /// presence of an active call, automatically stopping recording if needed. open class StreamCallAudioRecorder: @unchecked Sendable { - private struct StartRecordingRequest: Hashable { var hasActiveCall, ignoreActiveCall, isRecording: Bool } + private let processingQueue = SerialActorQueue() @Injected(\.activeCallProvider) private var activeCallProvider @Injected(\.activeCallAudioSession) private var activeCallAudioSession @@ -20,6 +20,11 @@ open class StreamCallAudioRecorder: @unchecked Sendable { /// The builder used to create the AVAudioRecorder instance. let audioRecorderBuilder: AVAudioRecorderBuilder + private let _isRecordingSubject: CurrentValueSubject = .init(false) + var isRecordingPublisher: AnyPublisher { + _isRecordingSubject.eraseToAnyPublisher() + } + /// A private task responsible for setting up the recorder in the background. private var setUpTask: Task? @@ -34,7 +39,12 @@ open class StreamCallAudioRecorder: @unchecked Sendable { /// A public publisher that exposes the average power of the audio signal. open private(set) lazy var metersPublisher: AnyPublisher = _metersPublisher.eraseToAnyPublisher() - @Atomic private var isRecording: Bool = false + @Atomic private(set) var isRecording: Bool = false { + willSet { + activeCallAudioSession?.isRecording = newValue + _isRecordingSubject.send(newValue) + } + } /// Indicates whether an active call is present, influencing recording behaviour. 
private var hasActiveCall: Bool = false { @@ -47,7 +57,7 @@ open class StreamCallAudioRecorder: @unchecked Sendable { } } - private var lastStartRecordingRequest: StartRecordingRequest? + private let disposableBag = DisposableBag() /// Initializes the recorder with a filename. /// @@ -84,76 +94,94 @@ open class StreamCallAudioRecorder: @unchecked Sendable { /// - ignoreActiveCall: Instructs the internal AudioRecorder to ignore the existence of an activeCall /// and start recording anyway. open func startRecording(ignoreActiveCall: Bool = false) async { - do { - let audioRecorder = try await setUpAudioCaptureIfRequired() - let startRecordingRequest = StartRecordingRequest( - hasActiveCall: hasActiveCall, - ignoreActiveCall: ignoreActiveCall, - isRecording: isRecording - ) - - guard startRecordingRequest != lastStartRecordingRequest else { - lastStartRecordingRequest = startRecordingRequest + await performOperation { [weak self] in + guard + let self, + !isRecording + else { return } - lastStartRecordingRequest = startRecordingRequest + var audioRecorder: AVAudioRecorder? 
+ do { + audioRecorder = try await setUpAudioCaptureIfRequired() + } catch { + log.error("🎙️Failed to set up recording session", error: error) + } + guard - startRecordingRequest.hasActiveCall || startRecordingRequest.ignoreActiveCall, - !startRecordingRequest.isRecording + let audioRecorder, + hasActiveCall || ignoreActiveCall else { - log.debug( - """ - 🎙️Attempted to start recording but failed - hasActiveCall: \(startRecordingRequest.hasActiveCall) - ignoreActiveCall: \(startRecordingRequest.ignoreActiveCall) - isRecording: \(startRecordingRequest.isRecording) - """ - ) - return + return // No-op } + + await deferSessionActivation() audioRecorder.record() isRecording = true audioRecorder.isMeteringEnabled = true - log.debug("️🎙️Recording started.") - updateMetersTimerCancellable = Foundation.Timer + updateMetersTimerCancellable?.cancel() + disposableBag.remove("update-meters") + updateMetersTimerCancellable = Foundation + .Timer .publish(every: 0.1, on: .main, in: .default) .autoconnect() - .sink { [weak self, audioRecorder] _ in - Task { [weak self, audioRecorder] in - guard let self else { return } - audioRecorder.updateMeters() - self._metersPublisher.send(audioRecorder.averagePower(forChannel: 0)) - } + .sinkTask(storeIn: disposableBag, identifier: "update-meters") { [weak self, audioRecorder] _ in + audioRecorder.updateMeters() + self?._metersPublisher.send(audioRecorder.averagePower(forChannel: 0)) } - } catch { - isRecording = false - log.error("🎙️Failed to set up recording session", error: error) + + log.debug("️🎙️Recording started.") } } /// Stops recording audio asynchronously. 
open func stopRecording() async { - updateMetersTimerCancellable?.cancel() - updateMetersTimerCancellable = nil + await performOperation { [weak self] in + self?.updateMetersTimerCancellable?.cancel() + self?.updateMetersTimerCancellable = nil + self?.disposableBag.remove("update-meters") - guard - isRecording, - let audioRecorder = await audioRecorderBuilder.result - else { - return - } + guard + let self, + isRecording, + let audioRecorder = await audioRecorderBuilder.result + else { + return + } - audioRecorder.stop() - lastStartRecordingRequest = nil - isRecording = false - removeRecodingFile() - log.debug("️🎙️Recording stopped.") + audioRecorder.stop() + + // Ensure that recorder has stopped recording. + _ = try? await audioRecorder + .publisher(for: \.isRecording) + .filter { $0 == false } + .nextValue(timeout: 0.5) + + isRecording = false + removeRecodingFile() + + log.debug("️🎙️Recording stopped.") + } } // MARK: - Private helpers + private func performOperation( + file: StaticString = #file, + line: UInt = #line, + _ operation: @Sendable @escaping () async -> Void + ) async { + do { + try await processingQueue.sync { + await operation() + } + } catch { + log.error(ClientError(with: error, file, line)) + } + } + private func setUp() { setUpTask?.cancel() setUpTask = Task { @@ -170,9 +198,7 @@ open class StreamCallAudioRecorder: @unchecked Sendable { .hasActiveCallPublisher .receive(on: DispatchQueue.global(qos: .utility)) .removeDuplicates() - .sink { [weak self] in - self?.hasActiveCall = $0 - } + .assign(to: \.hasActiveCall, onWeak: self) } private func setUpAudioCaptureIfRequired() async throws -> AVAudioRecorder { @@ -200,6 +226,16 @@ open class StreamCallAudioRecorder: @unchecked Sendable { log.debug("🎙️Cannot delete \(fileURL).\(error)") } } + + private func deferSessionActivation() async { + guard let activeCallAudioSession else { + return + } + _ = try? 
await activeCallAudioSession + .$category + .filter { $0 == .playAndRecord } + .nextValue(timeout: 1) + } } /// Provides the default value of the `StreamCallAudioRecorder` class. diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift new file mode 100644 index 000000000..f22c4910c --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift @@ -0,0 +1,27 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +/// Represents the audio session configuration. +public struct AudioSessionConfiguration: ReflectiveStringConvertible, + Equatable { + /// The audio session category. + var category: AVAudioSession.Category + /// The audio session mode. + var mode: AVAudioSession.Mode + /// The audio session options. + var options: AVAudioSession.CategoryOptions + /// The audio session port override. + var overrideOutputAudioPort: AVAudioSession.PortOverride? + + /// Compares two `AudioSessionConfiguration` instances for equality. + public static func == (lhs: Self, rhs: Self) -> Bool { + lhs.category == rhs.category && + lhs.mode == rhs.mode && + lhs.options.rawValue == rhs.options.rawValue && + lhs.overrideOutputAudioPort?.rawValue == + rhs.overrideOutputAudioPort?.rawValue + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift deleted file mode 100644 index a539de4ee..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/AudioSessionProtocol.swift +++ /dev/null @@ -1,107 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Foundation -import StreamWebRTC - -/// A protocol defining the interface for managing an audio session, -/// with properties and methods to control audio settings, activation, -/// and routing configurations. 
-public protocol AudioSessionProtocol: AnyObject { - - /// A Boolean value indicating whether the audio session is active. - var isActive: Bool { get } - - /// The current route description for the audio session. - var currentRoute: AVAudioSessionRouteDescription { get } - - /// The audio category of the session. - var category: String { get } - - /// A Boolean value indicating whether the audio session uses speaker output. - var isUsingSpeakerOutput: Bool { get } - - /// A Boolean value indicating whether the audio session uses an external - /// audio output, such as headphones or Bluetooth. - var isUsingExternalOutput: Bool { get } - - /// A Boolean value indicating whether the session uses manual audio routing. - var useManualAudio: Bool { get set } - - /// A Boolean value indicating whether audio is enabled for the session. - var isAudioEnabled: Bool { get set } - - /// Adds a delegate to receive updates about audio session events. - /// - Parameter delegate: The delegate conforming to `RTCAudioSessionDelegate`. - func add(_ delegate: RTCAudioSessionDelegate) - - /// Sets the audio mode of the session. - /// - Parameter mode: The audio mode to set, such as `.videoChat` or `.voiceChat`. - /// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked. - /// Prefer wrapping this method using `updateConfiguration`. - func setMode(_ mode: String) throws - - /// Configures the audio category and options for the session. - /// - Parameters: - /// - category: The audio category to set, like `.playAndRecord`. - /// - categoryOptions: Options for the audio category, such as - /// `.allowBluetooth` or `.defaultToSpeaker`. - /// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked. - /// Prefer wrapping this method using `updateConfiguration`. 
- func setCategory( - _ category: String, - with categoryOptions: AVAudioSession.CategoryOptions - ) throws - - /// Activates or deactivates the audio session. - /// - Parameter isActive: A Boolean indicating whether the session - /// should be activated. - /// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked. - /// Prefer wrapping this method using `updateConfiguration`. - func setActive(_ isActive: Bool) throws - - /// Sets the session configuration for WebRTC audio settings. - /// - Parameter configuration: The configuration to apply to the session. - /// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked. - /// Prefer wrapping this method using `updateConfiguration`. - func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws - - /// Overrides the current output audio port for the session. - /// - Parameter port: The port to use, such as `.speaker` or `.none`. - /// - Throws: An error if setting the mode fails, usually because the configuration hasn't been locked. - /// Prefer wrapping this method using `updateConfiguration`. - func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws - - /// Updates the audio session configuration by performing an asynchronous - /// operation. - /// - Parameters: - /// - functionName: The name of the calling function. - /// - file: The source file of the calling function. - /// - line: The line number of the calling function. - /// - block: The closure to execute, providing the audio session for - /// configuration updates. - func updateConfiguration( - functionName: StaticString, - file: StaticString, - line: UInt, - _ block: @escaping (AudioSessionProtocol) throws -> Void - ) - - /// Requests permission to record audio from the user. - /// - Returns: A Boolean indicating whether permission was granted. 
- func requestRecordPermission() async -> Bool -} - -extension AVAudioSession { - /// Asynchronously requests permission to record audio. - /// - Returns: A Boolean indicating whether permission was granted. - private func requestRecordPermission() async -> Bool { - await withCheckedContinuation { continuation in - self.requestRecordPermission { result in - continuation.resume(returning: result) - } - } - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+RequestRecordPermission.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+RequestRecordPermission.swift new file mode 100644 index 000000000..7214bb3b1 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+RequestRecordPermission.swift @@ -0,0 +1,18 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation + +extension AVAudioSession { + /// Asynchronously requests permission to record audio. + /// - Returns: A Boolean indicating whether permission was granted. + private func requestRecordPermission() async -> Bool { + await withCheckedContinuation { continuation in + self.requestRecordPermission { result in + continuation.resume(returning: result) + } + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift new file mode 100644 index 000000000..87156adf4 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift @@ -0,0 +1,19 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import StreamWebRTC + +extension AVAudioSession.CategoryOptions { + + /// Category options for play and record. 
+ static var playAndRecord: AVAudioSession.CategoryOptions = [ + .allowBluetooth, + .allowBluetoothA2DP, + .allowAirPlay + ] + + /// Category options for playback. + static var playback: AVAudioSession.CategoryOptions = [] +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategory+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategory+Convenience.swift new file mode 100644 index 000000000..52deb75bf --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategory+Convenience.swift @@ -0,0 +1,14 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +// MARK: - AVAudioSession.Category + +extension AVAudioSession.Category: CustomStringConvertible { + /// Returns the raw string value of the category. + public var description: String { + rawValue + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+CategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift similarity index 100% rename from Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+CategoryOptions+Convenience.swift rename to Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift new file mode 100644 index 000000000..b92ef9b87 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift @@ -0,0 +1,14 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +// MARK: - AVAudioSession.Mode + +extension AVAudioSession.Mode: CustomStringConvertible { + /// Returns the raw string value of the mode. 
+ public var description: String { + rawValue + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortDescription+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortDescription+Convenience.swift deleted file mode 100644 index 70446bf91..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortDescription+Convenience.swift +++ /dev/null @@ -1,11 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation - -extension AVAudioSessionPortDescription { - override public var description: String { - "" - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift new file mode 100644 index 000000000..61c29b07c --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift @@ -0,0 +1,19 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +extension AVAudioSession.PortOverride: CustomStringConvertible { + /// Returns a string representing the port override value. 
+ public var description: String { + switch self { + case .none: + return "None" + case .speaker: + return "Speaker" + @unknown default: + return "Unknown" + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession_RouteChangeReason+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteChangeReason+Convenience.swift similarity index 100% rename from Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession_RouteChangeReason+Convenience.swift rename to Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteChangeReason+Convenience.swift diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift index 8a4901338..168bc8bfb 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionRouteDescription+Convenience.swift @@ -7,12 +7,15 @@ import AVFoundation extension AVAudioSessionRouteDescription { override open var description: String { - let inputNames = inputs.map(\.portName).joined(separator: ",") let inputTypes = inputs.map(\.portType.rawValue).joined(separator: ",") - - let outputNames = outputs.map(\.portName).joined(separator: ",") let outputTypes = outputs.map(\.portType.rawValue).joined(separator: ",") - return "AudioSessionRoute isExternal:\(isExternal) input:[name:\(inputNames) types:\(inputTypes)] output:[name:\(outputNames) types:\(outputTypes)]." + let wrapperKey = isExternal ? 
".external" : ".builtIn" + return [ + wrapperKey, + "(", + ["inputs:\(inputTypes)", "outputs:\(outputTypes)"].joined(separator: ", "), + ")" + ].joined() } /// A set of port types that represent external audio outputs, such as diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift deleted file mode 100644 index cf2b52446..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/RTCAudioSessionConfiguration+Default.swift +++ /dev/null @@ -1,30 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import StreamWebRTC - -extension RTCAudioSessionConfiguration { - /// Provides a default configuration for `RTCAudioSessionConfiguration` - /// tailored for WebRTC audio sessions, setting it to be suitable for - /// both playback and recording. - static let `default`: RTCAudioSessionConfiguration = { - // Creates a new WebRTC-specific audio session configuration instance. - let configuration = RTCAudioSessionConfiguration.webRTC() - - // Sets the audio mode to the default system mode. This typically - // configures the session to use system default settings for - // playback and recording. - configuration.mode = AVAudioSession.Mode.default.rawValue - - // Sets the audio category to `.playAndRecord`, enabling the session - // to handle both audio playback and recording simultaneously. - // This category is commonly used in applications that require - // two-way audio, like video calls. - configuration.category = AVAudioSession.Category.playAndRecord.rawValue - - // Returns the fully configured default WebRTC audio session - // configuration. 
- return configuration - }() -} diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/AudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/AudioSessionPolicy.swift new file mode 100644 index 000000000..6ed8a7a1b --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/AudioSessionPolicy.swift @@ -0,0 +1,21 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// Defines a policy for configuring the audio session. +public protocol AudioSessionPolicy: Sendable { + + /// Returns the audio session configuration for the given call settings + /// and own capabilities. + /// + /// - Parameters: + /// - callSettings: The current call settings. + /// - ownCapabilities: The set of the user's own audio capabilities. + /// - Returns: The audio session configuration. + func configuration( + for callSettings: CallSettings, + ownCapabilities: Set + ) -> AudioSessionConfiguration +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift new file mode 100644 index 000000000..b9bc89d93 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift @@ -0,0 +1,31 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +/// A default implementation of the `AudioSessionPolicy` protocol. +public struct DefaultAudioSessionPolicy: AudioSessionPolicy { + + /// Initializes a new `DefaultAudioSessionPolicy` instance. + public init() {} + + /// Returns the audio session configuration for the given call settings + /// and own capabilities. + /// + /// - Parameters: + /// - callSettings: The current call settings. + /// - ownCapabilities: The set of the user's own audio capabilities. + /// - Returns: The audio session configuration. 
+ public func configuration( + for callSettings: CallSettings, + ownCapabilities: Set + ) -> AudioSessionConfiguration { + .init( + category: .playAndRecord, + mode: callSettings.speakerOn ? .videoChat : .voiceChat, + options: .playAndRecord, + overrideOutputAudioPort: callSettings.speakerOn ? .speaker : AVAudioSession.PortOverride.none + ) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift new file mode 100644 index 000000000..5fd19e332 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift @@ -0,0 +1,73 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation + +/// An audio session policy that considers the user's own capabilities. +/// By using this category, you can allow users that don't have the `sendAudio` capability (e.g. livestream +/// watchers) or their `CallSettings` doesn't require the `playAndRecord` category, to completely mute +/// the audio - while they remain in call - by using the device's physical buttons on `ControlCentre`. +/// +/// - Note: This policy defaults to `playback` category if the user does +/// not have the `sendAudio` capability. If the user has the `sendAudio` +/// capability, then the policy switches between `playback` and `playAndRecord` +/// based on the following criteria: +/// - `CallSettings.audioOn == true` or `CallSettings.speakerOn == true and +/// currentDevice has earpiece`: we use `playAndRecord` category. +/// - Otherwise we use `playback` category. +public struct OwnCapabilitiesAudioSessionPolicy: AudioSessionPolicy { + + private let currentDevice = CurrentDevice.currentValue + + /// Initializes a new `OwnCapabilitiesAudioSessionPolicy` instance. + public init() {} + + /// Returns the audio session configuration based on call settings and + /// user capabilities.
+ /// + /// - Parameters: + /// - callSettings: The current call settings. + /// - ownCapabilities: The set of the user's own capabilities. + /// - Returns: The audio session configuration. + public func configuration( + for callSettings: CallSettings, + ownCapabilities: Set + ) -> AudioSessionConfiguration { + guard ownCapabilities.contains(.sendAudio) else { + return .init( + category: .playback, + mode: .default, + options: .playback, + overrideOutputAudioPort: nil + ) + } + + let currentDeviceHasEarpiece = currentDevice.deviceType == .phone + + let category: AVAudioSession.Category = callSettings.audioOn + || (callSettings.speakerOn && currentDeviceHasEarpiece) + ? .playAndRecord + : .playback + + let mode: AVAudioSession.Mode = category == .playAndRecord + ? callSettings.speakerOn ? .videoChat : .voiceChat + : .default + + let categoryOptions: AVAudioSession.CategoryOptions = category == .playAndRecord + ? .playAndRecord + : .playback + + let overrideOutputAudioPort: AVAudioSession.PortOverride? = category == .playAndRecord + ? callSettings.speakerOn == true ? .speaker : AVAudioSession.PortOverride.none + : nil + + return .init( + category: category, + mode: mode, + options: categoryOptions, + overrideOutputAudioPort: overrideOutputAudioPort + ) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioSessionDelegatePublisher.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioSessionDelegatePublisher.swift new file mode 100644 index 000000000..4449470be --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioSessionDelegatePublisher.swift @@ -0,0 +1,207 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import StreamWebRTC + +/// Enumeration representing all the events published by the delegate. 
+enum AudioSessionEvent { + case didBeginInterruption(session: RTCAudioSession) + + case didEndInterruption(session: RTCAudioSession, shouldResumeSession: Bool) + + case didChangeRoute( + session: RTCAudioSession, + reason: AVAudioSession.RouteChangeReason, + previousRoute: AVAudioSessionRouteDescription + ) + + case mediaServerTerminated(session: RTCAudioSession) + + case mediaServerReset(session: RTCAudioSession) + + case didChangeCanPlayOrRecord( + session: RTCAudioSession, + canPlayOrRecord: Bool + ) + + case didStartPlayOrRecord(session: RTCAudioSession) + + case didStopPlayOrRecord(session: RTCAudioSession) + + case didChangeOutputVolume( + audioSession: RTCAudioSession, + outputVolume: Float + ) + + case didDetectPlayoutGlitch( + audioSession: RTCAudioSession, + totalNumberOfGlitches: Int64 + ) + + case willSetActive(audioSession: RTCAudioSession, active: Bool) + + case didSetActive(audioSession: RTCAudioSession, active: Bool) + + case failedToSetActive( + audioSession: RTCAudioSession, + active: Bool, + error: Error + ) + + case audioUnitStartFailedWithError( + audioSession: RTCAudioSession, + error: Error + ) +} + +// MARK: - Delegate Publisher Class + +/// A delegate that publishes all RTCAudioSessionDelegate events via a Combine PassthroughSubject. +@objc +final class RTCAudioSessionDelegatePublisher: NSObject, RTCAudioSessionDelegate { + + /// The subject used to publish delegate events. + private let subject = PassthroughSubject() + + /// A public publisher that subscribers can listen to. 
+ var publisher: AnyPublisher { + subject.eraseToAnyPublisher() + } + + // MARK: - RTCAudioSessionDelegate Methods + + func audioSessionDidBeginInterruption(_ session: RTCAudioSession) { + subject.send(.didBeginInterruption(session: session)) + } + + func audioSessionDidEndInterruption( + _ session: RTCAudioSession, + shouldResumeSession: Bool + ) { + subject.send( + .didEndInterruption( + session: session, + shouldResumeSession: shouldResumeSession + ) + ) + } + + func audioSessionDidChangeRoute( + _ session: RTCAudioSession, + reason: AVAudioSession.RouteChangeReason, + previousRoute: AVAudioSessionRouteDescription + ) { + subject.send( + .didChangeRoute( + session: session, + reason: reason, + previousRoute: previousRoute + ) + ) + } + + func audioSessionMediaServerTerminated(_ session: RTCAudioSession) { + subject.send(.mediaServerTerminated(session: session)) + } + + func audioSessionMediaServerReset(_ session: RTCAudioSession) { + subject.send(.mediaServerReset(session: session)) + } + + func audioSession( + _ session: RTCAudioSession, + didChangeCanPlayOrRecord canPlayOrRecord: Bool + ) { + subject.send( + .didChangeCanPlayOrRecord( + session: session, + canPlayOrRecord: canPlayOrRecord + ) + ) + } + + func audioSessionDidStartPlayOrRecord(_ session: RTCAudioSession) { + subject.send(.didStartPlayOrRecord(session: session)) + } + + func audioSessionDidStopPlayOrRecord(_ session: RTCAudioSession) { + subject.send(.didStopPlayOrRecord(session: session)) + } + + func audioSession( + _ audioSession: RTCAudioSession, + didChangeOutputVolume outputVolume: Float + ) { + subject.send( + .didChangeOutputVolume( + audioSession: audioSession, + outputVolume: outputVolume + ) + ) + } + + func audioSession( + _ audioSession: RTCAudioSession, + didDetectPlayoutGlitch totalNumberOfGlitches: Int64 + ) { + subject.send( + .didDetectPlayoutGlitch( + audioSession: audioSession, + totalNumberOfGlitches: totalNumberOfGlitches + ) + ) + } + + func audioSession( + _ audioSession: 
RTCAudioSession, + willSetActive active: Bool + ) { + subject.send( + .willSetActive( + audioSession: audioSession, + active: active + ) + ) + } + + func audioSession( + _ audioSession: RTCAudioSession, + didSetActive active: Bool + ) { + subject.send( + .didSetActive( + audioSession: audioSession, + active: active + ) + ) + } + + func audioSession( + _ audioSession: RTCAudioSession, + failedToSetActive active: Bool, + error: Error + ) { + subject.send( + .failedToSetActive( + audioSession: audioSession, + active: active, + error: error + ) + ) + } + + func audioSession( + _ audioSession: RTCAudioSession, + audioUnitStartFailedWithError error: Error + ) { + subject.send( + .audioUnitStartFailedWithError( + audioSession: audioSession, + error: error + ) + ) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSession.swift new file mode 100644 index 000000000..9c88cd4af --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSession.swift @@ -0,0 +1,410 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation +import StreamWebRTC + +/// Manages the app’s audio session, handling activation, configuration, +/// and routing to output devices such as speakers and in-ear speakers. +final class StreamAudioSession: @unchecked Sendable, ObservableObject { + + /// The last applied audio session configuration. + private var lastUsedConfiguration: AudioSessionConfiguration? + + /// The current device as is being described by ``UIUserInterfaceIdiom``. + private let currentDevice = CurrentDevice.currentValue + + /// The WebRTC-compatible audio session. + private let audioSession: AudioSessionProtocol + + /// Serial execution queue for processing session updates. + private let processingQueue = SerialActorQueue() + + /// A disposable bag holding all observation cancellable. 
+ private let disposableBag = DisposableBag() + + /// The time to wait for recording to be stopped before we attempt to set the category to `.playback` + private let deferExecutionDueToRecordingInterval: TimeInterval = 1 + + /// The current call settings for the active call. + @Atomic private(set) var activeCallSettings: CallSettings + + /// The set of the user's own audio capabilities. + @Atomic private(set) var ownCapabilities: Set + + /// The policy defining audio session behavior. + @Atomic private(set) var policy: AudioSessionPolicy + + /// Published property to track the audio session category. + @Published private(set) var category: AVAudioSession.Category + + /// Delegate for handling audio session events. + weak var delegate: StreamAudioSessionAdapterDelegate? + + // MARK: - AudioSession State + + /// Indicates whether the session is recording. + @Published var isRecording: Bool = false + + /// Checks if the audio session is currently active. + var isActive: Bool { audioSession.isActive } + + /// Retrieves the current audio route description. + var currentRoute: AVAudioSessionRouteDescription { audioSession.currentRoute } + + /// Initializes a new `StreamAudioSession` instance, configuring + /// the session with default settings and enabling manual audio control + /// for WebRTC. + /// + /// - Parameter callSettings: The settings for the current call. + /// - Parameter ownCapabilities: The set of the user's own audio + /// capabilities. + /// - Parameter policy: The policy defining audio session behavior. + /// - Parameter audioSession: An `AudioSessionProtocol` instance. Defaults + /// to `StreamRTCAudioSession`.
+ required init( + callSettings: CallSettings = .init(), + ownCapabilities: Set = [], + policy: AudioSessionPolicy = DefaultAudioSessionPolicy(), + audioSession: AudioSessionProtocol = StreamRTCAudioSession() + ) { + activeCallSettings = callSettings + self.ownCapabilities = ownCapabilities + self.policy = policy + self.audioSession = audioSession + category = audioSession.category + + /// Update the active call's `audioSession` to make available to + /// other components. + Self.currentValue = self + + var audioSession = self.audioSession + audioSession.useManualAudio = true + audioSession.isAudioEnabled = true + + audioSession + .eventPublisher + .compactMap { + guard case let .didChangeRoute(session, reason, previousRoute) = $0 else { + return nil + } + return (session, reason, previousRoute) + } + .filter { $0.0.isActive } + .log(.debug, subsystems: .audioSession) { [weak self] session, reason, previousRoute in + """ + AudioSession didChangeRoute reason:\(reason) + - isActive: \(session.isActive) + - isRecording: \(self?.isRecording.description ?? "-") + - category: \(AVAudioSession.Category(rawValue: session.category)) + - mode: \(AVAudioSession.Mode(rawValue: session.mode)) + - categoryOptions: \(session.categoryOptions) + - currentRoute:\(session.currentRoute) + - previousRoute:\(previousRoute) + """ + } + .sink { [weak self] in + self?.audioSessionDidChangeRoute( + $0, + reason: $1, + previousRoute: $2 + ) + } + .store(in: disposableBag) + + if let streamAudioSession = audioSession as? StreamRTCAudioSession { + streamAudioSession + .$state + .map(\.category) + .assign(to: \.category, onWeak: self) + .store(in: disposableBag) + } + } + + /// Removes all observers and resets the active audio session. + nonisolated func dismantle() { + disposableBag.removeAll() + if Self.currentValue === self { + // Reset activeCall audioSession. + Self.currentValue = nil + } + } + + // MARK: - OwnCapabilities + + /// Updates the audio session with the user's updated own capabilities.
+ /// + /// - Parameter ownCapabilities: The new set of `OwnCapability` to apply. + func didUpdateOwnCapabilities( + _ ownCapabilities: Set + ) async throws { + self.ownCapabilities = ownCapabilities + try await didUpdate( + callSettings: activeCallSettings, + ownCapabilities: ownCapabilities + ) + } + + // MARK: - CallSettings + + /// Updates the audio session with new call settings. + /// + /// - Parameter settings: The new `CallSettings` to apply. + func didUpdateCallSettings( + _ settings: CallSettings + ) async throws { + activeCallSettings = settings + try await didUpdate( + callSettings: settings, + ownCapabilities: ownCapabilities + ) + } + + // MARK: - Policy + + /// Updates the audio session with a new policy. + /// + /// - Parameter policy: The new `AudioSessionPolicy` to apply. + func didUpdatePolicy( + _ policy: AudioSessionPolicy + ) async throws { + self.policy = policy + try await didUpdate( + callSettings: activeCallSettings, + ownCapabilities: ownCapabilities + ) + } + + // MARK: - Recording + + /// Prepares the audio session for recording. + func prepareForRecording() async throws { + guard !activeCallSettings.audioOn else { + return + } + + activeCallSettings = activeCallSettings.withUpdatedAudioState(true) + try await didUpdate( + callSettings: activeCallSettings, + ownCapabilities: ownCapabilities + ) + log.debug( + "AudioSession completed preparation for recording.", + subsystems: .audioSession + ) + } + + /// Requests the record permission from the user. + func requestRecordPermission() async -> Bool { + guard !isRecording else { + return isRecording + } + let result = await audioSession.requestRecordPermission() + log.debug( + "AudioSession completed request for recording permission.", + subsystems: .audioSession + ) + return result + } + + // MARK: - Private helpers + + /// Handles audio route changes, updating the session based on the reason + /// for the change. 
+ /// + /// For cases like `.newDeviceAvailable`, `.override`, + /// `.noSuitableRouteForCategory`, `.routeConfigurationChange`, `.default`, + /// or `.unknown`, the route change is accepted, and the `CallSettings` + /// are updated accordingly, triggering a delegate update. + /// + /// For other cases, the route change is ignored, enforcing the existing + /// `CallSettings`. + /// + /// - Parameters: + /// - session: The `RTCAudioSession` instance. + /// - reason: The reason for the route change. + /// - previousRoute: The previous audio route configuration. + private func audioSessionDidChangeRoute( + _ session: RTCAudioSession, + reason: AVAudioSession.RouteChangeReason, + previousRoute: AVAudioSessionRouteDescription + ) { + guard session.isActive else { + return + } + + guard session.category == category.rawValue else { + log.warning( + """ + AudioSession category mismatch between AVAudioSession & SDK: + - AVAudioSession.category: \(AVAudioSession.Category(rawValue: session.category)) + - SDK: \(category) + """, + subsystems: .audioSession + ) + return + } + + guard currentDevice.deviceType == .phone else { + if activeCallSettings.speakerOn != session.currentRoute.isSpeaker { + log.warning( + """ + AudioSession didChangeRoute with speakerOn:\(session.currentRoute.isSpeaker) + while CallSettings have speakerOn:\(activeCallSettings.speakerOn). 
+ We will update CallSettings to match the AudioSession's + current configuration + """, + subsystems: .audioSession + ) + delegate?.audioSessionAdapterDidUpdateCallSettings( + self, + callSettings: activeCallSettings + .withUpdatedSpeakerState(session.currentRoute.isSpeaker) + ) + } + return + } + + switch (activeCallSettings.speakerOn, session.currentRoute.isSpeaker) { + case (true, false): + delegate?.audioSessionAdapterDidUpdateCallSettings( + self, + callSettings: activeCallSettings.withUpdatedSpeakerState(false) + ) + + case (false, true) where session.category == AVAudioSession.Category.playAndRecord.rawValue: + delegate?.audioSessionAdapterDidUpdateCallSettings( + self, + callSettings: activeCallSettings.withUpdatedSpeakerState(true) + ) + + default: + break + } + } + + /// Updates the audio session configuration based on the provided call + /// settings and own capabilities. + /// + /// - Parameters: + /// - callSettings: The current call settings. + /// - ownCapabilities: The set of the user's own audio capabilities. + /// - file: The file where this method is called. + /// - functionName: The name of the function where this method is called. + /// - line: The line number where this method is called. 
+ private func didUpdate( + callSettings: CallSettings, + ownCapabilities: Set, + file: StaticString = #file, + functionName: StaticString = #function, + line: UInt = #line + ) async throws { + try await processingQueue.sync { [weak self] in + guard let self else { + return + } + + let configuration = policy.configuration( + for: callSettings, + ownCapabilities: ownCapabilities + ) + + guard configuration != lastUsedConfiguration else { + return + } + + log.debug( + """ + Will configure AudioSession with + - configuration: \(configuration) + - policy: \(type(of: policy)) + - settings: \(callSettings) + - ownCapabilities:\(ownCapabilities) + """, + subsystems: .audioSession, + functionName: functionName, + fileName: file, + lineNumber: line + ) + + if configuration.category == .playback, isRecording { + log.debug( + "AudioSession is currently recording. Defer execution until recording has stopped.", + subsystems: .audioSession, + functionName: functionName, + fileName: file, + lineNumber: line + ) + await deferExecutionUntilRecordingIsStopped() + } + + if + configuration.overrideOutputAudioPort == nil, + audioSession.category == AVAudioSession.Category.playAndRecord + { + try await audioSession.overrideOutputAudioPort(.none) + } + + do { + try await audioSession.setCategory( + configuration.category, + mode: configuration.mode, + with: configuration.options + ) + } catch { + log.error( + "Failed while setting AudioSession category:\(configuration.category) mode:\(configuration.mode) options:\(configuration.options)", + subsystems: .audioSession, + error: error, + functionName: functionName, + fileName: file, + lineNumber: line + ) + } + + if let overrideOutputAudioPort = configuration.overrideOutputAudioPort { + try await audioSession.overrideOutputAudioPort(overrideOutputAudioPort) + } + + lastUsedConfiguration = configuration + } + } + + /// Defers execution until recording is stopped. 
+ private func deferExecutionUntilRecordingIsStopped() async { + do { + _ = try await $isRecording + .filter { $0 == false } + .nextValue(timeout: deferExecutionDueToRecordingInterval) + try await Task.sleep(nanoseconds: 250 * 1_000_000) + } catch { + log.error( + "Defer execution until recording has stopped failed.", + subsystems: .audioSession, + error: error + ) + } + } +} + +/// A key for dependency injection of an `AudioSessionProtocol` instance +/// that represents the active call audio session. +extension StreamAudioSession: InjectionKey { + static var currentValue: StreamAudioSession? +} + +extension InjectedValues { + /// The active call's audio session. The value is being set on + /// `StreamAudioSession` `init` / `deinit` + var activeCallAudioSession: StreamAudioSession? { + get { + Self[StreamAudioSession.self] + } + set { + Self[StreamAudioSession.self] = newValue + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift deleted file mode 100644 index e1a31be35..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapter.swift +++ /dev/null @@ -1,278 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Combine -import Foundation -import StreamWebRTC - -/// The `StreamAudioSessionAdapter` class manages the device's audio session -/// for an app, enabling control over activation, configuration, and routing -/// to output devices like speakers and in-ear speakers. -final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unchecked Sendable { - - /// An enum defining actions to update speaker routing based on call settings. - private enum SpeakerAction { - case routeUpdate(CallSettings) - case respectCallSettings(CallSettings) - } - - /// The shared audio session instance conforming to `AudioSessionProtocol` - /// that manages WebRTC audio settings. 
- private let audioSession: AudioSessionProtocol - - /// The current active call settings, or `nil` if no active call is in session. - @Atomic private(set) var activeCallSettings: CallSettings? - - /// The delegate for receiving audio session events, such as call settings - /// updates. - weak var delegate: StreamAudioSessionAdapterDelegate? - - /// Initializes a new `StreamAudioSessionAdapter` instance, configuring - /// the session with default settings and enabling manual audio control - /// for WebRTC. - /// - Parameter audioSession: An `AudioSessionProtocol` instance. Defaults - /// to `StreamRTCAudioSession`. - required init(_ audioSession: AudioSessionProtocol = StreamRTCAudioSession()) { - self.audioSession = audioSession - super.init() - - /// Update the active call's `audioSession` to make available to other components. - StreamActiveCallAudioSessionKey.currentValue = audioSession - - audioSession.add(self) - audioSession.useManualAudio = true - audioSession.isAudioEnabled = true - - let configuration = RTCAudioSessionConfiguration.default - audioSession.updateConfiguration( - functionName: #function, - file: #fileID, - line: #line - ) { - try $0.setConfiguration(.default) - log.debug( - "AudioSession updated configuration with category: \(configuration.category) options: \(configuration.categoryOptions) mode: \(configuration.mode)", - subsystems: .audioSession - ) - } - } - - deinit { - if StreamActiveCallAudioSessionKey.currentValue === audioSession { - // Reset activeCall audioSession. - StreamActiveCallAudioSessionKey.currentValue = nil - } - } - - // MARK: - CallSettings - - /// Updates the audio session with new call settings. - /// - Parameter settings: The new `CallSettings` to apply. 
- func didUpdateCallSettings( - _ settings: CallSettings - ) { - guard settings != activeCallSettings else { return } - - performSessionAction(settings.audioOutputOn) - performSpeakerUpdateAction(.respectCallSettings(settings)) - activeCallSettings = settings - - log.debug( - "AudioSession updated isActive:\(settings.audioOutputOn) speakerOn:\(settings.speakerOn).", - subsystems: .audioSession - ) - } - - // MARK: - RTCAudioSessionDelegate - - /// Handles audio route changes, updating the session based on the reason - /// for the change. - /// - /// For cases like `.newDeviceAvailable`, `.override`, - /// `.noSuitableRouteForCategory`, `.routeConfigurationChange`, `.default`, - /// or `.unknown`, the route change is accepted, and the `CallSettings` - /// are updated accordingly, triggering a delegate update. - /// - /// For other cases, the route change is ignored, enforcing the existing - /// `CallSettings`. - /// - /// - Parameters: - /// - session: The `RTCAudioSession` instance. - /// - reason: The reason for the route change. - /// - previousRoute: The previous audio route configuration. 
- func audioSessionDidChangeRoute( - _ session: RTCAudioSession, - reason: AVAudioSession.RouteChangeReason, - previousRoute: AVAudioSessionRouteDescription - ) { - guard let activeCallSettings else { - return - } - - switch reason { - case .unknown: - performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) - case .newDeviceAvailable: - performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) - case .oldDeviceUnavailable: - performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) - case .categoryChange: - performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) - case .override: - performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) - case .wakeFromSleep: - performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) - case .noSuitableRouteForCategory: - performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) - case .routeConfigurationChange: - performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) - @unknown default: - performSpeakerUpdateAction(.routeUpdate(activeCallSettings)) - } - } - - /// Logs the status when the session can play or record. - /// - Parameters: - /// - session: The `RTCAudioSession` instance. - /// - canPlayOrRecord: A Boolean indicating whether play or record - /// capabilities are available. - func audioSession( - _ session: RTCAudioSession, - didChangeCanPlayOrRecord canPlayOrRecord: Bool - ) { - log.info( - "AudioSession can playOrRecord:\(canPlayOrRecord).", - subsystems: .audioSession - ) - } - - /// Logs when the session stops playing or recording. - /// - Parameter session: The `RTCAudioSession` instance. - func audioSessionDidStopPlayOrRecord( - _ session: RTCAudioSession - ) { log.info("AudioSession cannot playOrRecord.", subsystems: .audioSession) } - - /// Configures the session's active state when it changes. - /// - Parameters: - /// - audioSession: The `RTCAudioSession` instance. - /// - active: A Boolean indicating the desired active state. 
- func audioSession( - _ audioSession: RTCAudioSession, - didSetActive active: Bool - ) { - guard let activeCallSettings else { return } - performSessionAction(active) - performSpeakerUpdateAction(.respectCallSettings(activeCallSettings)) - } - - /// Logs and manages failure when setting the active state. - /// - Parameters: - /// - audioSession: The `RTCAudioSession` instance. - /// - active: The desired active state. - /// - error: The error encountered during the state change. - func audioSession( - _ audioSession: RTCAudioSession, - failedToSetActive active: Bool, - error: any Error - ) { - log.error( - "AudioSession failedToSetActive active:\(active)", - subsystems: .audioSession, - error: error - ) - performSessionAction(false) - } - - /// Handles failure in starting audio unit playback or recording. - /// - Parameters: - /// - audioSession: The `RTCAudioSession` instance. - /// - error: The error encountered during startup. - func audioSession( - _ audioSession: RTCAudioSession, - audioUnitStartFailedWithError error: any Error - ) { - log.error( - "AudioSession audioUnitStartFailedWithError", - subsystems: .audioSession, - error: error - ) - performSessionAction(false) - } - - // MARK: - Private helpers - - /// Executes an action to update the speaker routing based on current - /// call settings. - /// - Parameter action: The action to perform, affecting routing. 
- private func performSpeakerUpdateAction(_ action: SpeakerAction) { - switch action { - case let .routeUpdate(currentCallSettings): - let updatedCallSettings = currentCallSettings - .withUpdatedSpeakerState(audioSession.isUsingSpeakerOutput) - - guard currentCallSettings != updatedCallSettings else { - return - } - - delegate?.audioSessionAdapterDidUpdateCallSettings( - self, - callSettings: updatedCallSettings - ) - log.debug( - "AudioSession route requires speaker update \(currentCallSettings.speakerOn) → \(updatedCallSettings.speakerOn).", - subsystems: .audioSession - ) - - case let .respectCallSettings(currentCallSettings): - if audioSession.isUsingSpeakerOutput != currentCallSettings.speakerOn { - let category = audioSession.category - let categoryOptions: AVAudioSession.CategoryOptions = currentCallSettings.speakerOn - ? [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP] - : [.allowBluetooth, .allowBluetoothA2DP] - - let mode: AVAudioSession.Mode = currentCallSettings.speakerOn - ? .videoChat - : .voiceChat - - let overrideOutputAudioPort: AVAudioSession.PortOverride = currentCallSettings.speakerOn - ? .speaker - : .none - - audioSession.updateConfiguration( - functionName: #function, - file: #fileID, - line: #line - ) { - try $0.setMode(mode.rawValue) - try $0.setCategory(category, with: categoryOptions) - try $0.overrideOutputAudioPort(overrideOutputAudioPort) - - log.debug( - "AudioSession updated mode:\(mode.rawValue) category:\(category) options:\(categoryOptions) overrideOutputAudioPort:\(overrideOutputAudioPort == .speaker ? ".speaker" : ".none")", - subsystems: .audioSession - ) - } - } - } - } - - /// Updates the active state of the session. - /// - Parameter isActive: A Boolean indicating if the session should be - /// active. 
- private func performSessionAction(_ isActive: Bool) { - guard audioSession.isActive != isActive else { - return - } - log.debug( - "AudioSession will attempt to set isActive:\(isActive).", - subsystems: .audioSession - ) - audioSession.updateConfiguration( - functionName: #function, - file: #fileID, - line: #line - ) { try $0.setActive(isActive) } - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift index ef4bdb5e0..2a5e2f3be 100644 --- a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift +++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift @@ -12,7 +12,7 @@ protocol StreamAudioSessionAdapterDelegate: AnyObject { /// - audioSession: The `AudioSession` instance that made the update. /// - callSettings: The updated `CallSettings`. func audioSessionAdapterDidUpdateCallSettings( - _ adapter: StreamAudioSessionAdapter, + _ adapter: StreamAudioSession, callSettings: CallSettings ) } diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift index ecf7247a0..76ec1fcb9 100644 --- a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift +++ b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift @@ -3,22 +3,83 @@ // import AVFoundation +import Combine import Foundation import StreamWebRTC +/// A protocol defining an interface for managing an audio session. +/// This allows for dependency injection and easier testing. +protocol AudioSessionProtocol { + + /// A publisher that emits audio session events. + var eventPublisher: AnyPublisher { get } + + /// A Boolean value indicating whether the audio session is active. + var isActive: Bool { get } + + /// The current audio route description for the session. 
+ var currentRoute: AVAudioSessionRouteDescription { get } + + var category: AVAudioSession.Category { get } + + /// A Boolean value indicating whether manual audio routing is used. + var useManualAudio: Bool { get set } + + /// A Boolean value indicating whether audio is enabled. + var isAudioEnabled: Bool { get set } + + /// Configures the audio session category and options. + /// - Parameters: + /// - category: The audio category (e.g., `.playAndRecord`). + /// - mode: The audio mode (e.g., `.videoChat`). + /// - categoryOptions: The options for the category (e.g., `.allowBluetooth`). + /// - Throws: An error if setting the category fails. + func setCategory( + _ category: AVAudioSession.Category, + mode: AVAudioSession.Mode, + with categoryOptions: AVAudioSession.CategoryOptions + ) async throws + + /// Activates or deactivates the audio session. + /// - Parameter isActive: Whether to activate the session. + /// - Throws: An error if activation fails. + func setActive(_ isActive: Bool) async throws + + /// Overrides the audio output port (e.g., to speaker). + /// - Parameter port: The output port override. + /// - Throws: An error if overriding fails. + func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) async throws + + /// Requests permission to record audio from the user. + /// - Returns: `true` if permission was granted, otherwise `false`. + func requestRecordPermission() async -> Bool +} + /// A class implementing the `AudioSessionProtocol` that manages the WebRTC /// audio session for the application, handling settings and route management. 
-final class StreamRTCAudioSession: AudioSessionProtocol { +final class StreamRTCAudioSession: AudioSessionProtocol, @unchecked Sendable, ReflectiveStringConvertible { + + struct State: ReflectiveStringConvertible, Equatable { + var category: AVAudioSession.Category + var mode: AVAudioSession.Mode + var options: AVAudioSession.CategoryOptions + var overrideOutputPort: AVAudioSession.PortOverride = .none + } + + @Published private(set) var state: State /// A queue for processing audio session operations asynchronously. - private let processingQueue = DispatchQueue( - label: "io.getstream.audiosession", - target: .global(qos: .userInteractive) - ) + private let processingQueue = SerialActorQueue() /// The shared instance of `RTCAudioSession` used for WebRTC audio /// configuration and management. - private let source: RTCAudioSession = .sharedInstance() + private let source: RTCAudioSession + private let sourceDelegate: RTCAudioSessionDelegatePublisher = .init() + private let disposableBag = DisposableBag() + + var eventPublisher: AnyPublisher { + sourceDelegate.publisher + } /// A Boolean value indicating whether the audio session is currently active. var isActive: Bool { source.isActive } @@ -26,16 +87,7 @@ final class StreamRTCAudioSession: AudioSessionProtocol { /// The current audio route description for the session. var currentRoute: AVAudioSessionRouteDescription { source.currentRoute } - /// The audio category of the session, such as `.playAndRecord`. - var category: String { source.category } - - /// A Boolean value indicating whether the audio session is using - /// the device's speaker. - var isUsingSpeakerOutput: Bool { currentRoute.isSpeaker } - - /// A Boolean value indicating whether the audio session is using - /// an external output, like Bluetooth or headphones. 
- var isUsingExternalOutput: Bool { currentRoute.isExternal } + var category: AVAudioSession.Category { state.category } /// A Boolean value indicating whether the audio session uses manual /// audio routing. @@ -50,18 +102,20 @@ final class StreamRTCAudioSession: AudioSessionProtocol { get { source.isAudioEnabled } } - /// Adds a delegate to receive updates from the audio session. - /// - Parameter delegate: A delegate conforming to `RTCAudioSessionDelegate`. - func add(_ delegate: RTCAudioSessionDelegate) { - source.add(delegate) + // MARK: - Lifecycle + + init() { + let source = RTCAudioSession.sharedInstance() + self.source = source + state = .init( + category: .init(rawValue: source.category), + mode: .init(rawValue: source.mode), + options: source.categoryOptions + ) + source.add(sourceDelegate) } - /// Sets the audio mode for the session, such as `.videoChat`. - /// - Parameter mode: The audio mode to set. - /// - Throws: An error if setting the mode fails. - func setMode(_ mode: String) throws { - try source.setMode(AVAudioSession.Mode(rawValue: mode)) - } + // MARK: - Configuration /// Configures the audio category and category options for the session. /// - Parameters: @@ -70,61 +124,86 @@ final class StreamRTCAudioSession: AudioSessionProtocol { /// `.allowBluetooth` and `.defaultToSpeaker`. /// - Throws: An error if setting the category fails. 
func setCategory( - _ category: String, + _ category: AVAudioSession.Category, + mode: AVAudioSession.Mode, with categoryOptions: AVAudioSession.CategoryOptions - ) throws { - try source.setCategory(AVAudioSession.Category(rawValue: category), with: categoryOptions) + ) async throws { + try await performOperation { [weak self] in + guard let self else { return } + + let state = self.state + let needsCategoryUpdate = category != state.category + let needsModeUpdate = mode != state.mode + let needsOptionsUpdate = categoryOptions != state.options + + guard needsCategoryUpdate || needsModeUpdate || needsOptionsUpdate else { + return + } + + if needsCategoryUpdate || needsOptionsUpdate { + if needsModeUpdate { + try source.setCategory( + category, + mode: mode, + options: categoryOptions + ) + } else { + try source.setCategory(category, with: categoryOptions) + } + } else if needsModeUpdate { + try source.setMode(mode) + } + + self.state = .init( + category: category, + mode: mode, + options: categoryOptions, + overrideOutputPort: state.overrideOutputPort + ) + + updateWebRTCConfiguration(with: self.state) + + log.debug("AudioSession updated with state \(self.state)", subsystems: .audioSession) + } } /// Activates or deactivates the audio session. /// - Parameter isActive: A Boolean indicating whether the session /// should be active. /// - Throws: An error if activation or deactivation fails. - func setActive(_ isActive: Bool) throws { - try source.setActive(isActive) - } + func setActive( + _ isActive: Bool + ) async throws { + try await performOperation { [weak self] in + guard let self else { + return + } - /// Sets the audio configuration for the WebRTC session. - /// - Parameter configuration: The configuration to apply. - /// - Throws: An error if setting the configuration fails. 
- func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws { - try source.setConfiguration(configuration) + try source.setActive(isActive) + } } /// Overrides the audio output port, such as switching to speaker output. /// - Parameter port: The output port to use, such as `.speaker`. /// - Throws: An error if overriding the output port fails. - func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws { - try source.overrideOutputAudioPort(port) - } + func overrideOutputAudioPort( + _ port: AVAudioSession.PortOverride + ) async throws { + try await performOperation { [weak self] in + guard let self else { + return + } - /// Performs an asynchronous update to the audio session configuration. - /// - Parameters: - /// - functionName: The name of the calling function. - /// - file: The source file of the calling function. - /// - line: The line number of the calling function. - /// - block: A closure that performs an audio configuration update. - func updateConfiguration( - functionName: StaticString, - file: StaticString, - line: UInt, - _ block: @escaping (any AudioSessionProtocol) throws -> Void - ) { - processingQueue.async { [weak self] in - guard let self else { return } - source.lockForConfiguration() - defer { source.unlockForConfiguration() } - do { - try block(self) - } catch { - log.error( - error, - subsystems: .audioSession, - functionName: functionName, - fileName: file, - lineNumber: line - ) + guard + state.category == .playAndRecord, + state.overrideOutputPort != port + else { + return } + + try source.overrideOutputAudioPort(port) + state.overrideOutputPort = port + log.debug("AudioSession updated with state \(self.state)", subsystems: .audioSession) } } @@ -137,23 +216,32 @@ final class StreamRTCAudioSession: AudioSessionProtocol { } } } -} -/// A key for dependency injection of an `AudioSessionProtocol` instance -/// that represents the active call audio session. 
-struct StreamActiveCallAudioSessionKey: InjectionKey { - static var currentValue: AudioSessionProtocol? -} + // MARK: - Private Helpers -extension InjectedValues { - /// The active call's audio session. The value is being set on `StreamAudioSessionAdapter` - /// `init` / `deinit` - var activeCallAudioSession: AudioSessionProtocol? { - get { - Self[StreamActiveCallAudioSessionKey.self] - } - set { - Self[StreamActiveCallAudioSessionKey.self] = newValue + private func performOperation( + _ operation: @Sendable @escaping () async throws -> Void + ) async throws { + try await processingQueue.sync { [weak self] in + guard let self else { return } + source.lockForConfiguration() + defer { source.unlockForConfiguration() } + try await operation() } } + + /// Updates the WebRTC audio session configuration. + /// + /// - Parameter state: The current state of the audio session. + /// + /// - Note: This is required to ensure that the WebRTC audio session + /// is configured correctly when the AVAudioSession is updated in + /// order to avoid unexpected changes to the category. + private func updateWebRTCConfiguration(with state: State) { + let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC() + webRTCConfiguration.category = state.category.rawValue + webRTCConfiguration.mode = state.mode.rawValue + webRTCConfiguration.categoryOptions = state.options + RTCAudioSessionConfiguration.setWebRTC(webRTCConfiguration) + } } diff --git a/Sources/StreamVideo/Utils/CurrentDevice/CurrentDevice.swift b/Sources/StreamVideo/Utils/CurrentDevice/CurrentDevice.swift new file mode 100644 index 000000000..baaff35c2 --- /dev/null +++ b/Sources/StreamVideo/Utils/CurrentDevice/CurrentDevice.swift @@ -0,0 +1,81 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +#if canImport(UIKit) +import UIKit +#endif + +/// +/// A class that determines the current device type by inspecting available iOS +/// or macOS APIs. 
It reports whether the device is a phone, pad, TV, CarPlay, +/// mac, vision, or unspecified. +/// +/// This information can be used throughout the app to adjust layout and +/// functionality based on the user's device. +/// +/// ``` +/// let device = CurrentDevice.currentValue +/// if device.deviceType == .phone { +/// // Configure layouts for phone +/// } +/// ``` +final class CurrentDevice: Sendable { + + /// An enumeration describing the type of device. Each case can guide UI + /// or behavior adjustments. For example, `.phone` might use a phone layout. + enum DeviceType { + /// The type was not determined or is unknown. + case unspecified + /// The current device is an iPhone or iPod touch. + case phone + /// The current device is an iPad. + case pad + /// The current device is an Apple TV. + case tv + /// The current device is CarPlay. + case carPlay + /// The current device is a Mac. + case mac + /// The current device is Vision Pro. + case vision + } + + /// The identified `DeviceType` for the current environment. + let deviceType: DeviceType + + /// Creates a `CurrentDevice` by inspecting the user interface idiom. + /// - Important: On platforms where UIKit is unavailable, the type defaults + /// to `.mac` (AppKit) or `.unspecified`. + + private init() { + #if canImport(UIKit) + deviceType = switch UIDevice.current.userInterfaceIdiom { + case .unspecified: .unspecified + case .phone: .phone + case .pad: .pad + case .tv: .tv + case .carPlay: .carPlay + case .mac: .mac + case .vision: .vision + @unknown default: .unspecified + } + #elseif canImport(AppKit) + deviceType = .mac + #else + deviceType = .unspecified + #endif + } +} + +extension CurrentDevice: InjectionKey { + static var currentValue: CurrentDevice = .init() +} + +extension InjectedValues { + /// Retrieves the shared `CurrentDevice` instance. This can be used to query + /// the device type at runtime. 
+ var currentDevice: CurrentDevice { Self[CurrentDevice.self] } +} diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Publisher+TaskSink.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Publisher+TaskSink.swift index ac3097679..8ac247ef4 100644 --- a/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Publisher+TaskSink.swift +++ b/Sources/StreamVideo/WebRTC/v2/Extensions/Foundation/Publisher+TaskSink.swift @@ -50,7 +50,7 @@ extension Publisher { } } catch { // Log any unexpected errors during task execution. - LogConfig.logger.error(error) + LogConfig.logger.error(ClientError(with: error)) } } diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift index 0d7c93d21..fa770289a 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift @@ -370,17 +370,11 @@ extension WebRTCCoordinator.StateMachine.Stage { return } - context - .coordinator? 
- .stateAdapter - .audioSession - .didUpdateCallSettings(callSettings) - try await publisher.didUpdateCallSettings(callSettings) - log.debug("Publisher and AudioSession callSettings updated.", subsystems: .webRTC) + log.debug("Publisher callSettings updated.", subsystems: .webRTC) } catch { log.warning( - "Will disconnect because failed to update callSettings on Publisher or AudioSession.[Error:\(error)]", + "Will disconnect because failed to update callSettings on Publisher.[Error:\(error)]", subsystems: .webRTC ) transitionDisconnectOrError(error) diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift index aa4e1092c..5deddb7fa 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift @@ -432,6 +432,10 @@ final class WebRTCCoordinator: @unchecked Sendable { ) } + func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async throws { + try await stateAdapter.audioSession.didUpdatePolicy(policy) + } + // MARK: - Private /// Creates the state machine for managing WebRTC stages. diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift index 79629a817..ac602049f 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift @@ -42,7 +42,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { let peerConnectionFactory: PeerConnectionFactory let videoCaptureSessionProvider: VideoCaptureSessionProvider let screenShareSessionProvider: ScreenShareSessionProvider - let audioSession: StreamAudioSessionAdapter = .init() + let audioSession: StreamAudioSession = .init() /// Published properties that represent different parts of the WebRTC state. 
@Published private(set) var sessionID: String = UUID().uuidString @@ -118,7 +118,13 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { self.videoCaptureSessionProvider = videoCaptureSessionProvider self.screenShareSessionProvider = screenShareSessionProvider - audioSession.delegate = self + Task { + await configureAudioSession() + } + } + + deinit { + audioSession.dismantle() } /// Sets the session ID. @@ -485,6 +491,34 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { previousParticipantOperation = newTask } + /// Assigns media tracks to participants based on their media type. + /// - Parameter participants: The storage containing participant information. + /// - Returns: An updated participants storage with assigned tracks. + func assignTracks( + on participants: ParticipantsStorage + ) -> ParticipantsStorage { + /// Reduces the participants to a new storage with updated tracks. + participants.reduce(into: ParticipantsStorage()) { partialResult, entry in + var newParticipant = entry + .value + /// Updates the participant with a video track if available. + .withUpdated(track: track(for: entry.value, of: .video) as? RTCVideoTrack) + /// Updates the participant with a screensharing track if available. + .withUpdated(screensharingTrack: track(for: entry.value, of: .screenshare) as? RTCVideoTrack) + + /// For participants other than the local one, we check if the incomingVideoQualitySettings + /// provide additional limits. + if + newParticipant.sessionId != sessionID, + incomingVideoQualitySettings.isVideoDisabled(for: entry.value.sessionId) + { + newParticipant = newParticipant.withUpdated(track: nil) + } + + partialResult[entry.key] = newParticipant + } + } + // MARK: - Private Helpers /// Handles track events when they are added or removed from peer connections. 
@@ -530,44 +564,42 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { ) } - /// Assigns media tracks to participants based on their media type. - /// - Parameter participants: The storage containing participant information. - /// - Returns: An updated participants storage with assigned tracks. - func assignTracks( - on participants: ParticipantsStorage - ) -> ParticipantsStorage { - /// Reduces the participants to a new storage with updated tracks. - participants.reduce(into: ParticipantsStorage()) { partialResult, entry in - var newParticipant = entry - .value - /// Updates the participant with a video track if available. - .withUpdated(track: track(for: entry.value, of: .video) as? RTCVideoTrack) - /// Updates the participant with a screensharing track if available. - .withUpdated(screensharingTrack: track(for: entry.value, of: .screenshare) as? RTCVideoTrack) + private func configureAudioSession() { + audioSession.delegate = self - /// For participants other than the local one, we check if the incomingVideoQualitySettings - /// provide additional limits. 
- if - newParticipant.sessionId != sessionID, - incomingVideoQualitySettings.isVideoDisabled(for: entry.value.sessionId) - { - newParticipant = newParticipant.withUpdated(track: nil) + $callSettings + .removeDuplicates() + .sinkTask { [weak audioSession] in + do { + try await audioSession?.didUpdateCallSettings($0) + } catch { + log.error(error) + } } - - partialResult[entry.key] = newParticipant - } + .store(in: disposableBag) + + $ownCapabilities + .removeDuplicates() + .sinkTask { [weak audioSession] in + do { + try await audioSession?.didUpdateOwnCapabilities($0) + } catch { + log.error(error) + } + } + .store(in: disposableBag) } // MARK: - AudioSessionDelegate nonisolated func audioSessionAdapterDidUpdateCallSettings( - _ adapter: StreamAudioSessionAdapter, + _ adapter: StreamAudioSession, callSettings: CallSettings ) { Task { await self.set(callSettings: callSettings) log.debug( - "AudioSession updated call settings: \(callSettings)", + "AudioSession delegated updated call settings: \(callSettings)", subsystems: .audioSession ) } diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj index ff3bf4944..d67f4e2bc 100644 --- a/StreamVideo.xcodeproj/project.pbxproj +++ b/StreamVideo.xcodeproj/project.pbxproj @@ -44,7 +44,7 @@ 40149DC32B7E202600473176 /* ParticipantEventViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DC22B7E202600473176 /* ParticipantEventViewModifier.swift */; }; 40149DCC2B7E814300473176 /* AVAudioRecorderBuilder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCB2B7E814300473176 /* AVAudioRecorderBuilder.swift */; }; 40149DCE2B7E837A00473176 /* StreamCallAudioRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCD2B7E837A00473176 /* StreamCallAudioRecorder.swift */; }; - 40149DD02B7E839500473176 /* AudioSessionProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCF2B7E839500473176 /* AudioSessionProtocol.swift */; }; + 40149DD02B7E839500473176 /* 
AVAudioSession+RequestRecordPermission.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCF2B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift */; }; 401A0F032AB1C1B600BE2DBD /* ThermalStateObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = 401A0F022AB1C1B600BE2DBD /* ThermalStateObserver.swift */; }; 401A64A52A9DF79E00534ED1 /* StreamChatSwiftUI in Frameworks */ = {isa = PBXBuildFile; productRef = 401A64A42A9DF79E00534ED1 /* StreamChatSwiftUI */; }; 401A64A82A9DF7B400534ED1 /* EffectsLibrary in Frameworks */ = {isa = PBXBuildFile; productRef = 401A64A72A9DF7B400534ED1 /* EffectsLibrary */; }; @@ -226,15 +226,14 @@ 4067A5D82AE1249400CFDEB1 /* CornerClipper_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067A5D72AE1249400CFDEB1 /* CornerClipper_Tests.swift */; }; 4067F3082CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */; }; 4067F30B2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30A2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift */; }; - 4067F30D2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30C2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift */; }; - 4067F30F2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30E2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift */; }; - 4067F3112CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3102CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift */; }; - 4067F3132CDA33C6002E28BD /* RTCAudioSessionConfiguration+Default.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3122CDA33C4002E28BD /* 
RTCAudioSessionConfiguration+Default.swift */; }; + 4067F30D2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */; }; + 4067F30F2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */; }; + 4067F3132CDA33C6002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */; }; 4067F3152CDA4094002E28BD /* StreamRTCAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */; }; - 4067F3172CDA40CC002E28BD /* StreamAudioSessionAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3162CDA40CC002E28BD /* StreamAudioSessionAdapter.swift */; }; + 4067F3172CDA40CC002E28BD /* StreamAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3162CDA40CC002E28BD /* StreamAudioSession.swift */; }; 4067F3192CDA469F002E28BD /* MockAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3182CDA469C002E28BD /* MockAudioSession.swift */; }; 4067F31C2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */; }; - 4067F31E2CDA5A56002E28BD /* StreamAudioSessionAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31D2CDA5A53002E28BD /* StreamAudioSessionAdapter_Tests.swift */; }; + 4067F31E2CDA5A56002E28BD /* StreamAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31D2CDA5A53002E28BD /* StreamAudioSession_Tests.swift */; }; 4069A0042AD985D2009A3A06 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
406303412AD848000091AE77 /* CallParticipant_Mock.swift */; }; 4069A0052AD985D3009A3A06 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406303412AD848000091AE77 /* CallParticipant_Mock.swift */; }; 406A8E8D2AA1D78C001F598A /* AppEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4030E59F2A9DF5BD003E8CBA /* AppEnvironment.swift */; }; @@ -450,6 +449,9 @@ 40AF6A452C935D1900BA2935 /* WebRTCCoordinatorStateMachine_LeavingStageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40AF6A442C935D1900BA2935 /* WebRTCCoordinatorStateMachine_LeavingStageTests.swift */; }; 40AF6A492C935EB600BA2935 /* WebRTCCoordinatorStateMachine_CleanUpStageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40AF6A482C935EB600BA2935 /* WebRTCCoordinatorStateMachine_CleanUpStageTests.swift */; }; 40AF6A4B2C9369A900BA2935 /* WebRTCCoordinatorStateMachine_DisconnectedStageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40AF6A4A2C9369A900BA2935 /* WebRTCCoordinatorStateMachine_DisconnectedStageTests.swift */; }; + 40B284DF2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B284DE2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift */; }; + 40B284E12D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B284E02D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift */; }; + 40B284E32D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B284E22D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift */; }; 40B31AA82D10594F005FB448 /* PublishOptions+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B31AA72D10594F005FB448 /* PublishOptions+Dummy.swift */; }; 40B31AA92D10594F005FB448 /* PublishOptions+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B31AA72D10594F005FB448 /* PublishOptions+Dummy.swift */; }; 40B48C122D14C43F002C4EAB /* 
PublishOptions_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B48C112D14C43F002C4EAB /* PublishOptions_Tests.swift */; }; @@ -607,6 +609,8 @@ 40E363712D0A27640028C52A /* BroadcastCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363702D0A27640028C52A /* BroadcastCaptureHandler.swift */; }; 40E363752D0A2C6B0028C52A /* CGSize+Adapt.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */; }; 40E363772D0A2E320028C52A /* BroadcastBufferReaderKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */; }; + 40E741FA2D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E741F92D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift */; }; + 40E741FF2D553ACD0044C955 /* CurrentDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */; }; 40E9B3B12BCD755F00ACF18F /* MemberResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */; }; 40E9B3B32BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */; }; 40E9B3B52BCD93F500ACF18F /* Credentials+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */; }; @@ -649,6 +653,14 @@ 40F0C3A72BC7FAA400AB75AD /* VideoRendererPool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0C3A52BC7F8E300AB75AD /* VideoRendererPool.swift */; }; 40F0C3AA2BC8121F00AB75AD /* ReusePool.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0C3A82BC8121400AB75AD /* ReusePool.swift */; }; 40F0C3AC2BC8138A00AB75AD /* ReusePool_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0C3AB2BC8138A00AB75AD /* ReusePool_Tests.swift */; }; + 40F101662D5A324700C49481 /* 
OwnCapabilitiesAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F101652D5A324700C49481 /* OwnCapabilitiesAudioSessionPolicy.swift */; }; + 40F101682D5A653200C49481 /* AudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F101672D5A653200C49481 /* AudioSessionPolicy.swift */; }; + 40F1016A2D5A653B00C49481 /* AudioSessionConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F101692D5A653B00C49481 /* AudioSessionConfiguration.swift */; }; + 40F1016C2D5A654300C49481 /* DefaultAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1016B2D5A654300C49481 /* DefaultAudioSessionPolicy.swift */; }; + 40F101792D5CBA2D00C49481 /* XCTAsyncUnwrap.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F101782D5CBA2D00C49481 /* XCTAsyncUnwrap.swift */; }; + 40F1017C2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1017B2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift */; }; + 40F1017E2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1017D2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift */; }; + 40F101802D5D078800C49481 /* MockAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1017F2D5D078800C49481 /* MockAudioSessionPolicy.swift */; }; 40F161AB2A4C6B5C00846E3E /* ScreenSharingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F161AA2A4C6B5C00846E3E /* ScreenSharingSession.swift */; }; 40F18B8C2BEBAC4C00ADF76E /* CallEndedViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403EFC9E2BDBFE050057C248 /* CallEndedViewModifier.swift */; }; 40F18B8E2BEBB65100ADF76E /* View+OptionalPublisher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F18B8D2BEBB65100ADF76E /* View+OptionalPublisher.swift */; }; @@ -1555,7 +1567,7 @@ 40149DC22B7E202600473176 /* ParticipantEventViewModifier.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParticipantEventViewModifier.swift; sourceTree = ""; }; 40149DCB2B7E814300473176 /* AVAudioRecorderBuilder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioRecorderBuilder.swift; sourceTree = ""; }; 40149DCD2B7E837A00473176 /* StreamCallAudioRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamCallAudioRecorder.swift; sourceTree = ""; }; - 40149DCF2B7E839500473176 /* AudioSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSessionProtocol.swift; sourceTree = ""; }; + 40149DCF2B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+RequestRecordPermission.swift"; sourceTree = ""; }; 401A0F022AB1C1B600BE2DBD /* ThermalStateObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThermalStateObserver.swift; sourceTree = ""; }; 401A64AA2A9DF7EC00534ED1 /* DemoChatAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoChatAdapter.swift; sourceTree = ""; }; 401A64B02A9DF83200534ED1 /* TokenResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TokenResponse.swift; sourceTree = ""; }; @@ -1681,15 +1693,14 @@ 4067A5D72AE1249400CFDEB1 /* CornerClipper_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CornerClipper_Tests.swift; sourceTree = ""; }; 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSessionAdapterDelegate.swift; sourceTree = ""; }; 4067F30A2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
"AVAudioSessionRouteDescription+Convenience.swift"; sourceTree = ""; }; - 4067F30C2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession_RouteChangeReason+Convenience.swift"; sourceTree = ""; }; - 4067F30E2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+CategoryOptions+Convenience.swift"; sourceTree = ""; }; - 4067F3102CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionPortDescription+Convenience.swift"; sourceTree = ""; }; - 4067F3122CDA33C4002E28BD /* RTCAudioSessionConfiguration+Default.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioSessionConfiguration+Default.swift"; sourceTree = ""; }; + 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionRouteChangeReason+Convenience.swift"; sourceTree = ""; }; + 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionCategoryOptions+Convenience.swift"; sourceTree = ""; }; + 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession.CategoryOptions+Convenience.swift"; sourceTree = ""; }; 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCAudioSession.swift; sourceTree = ""; }; - 4067F3162CDA40CC002E28BD /* StreamAudioSessionAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSessionAdapter.swift; 
sourceTree = ""; }; + 4067F3162CDA40CC002E28BD /* StreamAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSession.swift; sourceTree = ""; }; 4067F3182CDA469C002E28BD /* MockAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSession.swift; sourceTree = ""; }; 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCAudioSession_Tests.swift; sourceTree = ""; }; - 4067F31D2CDA5A53002E28BD /* StreamAudioSessionAdapter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSessionAdapter_Tests.swift; sourceTree = ""; }; + 4067F31D2CDA5A53002E28BD /* StreamAudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSession_Tests.swift; sourceTree = ""; }; 406AF2002AF3D98F00ED4D0C /* SimulatorScreenCapturer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorScreenCapturer.swift; sourceTree = ""; }; 406AF2042AF3DE4000ED4D0C /* test.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = test.mp4; sourceTree = ""; }; 406B3BD62C8F331F00FC93A1 /* RTCVideoTrack+Sendable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCVideoTrack+Sendable.swift"; sourceTree = ""; }; @@ -1834,6 +1845,9 @@ 40AF6A442C935D1900BA2935 /* WebRTCCoordinatorStateMachine_LeavingStageTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCCoordinatorStateMachine_LeavingStageTests.swift; sourceTree = ""; }; 40AF6A482C935EB600BA2935 /* WebRTCCoordinatorStateMachine_CleanUpStageTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCCoordinatorStateMachine_CleanUpStageTests.swift; sourceTree = ""; }; 40AF6A4A2C9369A900BA2935 /* 
WebRTCCoordinatorStateMachine_DisconnectedStageTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCCoordinatorStateMachine_DisconnectedStageTests.swift; sourceTree = ""; }; + 40B284DE2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionMode+Convenience.swift"; sourceTree = ""; }; + 40B284E02D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionPortOverride+Convenience.swift"; sourceTree = ""; }; + 40B284E22D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionCategory+Convenience.swift"; sourceTree = ""; }; 40B31AA72D10594F005FB448 /* PublishOptions+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "PublishOptions+Dummy.swift"; sourceTree = ""; }; 40B48C0B2D14B75B002C4EAB /* StreamAppStateAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAppStateAdapter.swift; sourceTree = ""; }; 40B48C0F2D14B901002C4EAB /* StreamAppStateAdapter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAppStateAdapter_Tests.swift; sourceTree = ""; }; @@ -1983,6 +1997,8 @@ 40E363702D0A27640028C52A /* BroadcastCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastCaptureHandler.swift; sourceTree = ""; }; 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGSize+Adapt.swift"; sourceTree = ""; }; 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastBufferReaderKey.swift; sourceTree = ""; }; + 40E741F92D54E6F40044C955 /* 
RTCAudioSessionDelegatePublisher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionDelegatePublisher.swift; sourceTree = ""; }; + 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentDevice.swift; sourceTree = ""; }; 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "MemberResponse+Dummy.swift"; sourceTree = ""; }; 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "JoinCallResponse+Dummy.swift"; sourceTree = ""; }; 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Credentials+Dummy.swift"; sourceTree = ""; }; @@ -2022,6 +2038,14 @@ 40F0C3A52BC7F8E300AB75AD /* VideoRendererPool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoRendererPool.swift; sourceTree = ""; }; 40F0C3A82BC8121400AB75AD /* ReusePool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ReusePool.swift; sourceTree = ""; }; 40F0C3AB2BC8138A00AB75AD /* ReusePool_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ReusePool_Tests.swift; sourceTree = ""; }; + 40F101652D5A324700C49481 /* OwnCapabilitiesAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OwnCapabilitiesAudioSessionPolicy.swift; sourceTree = ""; }; + 40F101672D5A653200C49481 /* AudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSessionPolicy.swift; sourceTree = ""; }; + 40F101692D5A653B00C49481 /* AudioSessionConfiguration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSessionConfiguration.swift; sourceTree = ""; }; + 40F1016B2D5A654300C49481 /* 
DefaultAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DefaultAudioSessionPolicy.swift; sourceTree = ""; }; + 40F101782D5CBA2D00C49481 /* XCTAsyncUnwrap.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = XCTAsyncUnwrap.swift; sourceTree = ""; }; + 40F1017B2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioSessionCategoryOptions_Tests.swift; sourceTree = ""; }; + 40F1017D2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionDelegatePublisher_Tests.swift; sourceTree = ""; }; + 40F1017F2D5D078800C49481 /* MockAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSessionPolicy.swift; sourceTree = ""; }; 40F161AA2A4C6B5C00846E3E /* ScreenSharingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenSharingSession.swift; sourceTree = ""; }; 40F18B8D2BEBB65100ADF76E /* View+OptionalPublisher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "View+OptionalPublisher.swift"; sourceTree = ""; }; 40F445AB2A9DFC13004BE3DA /* UserCredentials.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserCredentials.swift; sourceTree = ""; }; @@ -3397,11 +3421,13 @@ 4067F3062CDA32F0002E28BD /* AudioSession */ = { isa = PBXGroup; children = ( + 40F101632D5A322E00C49481 /* Policies */, 4067F3092CDA330E002E28BD /* Extensions */, 40149DCA2B7E813500473176 /* AudioRecorder */, - 40149DCF2B7E839500473176 /* AudioSessionProtocol.swift */, + 40F101692D5A653B00C49481 /* AudioSessionConfiguration.swift */, 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */, - 4067F3162CDA40CC002E28BD /* StreamAudioSessionAdapter.swift */, + 4067F3162CDA40CC002E28BD /* 
StreamAudioSession.swift */, + 40E741F92D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift */, 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */, ); path = AudioSession; @@ -3410,11 +3436,14 @@ 4067F3092CDA330E002E28BD /* Extensions */ = { isa = PBXGroup; children = ( - 4067F3122CDA33C4002E28BD /* RTCAudioSessionConfiguration+Default.swift */, - 4067F3102CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift */, - 4067F30E2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift */, - 4067F30C2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift */, + 40149DCF2B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift */, + 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */, + 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */, + 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */, 4067F30A2CDA3359002E28BD /* AVAudioSessionRouteDescription+Convenience.swift */, + 40B284DE2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift */, + 40B284E02D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift */, + 40B284E22D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift */, ); path = Extensions; sourceTree = ""; @@ -3422,8 +3451,10 @@ 4067F31A2CDA55D1002E28BD /* AudioSession */ = { isa = PBXGroup; children = ( + 40F1017A2D5CE7E400C49481 /* Extensions */, + 40F1017D2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift */, 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */, - 4067F31D2CDA5A53002E28BD /* StreamAudioSessionAdapter_Tests.swift */, + 4067F31D2CDA5A53002E28BD /* StreamAudioSession_Tests.swift */, ); path = AudioSession; sourceTree = ""; @@ -4417,6 +4448,14 @@ path = Broadcast; sourceTree = ""; }; + 40E741FD2D553AB40044C955 /* CurrentDevice */ = { + isa = PBXGroup; + children = ( + 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */, + ); + path = CurrentDevice; + 
sourceTree = ""; + }; 40F0173C2BBEB85F00E89FD1 /* Utilities */ = { isa = PBXGroup; children = ( @@ -4526,6 +4565,24 @@ path = VideoRendererPool; sourceTree = ""; }; + 40F101632D5A322E00C49481 /* Policies */ = { + isa = PBXGroup; + children = ( + 40F101672D5A653200C49481 /* AudioSessionPolicy.swift */, + 40F1016B2D5A654300C49481 /* DefaultAudioSessionPolicy.swift */, + 40F101652D5A324700C49481 /* OwnCapabilitiesAudioSessionPolicy.swift */, + ); + path = Policies; + sourceTree = ""; + }; + 40F1017A2D5CE7E400C49481 /* Extensions */ = { + isa = PBXGroup; + children = ( + 40F1017B2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift */, + ); + path = Extensions; + sourceTree = ""; + }; 40F445B52A9E04B2004BE3DA /* Storage */ = { isa = PBXGroup; children = ( @@ -5268,6 +5325,7 @@ 842747F929EEEC5A00E063AD /* EventLogger.swift */, 842747FB29EEECBA00E063AD /* AssertTestQueue.swift */, 8414081029F284A800FF2D7C /* AssertJSONEqual.swift */, + 40F101782D5CBA2D00C49481 /* XCTAsyncUnwrap.swift */, 84DCA2102A389160000C3411 /* AssertDelay.swift */, 4013387B2BF248E9007318BD /* Mockable.swift */, ); @@ -5321,6 +5379,7 @@ 401338772BF248B9007318BD /* MockStreamVideo.swift */, 401338792BF248CC007318BD /* MockCall.swift */, 40AAD1902D2EF18A00D10330 /* MockCaptureDevice.swift */, + 40F1017F2D5D078800C49481 /* MockAudioSessionPolicy.swift */, ); path = Mock; sourceTree = ""; @@ -5418,6 +5477,7 @@ 84AF64D3287C79220012A503 /* Utils */ = { isa = PBXGroup; children = ( + 40E741FD2D553AB40044C955 /* CurrentDevice */, 401C1EE92D4900BA00304609 /* ClosedCaptionsAdapter */, 401C1EE32D48F09100304609 /* AyncStreamPublisher */, 401C1EDF2D48EB9800304609 /* OrderedCapacityQueue */, @@ -6785,7 +6845,7 @@ 8449824E2C738A830029734D /* StopAllRTMPBroadcastsResponse.swift in Sources */, 40E363522D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift in Sources */, 84D2E37729DC856D001D2118 /* CallMemberUpdatedEvent.swift in Sources */, - 40149DD02B7E839500473176 /* AudioSessionProtocol.swift in Sources 
*/, + 40149DD02B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift in Sources */, 40DFA88D2CC10FF3003DCE05 /* Stream_Video_Sfu_Models_AppleThermalState+Convenience.swift in Sources */, 8409465B29AF4EEC007AF5BF /* ListRecordingsResponse.swift in Sources */, 8490DD21298D4ADF007E53D2 /* StreamJsonDecoder.swift in Sources */, @@ -6817,6 +6877,7 @@ 842B8E292A2DFED900863A87 /* StartTranscriptionResponse.swift in Sources */, 843697CF28C7898A00839D99 /* VideoOptions.swift in Sources */, 842B8E2A2A2DFED900863A87 /* CallSessionResponse.swift in Sources */, + 40B284E32D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift in Sources */, 841BAA452BD15CDE000C73E4 /* UserInfoResponse.swift in Sources */, 8490032129D308A000AD9BB4 /* RingSettingsRequest.swift in Sources */, 8409465629AF4EEC007AF5BF /* CallReactionEvent.swift in Sources */, @@ -6842,6 +6903,7 @@ 40C2B5BB2C2C41DA00EC2C2D /* RejectCallRequest+Reason.swift in Sources */, 40C9E4482C94743800802B28 /* Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift in Sources */, 40034C282CFE156800A318B1 /* CallKitAvailabilityPolicy.swift in Sources */, + 40F1016C2D5A654300C49481 /* DefaultAudioSessionPolicy.swift in Sources */, 840042C92A6FF9A200917B30 /* BroadcastConstants.swift in Sources */, 84F73854287C1A2D00A363F4 /* InjectedValuesExtensions.swift in Sources */, 40C9E44A2C94744E00802B28 /* Stream_Video_Sfu_Models_VideoDimension+Convenience.swift in Sources */, @@ -6885,6 +6947,7 @@ 84DC38B529ADFCFD00946713 /* CallStateResponseFields.swift in Sources */, 40B48C472D14E803002C4EAB /* StreamVideoCapturing.swift in Sources */, 842E70D72B91BE1700D2D68B /* CallRecordingFailedEvent.swift in Sources */, + 40F1016A2D5A653B00C49481 /* AudioSessionConfiguration.swift in Sources */, 40BBC4BE2C6280E4002AEF92 /* LocalScreenShareMediaAdapter.swift in Sources */, 841BAA332BD15CDE000C73E4 /* SFULocationResponse.swift in Sources */, 84DC38D129ADFCFD00946713 /* Credentials.swift in Sources */, @@ -6941,7 +7004,7 @@ 
40C4DF492C1C2C210035DBC2 /* Publisher+WeakAssign.swift in Sources */, 4157FF912C9AC9EC0093D839 /* RTMPBroadcastRequest.swift in Sources */, 844982472C738A830029734D /* DeleteRecordingResponse.swift in Sources */, - 4067F3172CDA40CC002E28BD /* StreamAudioSessionAdapter.swift in Sources */, + 4067F3172CDA40CC002E28BD /* StreamAudioSession.swift in Sources */, 40AB34DA2C5D5A7B00B5B6B3 /* WebRTCStatsReporter.swift in Sources */, 408679F72BD12F1000D027E0 /* AudioFilter.swift in Sources */, 8456E6D2287EC343004E180E /* ConsoleLogDestination.swift in Sources */, @@ -6964,7 +7027,7 @@ 841BAA392BD15CDE000C73E4 /* UserSessionStats.swift in Sources */, 406B3BD72C8F332200FC93A1 /* RTCVideoTrack+Sendable.swift in Sources */, 406128812CF32FEF007F5CDC /* SDPLineVisitor.swift in Sources */, - 4067F3132CDA33C6002E28BD /* RTCAudioSessionConfiguration+Default.swift in Sources */, + 4067F3132CDA33C6002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift in Sources */, 8409465829AF4EEC007AF5BF /* SendReactionRequest.swift in Sources */, 40BBC4BA2C627F83002AEF92 /* TrackEvent.swift in Sources */, 406128832CF33000007F5CDC /* SDPParser.swift in Sources */, @@ -6988,7 +7051,6 @@ 84DC389F29ADFCFD00946713 /* JoinCallResponse.swift in Sources */, 84A7E1AE2883E6B300526C98 /* HTTPUtils.swift in Sources */, 4065839D2B877B6500B4F979 /* UIDevice+NeuralEngine.swift in Sources */, - 4067F3112CDA33AB002E28BD /* AVAudioSessionPortDescription+Convenience.swift in Sources */, 40FB15142BF77D9000D5E580 /* StreamCallStateMachine+Stage.swift in Sources */, 84DC38D229ADFCFD00946713 /* UpdatedCallPermissionsEvent.swift in Sources */, 40429D5B2C779ADB00AC7FFF /* SFUEventAdapter.swift in Sources */, @@ -7036,11 +7098,12 @@ 4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */, 4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */, 84A7E1862883632100526C98 /* ConnectionStatus.swift in Sources */, + 40E741FA2D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift in Sources */, 
841BAA472BD15CDE000C73E4 /* CallTranscriptionReadyEvent.swift in Sources */, 4012B1922BFCA518006B0031 /* StreamCallStateMachine+AcceptingStage.swift in Sources */, 841BAA352BD15CDE000C73E4 /* CallTranscriptionStartedEvent.swift in Sources */, 84DC38A729ADFCFD00946713 /* GoLiveResponse.swift in Sources */, - 4067F30D2CDA3377002E28BD /* AVAudioSession_RouteChangeReason+Convenience.swift in Sources */, + 4067F30D2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift in Sources */, 84DC38C329ADFCFD00946713 /* GeofenceSettings.swift in Sources */, 842B8E162A2DFED900863A87 /* CallRingEvent.swift in Sources */, 842B8E2D2A2DFED900863A87 /* StopTranscriptionResponse.swift in Sources */, @@ -7092,6 +7155,7 @@ 40BBC4C32C6373C4002AEF92 /* WebRTCStateAdapter.swift in Sources */, 403CA9B22CC7BAD6001A88C2 /* VideoLayer.swift in Sources */, 84DC38A229ADFCFD00946713 /* UnblockedUserEvent.swift in Sources */, + 40B284DF2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift in Sources */, 4065838B2B87695500B4F979 /* VideoFilters.swift in Sources */, 84F73855287C1A3400A363F4 /* Utils.swift in Sources */, 84DC389029ADFCFD00946713 /* SendEventResponse.swift in Sources */, @@ -7123,9 +7187,10 @@ 40BBC49F2C623D03002AEF92 /* RTCPeerConnectionState+CustomStringConvertible.swift in Sources */, 842B8E172A2DFED900863A87 /* EgressRTMPResponse.swift in Sources */, 841BAA312BD15CDE000C73E4 /* VideoQuality.swift in Sources */, - 4067F30F2CDA3394002E28BD /* AVAudioSession+CategoryOptions+Convenience.swift in Sources */, + 4067F30F2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift in Sources */, 40BBC4A32C623D03002AEF92 /* RTCIceGatheringState+CustomStringConvertible.swift in Sources */, 84A7E1A82883E46200526C98 /* Timers.swift in Sources */, + 40B284E12D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift in Sources */, 84DC38B129ADFCFD00946713 /* AudioSettings.swift in Sources */, 848CCCED2AB8ED8F002E83A2 /* CallHLSBroadcastingStartedEvent.swift in 
Sources */, 840F59932A77FDCB00EF3EB2 /* PinResponse.swift in Sources */, @@ -7136,6 +7201,7 @@ 842E70D92B91BE1700D2D68B /* CallClosedCaption.swift in Sources */, 84FC2C2428AD1B5E00181490 /* WebRTCEventDecoder.swift in Sources */, 40149DCE2B7E837A00473176 /* StreamCallAudioRecorder.swift in Sources */, + 40E741FF2D553ACD0044C955 /* CurrentDevice.swift in Sources */, 84DC389B29ADFCFD00946713 /* PermissionRequestEvent.swift in Sources */, 406B3C432C91E41400FC93A1 /* WebRTCAuthenticator.swift in Sources */, 84BAD77A2A6BFEF900733156 /* BroadcastBufferUploader.swift in Sources */, @@ -7158,6 +7224,7 @@ 40BBC4E62C63A619002AEF92 /* WebRTCCoordinator+Rejoining.swift in Sources */, 8490032229D308A000AD9BB4 /* AudioSettingsRequest.swift in Sources */, 40AB34B62C5D089E00B5B6B3 /* Task+Timeout.swift in Sources */, + 40F101682D5A653200C49481 /* AudioSessionPolicy.swift in Sources */, 84DC38B629ADFCFD00946713 /* QueryMembersResponse.swift in Sources */, 4159F16C2C86FA41002B94D3 /* TURNAggregatedStats.swift in Sources */, 40BBC4E02C63A564002AEF92 /* WebRTCCoordinator+Disconnected.swift in Sources */, @@ -7265,6 +7332,7 @@ 8409465A29AF4EEC007AF5BF /* ReactionResponse.swift in Sources */, 402F04AA2B70ED8600CA1986 /* Statistics+Convenience.swift in Sources */, 8490032429D308A000AD9BB4 /* RingSettings.swift in Sources */, + 40F101662D5A324700C49481 /* OwnCapabilitiesAudioSessionPolicy.swift in Sources */, 840F598E2A77FDCB00EF3EB2 /* BroadcastSettingsRequest.swift in Sources */, 40E3635B2D0A15E40028C52A /* CameraCapturePhotoHandler.swift in Sources */, 842B8E272A2DFED900863A87 /* CallSessionEndedEvent.swift in Sources */, @@ -7288,7 +7356,7 @@ 406B3C3A2C909CA600FC93A1 /* RTCPeerConnectionCoordinator_Tests.swift in Sources */, 406B3C412C919F5A00FC93A1 /* MockRTCPeerConnectionCoordinatorStack.swift in Sources */, 40F017472BBEEF5100E89FD1 /* ThumbnailResponse+Dummy.swift in Sources */, - 4067F31E2CDA5A56002E28BD /* StreamAudioSessionAdapter_Tests.swift in Sources */, + 
4067F31E2CDA5A56002E28BD /* StreamAudioSession_Tests.swift in Sources */, 40AF6A412C9356B700BA2935 /* WebRTCCoordinatorStateMachine_MigratingStageTests.swift in Sources */, 408CE0F72BD95EB60052EC3A /* VideoConfig+Dummy.swift in Sources */, 84F58B7029EE914400010C4C /* BackgroundTaskScheduler_Tests.swift in Sources */, @@ -7336,6 +7404,7 @@ 406B3C4F2C91F0CA00FC93A1 /* WebRTCCoordinatorStateMachine_ConnectingStageTests.swift in Sources */, 40C9E44C2C948A1F00802B28 /* WebRTCAuthenticator_Tests.swift in Sources */, 406B3C292C905E9D00FC93A1 /* AudioMediaAdapter_Tests.swift in Sources */, + 40F1017C2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift in Sources */, 406303422AD848000091AE77 /* CallParticipant_Mock.swift in Sources */, 845C09872C0DF3D100F725B3 /* LimitsSettingsResponse+Dummy.swift in Sources */, 841FF5032A5D6FEC00809BBB /* CallsController_Tests.swift in Sources */, @@ -7394,6 +7463,7 @@ 406B3C142C8F870400FC93A1 /* MockActiveCallProvider.swift in Sources */, 40C2B5C62C2D7AED00EC2C2D /* RejectionReasonProvider_Tests.swift in Sources */, 403FB15C2BFE22170047A696 /* StreamCallStateMachineStageAcceptingStage_Tests.swift in Sources */, + 40F1017E2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift in Sources */, 841FF5172A5EA7F600809BBB /* CallParticipants_Tests.swift in Sources */, 40F017452BBEEE6D00E89FD1 /* UserResponse+Dummy.swift in Sources */, 40F017422BBEC81C00E89FD1 /* CallKitServiceTests.swift in Sources */, @@ -7424,6 +7494,7 @@ 403FB15A2BFE21A90047A696 /* StreamCallStateMachineStageJoinedStage_Tests.swift in Sources */, 84F58B8F29EEB32700010C4C /* WebSocketPingController_Mock.swift in Sources */, 40AB34C12C5D3DBC00B5B6B3 /* AggregatedStatsReport+Dummy.swift in Sources */, + 40F101802D5D078800C49481 /* MockAudioSessionPolicy.swift in Sources */, 406B3C0A2C8F605000FC93A1 /* PeerConnectionFactory+Mock.swift in Sources */, 40382F472C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift in Sources */, 
84DC44982BA3ACC70050290C /* CallStatsReporting_Tests.swift in Sources */, @@ -7481,6 +7552,7 @@ 400D63F72AC3273F0000BB30 /* ThermalStateObserverTests.swift in Sources */, 409774AE2CC1979F00E0D3EE /* MockCallController.swift in Sources */, 40AB34BC2C5D30AD00B5B6B3 /* URLSessionConfiguration_WaitsForConnectivityTests.swift in Sources */, + 40F101792D5CBA2D00C49481 /* XCTAsyncUnwrap.swift in Sources */, 401C1EEE2D493BE700304609 /* AsyncStreamPublisherTests.swift in Sources */, 40C4DF502C1C415F0035DBC2 /* LastParticipantAutoLeavePolicyTests.swift in Sources */, 4013387C2BF248E9007318BD /* Mockable.swift in Sources */, diff --git a/StreamVideoTests/Mock/MockAudioSession.swift b/StreamVideoTests/Mock/MockAudioSession.swift index 9b764d37c..5d6b5b95c 100644 --- a/StreamVideoTests/Mock/MockAudioSession.swift +++ b/StreamVideoTests/Mock/MockAudioSession.swift @@ -3,146 +3,169 @@ // import AVFoundation +import Combine @testable import StreamVideo import StreamWebRTC final class MockAudioSession: AudioSessionProtocol, Mockable { - final class WeakBox { - weak var value: T? - init(value: T?) { self.value = value } - } // MARK: - Mockable typealias FunctionKey = MockFunctionKey typealias FunctionInputKey = MockFunctionInputKey + + /// Defines the "functions" or property accesses we want to track or stub. enum MockFunctionKey: CaseIterable { - case add - case setMode case setCategory case setActive - case setConfiguration case overrideOutputAudioPort - case updateConfiguration case requestRecordPermission } + /// Defines typed payloads passed along with tracked function calls. 
enum MockFunctionInputKey: Payloadable { - case add(delegate: WeakBox) - case setMode(mode: String) - case setCategory(category: String, categoryOptions: AVAudioSession.CategoryOptions) + case setCategory( + category: AVAudioSession.Category, + mode: AVAudioSession.Mode, + options: AVAudioSession.CategoryOptions + ) case setActive(value: Bool) - case setConfiguration(value: RTCAudioSessionConfiguration) case overrideOutputAudioPort(value: AVAudioSession.PortOverride) - case updateConfiguration case requestRecordPermission + // Return an untyped payload for storage in the base Mockable dictionary. var payload: Any { switch self { - case let .add(delegate): - return delegate - - case let .setMode(mode): - return mode - - case let .setCategory(category, categoryOptions): - return (category, categoryOptions) + case let .setCategory(category, mode, options): + return (category, mode, options) case let .setActive(value): return value - case let .setConfiguration(value): - return value - case let .overrideOutputAudioPort(value): return value - case .updateConfiguration: - return () - case .requestRecordPermission: return () } } } + // MARK: - Mockable Storage + var stubbedProperty: [String: Any] = [:] var stubbedFunction: [FunctionKey: Any] = [:] - @Atomic var stubbedFunctionInput: [FunctionKey: [FunctionInputKey]] = FunctionKey.allCases - .reduce(into: [FunctionKey: [FunctionInputKey]]()) { $0[$1] = [] } - func stub(for keyPath: KeyPath, with value: T) { stubbedProperty[propertyKey(for: keyPath)] = value } - func stub(for function: FunctionKey, with value: T) { stubbedFunction[function] = value } + @Atomic + var stubbedFunctionInput: [FunctionKey: [FunctionInputKey]] = FunctionKey.allCases + .reduce(into: [FunctionKey: [MockFunctionInputKey]]()) { $0[$1] = [] } - // MARK: - AudioSessionProtocol + func stub(for keyPath: KeyPath, with value: T) { + stubbedProperty[propertyKey(for: keyPath)] = value + } - var isActive: Bool = false + func stub(for function: FunctionKey, 
with value: T) { + stubbedFunction[function] = value + } - var currentRoute: AVAudioSessionRouteDescription = .init() + // MARK: - AudioSessionProtocol - var category: String = "" + let eventSubject = PassthroughSubject() - var isUsingSpeakerOutput: Bool = false + init() { + stub(for: \.eventPublisher, with: eventSubject.eraseToAnyPublisher()) + stub(for: \.isActive, with: false) + stub(for: \.currentRoute, with: AVAudioSessionRouteDescription()) + stub(for: \.category, with: AVAudioSession.Category.soloAmbient) + stub(for: \.useManualAudio, with: false) + stub(for: \.isAudioEnabled, with: false) + } - var isUsingExternalOutput: Bool = false + /// Publishes audio session-related events. + var eventPublisher: AnyPublisher { + get { self[dynamicMember: \.eventPublisher] } + set { stub(for: \.eventPublisher, with: newValue) } + } - var useManualAudio: Bool = false + /// Indicates whether the audio session is active. + var isActive: Bool { + get { self[dynamicMember: \.isActive] } + set { stub(for: \.isActive, with: newValue) } + } + + /// The current audio route for the session. + var currentRoute: AVAudioSessionRouteDescription { + get { self[dynamicMember: \.currentRoute] } + set { stub(for: \.currentRoute, with: newValue) } + } - var isAudioEnabled: Bool = false + /// The current audio session category. + var category: AVAudioSession.Category { + get { self[dynamicMember: \.category] } + set { stub(for: \.category, with: newValue) } + } - func add(_ delegate: RTCAudioSessionDelegate) { - stubbedFunctionInput[.add]?.append(.add(delegate: .init(value: delegate))) + /// A Boolean value indicating if manual audio routing is used. + var useManualAudio: Bool { + get { self[dynamicMember: \.useManualAudio] } + set { stub(for: \.useManualAudio, with: newValue) } } - func setMode(_ mode: String) throws { - stubbedFunctionInput[.setMode]?.append(.setMode(mode: mode)) + /// A Boolean value indicating if audio is enabled. 
+ var isAudioEnabled: Bool { + get { self[dynamicMember: \.isAudioEnabled] } + set { stub(for: \.isAudioEnabled, with: newValue) } } + /// Sets the audio category, mode, and options. func setCategory( - _ category: String, + _ category: AVAudioSession.Category, + mode: AVAudioSession.Mode, with categoryOptions: AVAudioSession.CategoryOptions - ) throws { - stubbedFunctionInput[.setCategory]?.append( - .setCategory( - category: category, - categoryOptions: categoryOptions - ) - ) + ) async throws { + record(.setCategory, input: .setCategory( + category: category, + mode: mode, + options: categoryOptions + )) + if let error = stubbedFunction[.setCategory] as? Error { + throw error + } } - func setActive(_ isActive: Bool) throws { - stubbedFunctionInput[.setActive]?.append(.setActive(value: isActive)) + /// Activates or deactivates the audio session. + func setActive(_ isActive: Bool) async throws { + record(.setActive, input: .setActive(value: isActive)) + if let error = stubbedFunction[.setActive] as? Error { + throw error + } } - func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws { - stubbedFunctionInput[.setConfiguration]?.append( - .setConfiguration( - value: configuration - ) - ) + /// Overrides the audio output port. + func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) async throws { + record(.overrideOutputAudioPort, input: .overrideOutputAudioPort(value: port)) + if let error = stubbedFunction[.overrideOutputAudioPort] as? Error { + throw error + } } - func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws { - stubbedFunctionInput[.overrideOutputAudioPort]?.append( - .overrideOutputAudioPort(value: port) - ) + /// Requests permission to record audio. + func requestRecordPermission() async -> Bool { + record(.requestRecordPermission, input: .requestRecordPermission) + return (stubbedFunction[.requestRecordPermission] as? Bool) ?? 
false } - func updateConfiguration( - functionName: StaticString, - file: StaticString, - line: UInt, - _ block: @escaping (AudioSessionProtocol) throws -> Void + // MARK: - Helpers + + /// Tracks calls to a specific function/property in the mock. + private func record( + _ function: FunctionKey, + input: FunctionInputKey? = nil ) { - do { - try block(self) - stubbedFunctionInput[.updateConfiguration]?.append(.updateConfiguration) - } catch { - /* No-op */ + if let input { + stubbedFunctionInput[function]?.append(input) + } else { + // Still record the call, but with no input + stubbedFunctionInput[function]?.append(contentsOf: []) } } - - func requestRecordPermission() async -> Bool { - stubbedFunctionInput[.requestRecordPermission]?.append(.requestRecordPermission) - return stubbedFunction[.requestRecordPermission] as? Bool ?? false - } } diff --git a/StreamVideoTests/Mock/MockAudioSessionPolicy.swift b/StreamVideoTests/Mock/MockAudioSessionPolicy.swift new file mode 100644 index 000000000..5cbd073c3 --- /dev/null +++ b/StreamVideoTests/Mock/MockAudioSessionPolicy.swift @@ -0,0 +1,65 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import Foundation +@testable import StreamVideo + +final class MockAudioSessionPolicy: Mockable, AudioSessionPolicy, @unchecked Sendable { + + // MARK: - Mockable + + typealias FunctionKey = MockFunctionKey + typealias FunctionInputKey = MockFunctionInputKey + var stubbedProperty: [String: Any] = [:] + var stubbedFunction: [FunctionKey: Any] = [:] + @Atomic var stubbedFunctionInput: [FunctionKey: [FunctionInputKey]] = FunctionKey.allCases + .reduce(into: [FunctionKey: [FunctionInputKey]]()) { $0[$1] = [] } + func stub(for keyPath: KeyPath, with value: T) { + stubbedProperty[propertyKey(for: keyPath)] = value + } + + func stub(for function: FunctionKey, with value: T) { stubbedFunction[function] = value } + + enum MockFunctionKey: Hashable, CaseIterable { + case configuration + } + + enum MockFunctionInputKey: Payloadable { + case configuration(callSettings: CallSettings, ownCapabilities: Set) + + var payload: Any { + switch self { + case let .configuration(callSettings, ownCapabilities): + return (callSettings, ownCapabilities) + } + } + } + + // MARK: - AudioSessionPolicy + + init() { + stub( + for: .configuration, + with: AudioSessionConfiguration( + category: .soloAmbient, + mode: .default, + options: [] + ) + ) + } + + func configuration( + for callSettings: CallSettings, + ownCapabilities: Set + ) -> AudioSessionConfiguration { + stubbedFunctionInput[.configuration]? + .append( + .configuration( + callSettings: callSettings, + ownCapabilities: ownCapabilities + ) + ) + return stubbedFunction[.configuration] as! 
AudioSessionConfiguration + } +} diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift index 53e1a7d6f..bbb19b4b3 100644 --- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift +++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift @@ -12,7 +12,7 @@ struct MockRTCPeerConnectionCoordinatorStack { let peerConnection: MockRTCPeerConnection let peerConnectionFactory: PeerConnectionFactory let mockSFUStack: MockSFUStack - let audioSession: StreamAudioSessionAdapter + let audioSession: StreamAudioSession let spySubject: PassthroughSubject let mockLocalAudioMediaAdapter: MockLocalMediaAdapter let mockLocalVideoMediaAdapter: MockLocalMediaAdapter @@ -33,7 +33,7 @@ struct MockRTCPeerConnectionCoordinatorStack { peerConnection: MockRTCPeerConnection = .init(), peerConnectionFactory: PeerConnectionFactory = .mock(), mockSFUStack: MockSFUStack = .init(), - audioSession: StreamAudioSessionAdapter = .init(), + audioSession: StreamAudioSession = .init(), spySubject: PassthroughSubject = .init(), mockLocalAudioMediaAdapter: MockLocalMediaAdapter = .init(), mockLocalVideoMediaAdapter: MockLocalMediaAdapter = .init(), diff --git a/StreamVideoTests/TestUtils/XCTAsyncUnwrap.swift b/StreamVideoTests/TestUtils/XCTAsyncUnwrap.swift new file mode 100644 index 000000000..da1b578d3 --- /dev/null +++ b/StreamVideoTests/TestUtils/XCTAsyncUnwrap.swift @@ -0,0 +1,18 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import XCTest + +extension XCTestCase { + + func XCTAsyncUnwrap( + _ expression: @autoclosure () async throws -> T?, + _ message: @autoclosure () -> String = "", + file: StaticString = #filePath, + line: UInt = #line + ) async throws -> T { + let expressionResult = try await expression() + return try XCTUnwrap(expressionResult, message(), file: file, line: line) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift b/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift new file mode 100644 index 000000000..4ff05420b --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions_Tests.swift @@ -0,0 +1,25 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +@testable import StreamVideo +import XCTest + +final class AVAudioSessionCategoryOptionsTests: XCTestCase { + + // MARK: - playAndRecord + + func test_playAndRecord_whenAccessed_thenReturnsExpectedOptions() { + XCTAssertEqual( + AVAudioSession.CategoryOptions.playAndRecord, + [.allowBluetooth, .allowBluetoothA2DP, .allowAirPlay] + ) + } + + // MARK: - playback + + func test_playback_whenAccessed_thenReturnsEmptyOptions() { + XCTAssertEqual(AVAudioSession.CategoryOptions.playback, []) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioSessionDelegatePublisher_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioSessionDelegatePublisher_Tests.swift new file mode 100644 index 000000000..c0d7a8487 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioSessionDelegatePublisher_Tests.swift @@ -0,0 +1,246 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RTCAudioSessionDelegatePublisherTests: XCTestCase { + private var session: RTCAudioSession! 
= .sharedInstance() + private var disposableBag: DisposableBag! = .init() + private var subject: RTCAudioSessionDelegatePublisher! = .init() + + override func tearDown() { + subject = nil + disposableBag.removeAll() + super.tearDown() + } + + // MARK: - audioSessionDidBeginInterruption + + func test_audioSessionDidBeginInterruption_givenSession_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSessionDidBeginInterruption(session), + validator: { + if case let .didBeginInterruption(receivedSession) = $0 { + XCTAssertEqual(receivedSession, self.session) + } + } + ) + } + + // MARK: - audioSessionDidEndInterruption + + func test_audioSessionDidEndInterruption_givenSessionAndShouldResume_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSessionDidEndInterruption(session, shouldResumeSession: true), + validator: { + if case let .didEndInterruption(receivedSession, receivedShouldResume) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertTrue(receivedShouldResume) + } + } + ) + } + + // MARK: - audioSessionDidChangeRoute + + func test_audioSessionDidChangeRoute_givenSessionReasonAndPreviousRoute_whenCalled_thenPublishesEvent() { + let reason: AVAudioSession.RouteChangeReason = .newDeviceAvailable + let previousRoute = AVAudioSessionRouteDescription() + + assertAudioSessionEvent( + subject.audioSessionDidChangeRoute( + session, + reason: reason, + previousRoute: previousRoute + ), + validator: { + if case let .didChangeRoute(receivedSession, receivedReason, receivedPreviousRoute) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertEqual(receivedReason, reason) + XCTAssertEqual(receivedPreviousRoute, previousRoute) + } + } + ) + } + + // MARK: - audioSessionMediaServerTerminated + + func test_audioSessionMediaServerTerminated_givenSession_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSessionMediaServerTerminated(session), + validator: { + if case let 
.mediaServerTerminated(receivedSession) = $0 { + XCTAssertEqual(receivedSession, self.session) + } + } + ) + } + + // MARK: - audioSessionMediaServerReset + + func test_audioSessionMediaServerReset_givenSession_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSessionMediaServerReset(session), + validator: { + if case let .mediaServerReset(receivedSession) = $0 { + XCTAssertEqual(receivedSession, self.session) + } + } + ) + } + + // MARK: - audioSessionDidChangeCanPlayOrRecord + + func test_audioSessionDidChangeCanPlayOrRecord_givenSessionAndCanPlayOrRecord_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSession(session, didChangeCanPlayOrRecord: true), + validator: { + if case let .didChangeCanPlayOrRecord(receivedSession, receivedCanPlayOrRecord) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertTrue(receivedCanPlayOrRecord) + } + } + ) + } + + // MARK: - audioSessionDidStartPlayOrRecord + + func test_audioSessionDidStartPlayOrRecord_givenSession_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSessionDidStartPlayOrRecord(session), + validator: { + if case let .didStartPlayOrRecord(receivedSession) = $0 { + XCTAssertEqual(receivedSession, self.session) + } + } + ) + } + + // MARK: - audioSessionDidStopPlayOrRecord + + func test_audioSessionDidStopPlayOrRecord_givenSession_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSessionDidStopPlayOrRecord(session), + validator: { + if case let .didStopPlayOrRecord(receivedSession) = $0 { + XCTAssertEqual(receivedSession, self.session) + } + } + ) + } + + // MARK: - audioSessionDidChangeOutputVolume + + func test_audioSessionDidChangeOutputVolume_givenSessionAndOutputVolume_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSession(session, didChangeOutputVolume: 0.5), + validator: { + if case let .didChangeOutputVolume(receivedSession, receivedOutputVolume) = $0 { + 
XCTAssertEqual(receivedSession, self.session) + XCTAssertEqual(receivedOutputVolume, 0.5) + } + } + ) + } + + // MARK: - audioSessionDidDetectPlayoutGlitch + + func test_audioSessionDidDetectPlayoutGlitch_givenSessionAndTotalNumberOfGlitches_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSession(session, didDetectPlayoutGlitch: 10), + validator: { + if case let .didDetectPlayoutGlitch(receivedSession, receivedTotalNumberOfGlitches) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertEqual(receivedTotalNumberOfGlitches, 10) + } + } + ) + } + + // MARK: - audioSessionWillSetActive + + func test_audioSessionWillSetActive_givenSessionAndActive_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSession(session, willSetActive: true), + validator: { + if case let .willSetActive(receivedSession, receivedActive) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertTrue(receivedActive) + } + } + ) + } + + // MARK: - audioSessionDidSetActive + + func test_audioSessionDidSetActive_givenSessionAndActive_whenCalled_thenPublishesEvent() { + assertAudioSessionEvent( + subject.audioSession(session, didSetActive: true), + validator: { + if case let .didSetActive(receivedSession, receivedActive) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertTrue(receivedActive) + } + } + ) + } + + // MARK: - audioSessionFailedToSetActive + + func test_audioSessionFailedToSetActive_givenSessionActiveAndError_whenCalled_thenPublishesEvent() { + let error = NSError(domain: "TestError", code: 1, userInfo: nil) + assertAudioSessionEvent( + subject.audioSession(session, failedToSetActive: true, error: error), + validator: { + if case let .failedToSetActive(receivedSession, receivedActive, receivedError) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertTrue(receivedActive) + XCTAssertEqual(receivedError as NSError, error) + } + } + ) + } + + // MARK: - 
audioSessionAudioUnitStartFailedWithError + + func test_audioSessionAudioUnitStartFailedWithError_givenSessionAndError_whenCalled_thenPublishesEvent() { + let error = NSError(domain: "TestError", code: 1, userInfo: nil) + assertAudioSessionEvent( + subject.audioSession(session, audioUnitStartFailedWithError: error), + validator: { + if case let .audioUnitStartFailedWithError(receivedSession, receivedError) = $0 { + XCTAssertEqual(receivedSession, self.session) + XCTAssertEqual(receivedError as NSError, error) + } + } + ) + } + + // MARK: - Private helpers + + private func assertAudioSessionEvent( + _ action: @autoclosure () -> Void, + validator: @escaping (AudioSessionEvent) -> Void + ) { + let expectation = self.expectation(description: "AudioSession event received.") + let session = RTCAudioSession.sharedInstance() + let canPlayOrRecord = true + + subject + .publisher + .sink { + validator($0) + expectation.fulfill() + } + .store(in: disposableBag) + + action() + + waitForExpectations(timeout: 1, handler: nil) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/StreamAudioSessionAdapter_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamAudioSessionAdapter_Tests.swift deleted file mode 100644 index 982f4d6ea..000000000 --- a/StreamVideoTests/Utils/AudioSession/StreamAudioSessionAdapter_Tests.swift +++ /dev/null @@ -1,354 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -@testable import StreamVideo -import StreamWebRTC -@preconcurrency import XCTest - -final class StreamAudioSessionAdapter_Tests: XCTestCase, @unchecked Sendable { - - private lazy var audioSession: MockAudioSession! = .init() - private lazy var subject: StreamAudioSessionAdapter! 
= StreamAudioSessionAdapter(audioSession) - - // MARK: - Lifecycle - - override func tearDown() { - subject = nil - audioSession = nil - super.tearDown() - } - - // MARK: - init - - func test_init_callAudioSessionKeyCurrentValueUpdated() { - _ = subject - XCTAssertTrue(StreamActiveCallAudioSessionKey.currentValue === audioSession) - } - - func test_init_setsManualAudioAndEnabled() { - _ = subject - - XCTAssertTrue(audioSession.useManualAudio) - XCTAssertTrue(audioSession.isAudioEnabled) - } - - func test_init_updatesConfiguration() async throws { - let expected = RTCAudioSessionConfiguration.default - - _ = subject - await fulfillment { self.audioSession.timesCalled(.updateConfiguration) == 1 } - - let actual = try XCTUnwrap( - audioSession.recordedInputPayload( - RTCAudioSessionConfiguration.self, - for: .setConfiguration - )?.first - ) - XCTAssertEqual(actual, expected) - } - - // MARK: - deinit - - func test_deinit_callAudioSessionKeyCurrentValueSetToNil() { - _ = subject - XCTAssertTrue(StreamActiveCallAudioSessionKey.currentValue === audioSession) - - subject = nil - - XCTAssertNil(StreamActiveCallAudioSessionKey.currentValue) - } - - // MARK: - Active Call Settings Tests - - func test_didUpdateCallSettings_withUpdatedCallSettingsAudioOutputOn_updatesAudioSession() async throws { - let callSettings = CallSettings(audioOn: false, audioOutputOn: true) - - subject.didUpdateCallSettings(callSettings) - - await fulfillment { self.audioSession.timesCalled(.setActive) == 1 } - } - - func test_didUpdateCallSettings_withUpdatedCallSettingsSpeakerOn_updatesAudioSession() async throws { - audioSession.category = .unique - let callSettings = CallSettings(speakerOn: true) - - subject.didUpdateCallSettings(callSettings) - - await fulfillment { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 } - XCTAssertEqual( - audioSession.recordedInputPayload( - String.self, - for: .setMode - )?.first, - AVAudioSession.Mode.videoChat.rawValue - ) - XCTAssertEqual( - 
audioSession.recordedInputPayload( - (String, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first?.0, - audioSession.category - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - (String, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first?.1, - [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP] - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - AVAudioSession.PortOverride.self, - for: .overrideOutputAudioPort - )?.first, - .speaker - ) - } - - func test_didUpdateCallSettings_withUpdatedCallSettingsSpeakerOf_updatesAudioSession() async throws { - audioSession.category = .unique - subject.didUpdateCallSettings(CallSettings(speakerOn: true)) - await fulfillment { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 } - audioSession.resetRecords(for: .setMode) - audioSession.resetRecords(for: .setCategory) - audioSession.resetRecords(for: .overrideOutputAudioPort) - audioSession.isUsingSpeakerOutput = true - - subject.didUpdateCallSettings(CallSettings(speakerOn: false)) - - await fulfillment { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 } - XCTAssertEqual( - audioSession.recordedInputPayload( - String.self, - for: .setMode - )?.first, - AVAudioSession.Mode.voiceChat.rawValue - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - (String, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first?.0, - audioSession.category - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - (String, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first?.1, - [.allowBluetooth, .allowBluetoothA2DP] - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - AVAudioSession.PortOverride.self, - for: .overrideOutputAudioPort - )?.first, - AVAudioSession.PortOverride.none - ) - } - - func test_didUpdateCallSettings_withoutChanges_doesNotUpdateAudioSession() async throws { - audioSession.isActive = true - let callSettings = CallSettings(audioOn: false, videoOn: true) 
- subject.didUpdateCallSettings(callSettings) - await fulfillment { self.audioSession.timesCalled(.updateConfiguration) > 0 } - audioSession.resetRecords(for: .updateConfiguration) - - subject.didUpdateCallSettings(callSettings) - - XCTAssertEqual(audioSession.timesCalled(.updateConfiguration), 0) - } - - // MARK: - Audio Session Delegate Tests - - // MARK: routeUpdate - - func test_audioSessionDidChangeRoute_reasonUnkwnown_updatesCallSettingsForNewRoute() async { - await assertRouteUpdate( - initialSpeakerOn: true, - reason: .unknown, - expectedSpeakerOn: false - ) - } - - func test_audioSessionDidChangeRoute_reasonNewDeviceAvailable_updatesCallSettingsForNewRoute() async { - await assertRouteUpdate( - initialSpeakerOn: true, - reason: .newDeviceAvailable, - expectedSpeakerOn: false - ) - } - - func test_audioSessionDidChangeRoute_reasonOverride_updatesCallSettingsForNewRoute() async { - await assertRouteUpdate( - initialSpeakerOn: true, - reason: .override, - expectedSpeakerOn: false - ) - } - - func test_audioSessionDidChangeRoute_reasonNoSuitableRouteForCategory_updatesCallSettingsForNewRoute() async { - await assertRouteUpdate( - initialSpeakerOn: true, - reason: .noSuitableRouteForCategory, - expectedSpeakerOn: false - ) - } - - // MARK: respectCallSettings - - func test_audioSessionDidChangeRoute_reasonOldDeviceUnavailable_updatesCallSettingsForNewRoute() async { - await assertRespectCallSettings( - callSettingsSpeakerOn: true, - reason: .oldDeviceUnavailable, - isUsingSpeakerOutput: false, - expectedMode: .videoChat, - expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP], - expectedOverrideOutputAudioPort: .speaker - ) - } - - func test_audioSessionDidChangeRoute_reasonCategoryChange_updatesCallSettingsForNewRoute() async { - await assertRespectCallSettings( - callSettingsSpeakerOn: true, - reason: .categoryChange, - isUsingSpeakerOutput: false, - expectedMode: .videoChat, - expectedCategoryOptions: [.defaultToSpeaker, 
.allowBluetooth, .allowBluetoothA2DP], - expectedOverrideOutputAudioPort: .speaker - ) - } - - func test_audioSessionDidChangeRoute_reasonWakeFromSleep_updatesCallSettingsForNewRoute() async { - await assertRespectCallSettings( - callSettingsSpeakerOn: true, - reason: .wakeFromSleep, - isUsingSpeakerOutput: false, - expectedMode: .videoChat, - expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP], - expectedOverrideOutputAudioPort: .speaker - ) - } - - func test_audioSessionDidChangeRoute_reasonRouteConfigurationChange_updatesCallSettingsForNewRoute() async { - await assertRespectCallSettings( - callSettingsSpeakerOn: true, - reason: .routeConfigurationChange, - isUsingSpeakerOutput: false, - expectedMode: .videoChat, - expectedCategoryOptions: [.defaultToSpeaker, .allowBluetooth, .allowBluetoothA2DP], - expectedOverrideOutputAudioPort: .speaker - ) - } - - // MARK: - Private Helper Tests - - private func assertRouteUpdate( - initialSpeakerOn: Bool, - reason: AVAudioSession.RouteChangeReason, - expectedSpeakerOn: Bool, - file: StaticString = #file, - line: UInt = #line - ) async { - subject.didUpdateCallSettings(.init(speakerOn: initialSpeakerOn)) - audioSession.isUsingSpeakerOutput = expectedSpeakerOn - let delegate = MockStreamAudioSessionAdapterDelegate() - subject.delegate = delegate - - subject.audioSessionDidChangeRoute( - .sharedInstance(), - reason: reason, - previousRoute: .init() - ) - - await fulfillment( - file: file, - line: line - ) { delegate.audioSessionAdapterDidUpdateCallSettingsWithCallSettings != nil } - XCTAssertEqual( - delegate.audioSessionAdapterDidUpdateCallSettingsWithCallSettings?.speakerOn, - expectedSpeakerOn, - file: file, - line: line - ) - } - - private func assertRespectCallSettings( - callSettingsSpeakerOn: Bool, - reason: AVAudioSession.RouteChangeReason, - isUsingSpeakerOutput: Bool, - expectedMode: AVAudioSession.Mode, - expectedCategoryOptions: AVAudioSession.CategoryOptions, - 
expectedOverrideOutputAudioPort: AVAudioSession.PortOverride, - file: StaticString = #file, - line: UInt = #line - ) async { - audioSession.category = .unique - subject.didUpdateCallSettings(.init(speakerOn: callSettingsSpeakerOn)) - audioSession.isUsingSpeakerOutput = isUsingSpeakerOutput - let delegate = MockStreamAudioSessionAdapterDelegate() - subject.delegate = delegate - audioSession.resetRecords(for: .setMode) - audioSession.resetRecords(for: .setCategory) - audioSession.resetRecords(for: .overrideOutputAudioPort) - - subject.audioSessionDidChangeRoute( - .sharedInstance(), - reason: reason, - previousRoute: .init() - ) - - await fulfillment( - file: file, - line: line - ) { self.audioSession.timesCalled(.overrideOutputAudioPort) == 1 } - XCTAssertEqual( - audioSession.recordedInputPayload( - String.self, - for: .setMode - )?.first, - expectedMode.rawValue, - file: file, - line: line - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - (String, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first?.0, - audioSession.category, - file: file, - line: line - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - (String, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first?.1, - expectedCategoryOptions, - file: file, - line: line - ) - XCTAssertEqual( - audioSession.recordedInputPayload( - AVAudioSession.PortOverride.self, - for: .overrideOutputAudioPort - )?.first, - expectedOverrideOutputAudioPort, - file: file, - line: line - ) - } -} - -final class MockStreamAudioSessionAdapterDelegate: StreamAudioSessionAdapterDelegate, @unchecked Sendable { - private(set) var audioSessionAdapterDidUpdateCallSettingsWithCallSettings: CallSettings? 
- func audioSessionAdapterDidUpdateCallSettings( - _ adapter: StreamAudioSessionAdapter, - callSettings: CallSettings - ) { - audioSessionAdapterDidUpdateCallSettingsWithCallSettings = callSettings - } -} diff --git a/StreamVideoTests/Utils/AudioSession/StreamAudioSession_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamAudioSession_Tests.swift new file mode 100644 index 000000000..15049b474 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/StreamAudioSession_Tests.swift @@ -0,0 +1,254 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class StreamAudioSession_Tests: XCTestCase { + + private lazy var disposableBag: DisposableBag! = .init() + private lazy var mockAudioSession: MockAudioSession! = .init() + private lazy var mockPolicy: MockAudioSessionPolicy! = .init() + private lazy var subject: StreamAudioSession! = .init( + policy: mockPolicy, + audioSession: mockAudioSession + ) + + override func tearDown() { + subject.dismantle() + subject = nil + disposableBag.removeAll() + mockAudioSession = nil + mockPolicy = nil + super.tearDown() + } + + // MARK: - init + + func test_init_configuresManualAudioAndEnablesAudioByDefault() throws { + _ = subject + + XCTAssertTrue(mockAudioSession.useManualAudio) + XCTAssertTrue(mockAudioSession.isAudioEnabled) + } + + func test_init_currentValueWasSet() { + _ = subject + + XCTAssertTrue(StreamAudioSession.currentValue === subject) + } + + // MARK: - didUpdateOwnCapabilities(_:) + + func test_didUpdateOwnCapabilities_policyWasCalled() async throws { + let ownCapabilities: Set = [.createCall] + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.didUpdateOwnCapabilities(ownCapabilities) + }, expectedInput: [(subject.activeCallSettings, ownCapabilities)]) + } + + func test_didUpdateOwnCapabilities_withoutAnyChanges_policyWasCalledTwice() async throws { + let 
ownCapabilities: Set = [.createCall] + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.didUpdateOwnCapabilities(ownCapabilities) + try await self.subject.didUpdateOwnCapabilities(ownCapabilities) + }, expectedInput: [ + (subject.activeCallSettings, ownCapabilities), + (subject.activeCallSettings, ownCapabilities) + ]) + } + + // MARK: - didUpdateCallSettings(_:) + + func test_didUpdateCallSettings_policyWasCalled() async throws { + let callSettings = CallSettings(speakerOn: false) + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.didUpdateCallSettings(callSettings) + }, expectedInput: [(callSettings, [])]) + } + + func test_didUpdateCallSettings_withoutAnyChanges_policyWasCalledTwice() async throws { + let callSettings = CallSettings(speakerOn: false) + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.didUpdateCallSettings(callSettings) + try await self.subject.didUpdateCallSettings(callSettings) + }, expectedInput: [ + (callSettings, []), + (callSettings, []) + ]) + } + + func test_didUpdateCallSettings_policyReturnsNoOverrideOutputPortWithCategoryPlayAndRecord_overrideOutputAudioPortWasCalledWithNone( + ) async throws { + mockPolicy.stub( + for: .configuration, + with: AudioSessionConfiguration( + category: .ambient, + mode: .default, + options: [.allowAirPlay] + ) + ) + mockAudioSession.category = .playAndRecord + + try await subject.didUpdateCallSettings(.init(audioOn: false)) + + XCTAssertEqual(mockAudioSession.timesCalled(.setCategory), 1) + let payload = try XCTUnwrap( + mockAudioSession.recordedInputPayload( + (AVAudioSession.Category, AVAudioSession.Mode, AVAudioSession.CategoryOptions).self, + for: .setCategory + )?.first + ) + XCTAssertEqual(payload.0, .ambient) + XCTAssertEqual(payload.1, .default) + XCTAssertEqual(payload.2, [.allowAirPlay]) + } + + func test_didUpdateCallSettings_policyReturnsConfiguration_audioSessionWasCalledWithExpectedConfiguration() async throws { + 
mockPolicy.stub( + for: .configuration, + with: AudioSessionConfiguration( + category: .ambient, + mode: .default, + options: [] + ) + ) + mockAudioSession.category = .playAndRecord + + try await subject.didUpdateCallSettings(.init(audioOn: false)) + + XCTAssertEqual(mockAudioSession.timesCalled(.overrideOutputAudioPort), 1) + let payload = try XCTUnwrap( + mockAudioSession + .recordedInputPayload(AVAudioSession.PortOverride.self, for: .overrideOutputAudioPort)?.first + ) + XCTAssertEqual(payload, .none) + } + + func test_didUpdateCallSettings_policyReturnsConfigurationWithOverrideOutputAudioPort_audioSessionWasCalledWithExpectedOverrideOutputAudioPort( + ) async throws { + mockPolicy.stub( + for: .configuration, + with: AudioSessionConfiguration( + category: .playAndRecord, + mode: .default, + options: [], + overrideOutputAudioPort: .speaker + ) + ) + + try await subject.didUpdateCallSettings(.init(audioOn: false)) + + XCTAssertEqual(mockAudioSession.timesCalled(.overrideOutputAudioPort), 1) + let payload = try XCTUnwrap( + mockAudioSession + .recordedInputPayload(AVAudioSession.PortOverride.self, for: .overrideOutputAudioPort)?.first + ) + XCTAssertEqual(payload, .speaker) + } + + func test_didUpdateCallSettings_policyReturnsSameConfigurationAsPreviously_audioSessionWasNotCalled() async throws { + mockPolicy.stub( + for: .configuration, + with: AudioSessionConfiguration( + category: .playAndRecord, + mode: .default, + options: [], + overrideOutputAudioPort: .speaker + ) + ) + + try await subject.didUpdateCallSettings(.init(audioOn: false)) + try await subject.didUpdateCallSettings(.init(audioOn: false)) + + XCTAssertEqual(mockAudioSession.timesCalled(.setCategory), 1) + } + + // MARK: - didUpdatePolicy(_:) + + func test_didUpdatePolicy_policyWasCalled() async throws { + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.didUpdatePolicy(self.mockPolicy) + }, expectedInput: [(subject.activeCallSettings, subject.ownCapabilities)]) + } + + func 
test_didUpdatePolicy_withoutAnyChanges_policyWasCalledTwice() async throws { + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.didUpdatePolicy(self.mockPolicy) + try await self.subject.didUpdatePolicy(self.mockPolicy) + }, expectedInput: [ + (subject.activeCallSettings, subject.ownCapabilities), + (subject.activeCallSettings, subject.ownCapabilities) + ]) + } + + // MARK: - prepareForRecording + + func test_prepareForRecording_whenAudioOff_setsAudioOn_andCallsSetCategory() async throws { + subject = .init( + callSettings: .init(audioOn: false), + policy: mockPolicy, + audioSession: mockAudioSession + ) + + try await assertConfigurationWasCalledOnPolicy({ + try await self.subject.prepareForRecording() + }, expectedInput: [ + (.init(audioOn: true), subject.ownCapabilities) + ]) + XCTAssertTrue(subject.activeCallSettings.audioOn) + } + + func test_prepareForRecording_whenAudioAlreadyOn_doesNotCallSetCategory() async throws { + subject = .init( + callSettings: .init(audioOn: true), + policy: mockPolicy, + audioSession: mockAudioSession + ) + + try await subject.prepareForRecording() + + XCTAssertTrue(subject.activeCallSettings.audioOn) + XCTAssertEqual(mockPolicy.timesCalled(.configuration), 0) + } + + // MARK: - requestRecordPermission + + func test_requestRecordPermission_whenNotRecording_callsMockAudioSession() async { + _ = await subject.requestRecordPermission() + + XCTAssertEqual(mockAudioSession.timesCalled(.requestRecordPermission), 1) + } + + func test_requestRecordPermission_whenIsRecording_doesNotCallSession() async { + subject.isRecording = true + _ = await subject.requestRecordPermission() + + XCTAssertEqual(mockAudioSession.timesCalled(.requestRecordPermission), 0) + } + + // MARK: - dismantle + + func test_dismantle_resetsGlobalCurrentValue() { + subject.dismantle() + + XCTAssertNil(StreamAudioSession.currentValue) + } + + // MARK: - Private Helpers + + private func assertConfigurationWasCalledOnPolicy( + _ trigger: @escaping () 
async throws -> Void, + expectedInput: @autoclosure () -> [(CallSettings, Set)] + ) async throws { + try await trigger() + XCTAssertEqual(mockPolicy.timesCalled(.configuration), expectedInput().endIndex) + let payloads = try XCTUnwrap(mockPolicy.recordedInputPayload((CallSettings, Set).self, for: .configuration)) + XCTAssertEqual(payloads.map(\.0), expectedInput().map(\.0)) + XCTAssertEqual(payloads.map(\.1), expectedInput().map(\.1)) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift index 65cb9988b..66fa61c3a 100644 --- a/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift +++ b/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift @@ -6,201 +6,164 @@ import StreamWebRTC import XCTest -final class StreamRTCAudioSession_Tests: XCTestCase { - - // MARK: - Lazy Properties +import AVFoundation +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest - private var rtcAudioSession: RTCAudioSession! = .sharedInstance() - private lazy var subject: StreamRTCAudioSession! = StreamRTCAudioSession() +final class StreamRTCAudioSessionTests: XCTestCase { - // MARK: - Lifecycle + private lazy var subject: StreamRTCAudioSession! = .init() + private lazy var rtcAudioSession: RTCAudioSession! = .sharedInstance() + private var cancellables: Set! 
= [] - override func tearDown() { + override func tearDown() async throws { + cancellables = nil subject = nil rtcAudioSession = nil - super.tearDown() + try await super.tearDown() } - // MARK: - isActive + // MARK: - Initialization - func test_isActive_returnsCorrectState() throws { - // Given - XCTAssertEqual(subject.isActive, rtcAudioSession.isActive) + func test_init_setsInitialState() { + XCTAssertEqual(subject.state.category.rawValue, rtcAudioSession.category) + XCTAssertEqual(subject.state.mode.rawValue, rtcAudioSession.mode) + XCTAssertEqual(subject.state.options, rtcAudioSession.categoryOptions) + XCTAssertEqual(subject.state.overrideOutputPort, .none) + } - // When - rtcAudioSession.lockForConfiguration() - try rtcAudioSession.setActive(true) - rtcAudioSession.unlockForConfiguration() + // MARK: - setCategory - // Then - XCTAssertTrue(rtcAudioSession.isActive) - XCTAssertEqual(subject.isActive, rtcAudioSession.isActive) - } + func test_setCategory_whenNoChangesNeeded_thenDoesNotUpdateState() async throws { + let initialState = subject.state - // MARK: - currentRoute + try await subject.setCategory( + initialState.category, + mode: initialState.mode, + with: initialState.options + ) - func test_currentRoute_returnsCorrectRoute() { - XCTAssertEqual(subject.currentRoute.inputs.map(\.portType), rtcAudioSession.currentRoute.inputs.map(\.portType)) - XCTAssertEqual(subject.currentRoute.outputs.map(\.portType), rtcAudioSession.currentRoute.outputs.map(\.portType)) + XCTAssertEqual(subject.state, initialState) } - // MARK: - category + func test_setCategory_whenCategoryChanges_thenUpdatesState() async throws { + let newCategory: AVAudioSession.Category = .playback + let initialState = subject.state - func test_category_returnsCorrectCategory() throws { - rtcAudioSession.lockForConfiguration() - try rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord) - rtcAudioSession.unlockForConfiguration() + try await subject.setCategory( + newCategory, + mode: 
initialState.mode, + with: initialState.options + ) - // Then - XCTAssertEqual(subject.category, rtcAudioSession.category) + XCTAssertEqual(subject.state.category, newCategory) + XCTAssertEqual(subject.state.mode, initialState.mode) + XCTAssertEqual(subject.state.options, initialState.options) } - // MARK: - isUsingSpeakerOutput + func test_setCategory_whenModeChanges_thenUpdatesState() async throws { + let newMode: AVAudioSession.Mode = .videoChat + let initialState = subject.state - func test_isUsingSpeakerOutput_returnsCorrectValue() throws { - // Given - rtcAudioSession.lockForConfiguration() - try rtcAudioSession.overrideOutputAudioPort(.speaker) - rtcAudioSession.unlockForConfiguration() - - // When - let isUsingSpeakerOutput = subject.isUsingSpeakerOutput + try await subject.setCategory( + initialState.category, + mode: newMode, + with: initialState.options + ) - // Then - XCTAssertTrue(isUsingSpeakerOutput) + XCTAssertEqual(subject.state.category, initialState.category) + XCTAssertEqual(subject.state.mode, newMode) + XCTAssertEqual(subject.state.options, initialState.options) } - // MARK: - useManualAudio + func test_setCategory_whenOptionsChange_thenUpdatesState() async throws { + let newOptions: AVAudioSession.CategoryOptions = .mixWithOthers + let initialState = subject.state - func test_useManualAudio_setAndGet() { - // When - subject.useManualAudio = true + try await subject.setCategory( + initialState.category, + mode: initialState.mode, + with: newOptions + ) - // Then - XCTAssertTrue(rtcAudioSession.useManualAudio) - XCTAssertEqual(subject.useManualAudio, rtcAudioSession.useManualAudio) + XCTAssertEqual(subject.state.category, initialState.category) + XCTAssertEqual(subject.state.mode, initialState.mode) + XCTAssertEqual(subject.state.options, newOptions) } - // MARK: - isAudioEnabled + func test_setCategory_thenUpdatesWebRTCConfiguration() async throws { + let newOptions: AVAudioSession.CategoryOptions = .mixWithOthers - func 
test_isAudioEnabled_setAndGet() { - // When - subject.isAudioEnabled = true + try await subject.setCategory( + .soloAmbient, + mode: .default, + with: newOptions + ) - // Then - XCTAssertTrue(rtcAudioSession.isAudioEnabled) - XCTAssertEqual(subject.isAudioEnabled, rtcAudioSession.isAudioEnabled) + let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC() + XCTAssertEqual(subject.state.category.rawValue, webRTCConfiguration.category) + XCTAssertEqual(subject.state.mode.rawValue, webRTCConfiguration.mode) + XCTAssertEqual(subject.state.options, webRTCConfiguration.categoryOptions) } - // MARK: - addDelegate - - func test_addDelegate() throws { - final class MockRTCAudioSessionDelegate: NSObject, RTCAudioSessionDelegate { - private(set) var didSetActiveWasCalled: Bool = false - func audioSession(_ audioSession: RTCAudioSession, didSetActive active: Bool) { didSetActiveWasCalled = true } - } + // MARK: - overrideOutputAudioPort - // Given - let delegate = MockRTCAudioSessionDelegate() - subject.add(delegate) + func test_overrideOutputAudioPort_whenCategoryIsNotPlayAndRecord_thenDoesNotUpdateState() async throws { + try await subject.setCategory(.playback, mode: .default, with: []) + let initialState = subject.state - // When - rtcAudioSession.lockForConfiguration() - try rtcAudioSession.setActive(true) - rtcAudioSession.unlockForConfiguration() + try await subject.overrideOutputAudioPort(.speaker) - // Then - XCTAssertTrue(delegate.didSetActiveWasCalled) + XCTAssertEqual(subject.state, initialState) } - // MARK: - setMode + func test_overrideOutputAudioPort_whenPortIsSameAsCurrent_thenDoesNotUpdateState() async throws { + try await subject.setCategory(.playAndRecord, mode: .default, with: []) + try await subject.overrideOutputAudioPort(.speaker) + let initialState = subject.state - func test_setMode_modeUpdatedOnAudioSession() throws { - // Given - rtcAudioSession.lockForConfiguration() - try subject.setMode(AVAudioSession.Mode.videoChat.rawValue) - 
rtcAudioSession.unlockForConfiguration() + try await subject.overrideOutputAudioPort(.speaker) - // Then - XCTAssertEqual(rtcAudioSession.mode, AVAudioSession.Mode.videoChat.rawValue) + XCTAssertEqual(subject.state, initialState) } - // MARK: - setCategory + func test_overrideOutputAudioPort_whenValidChange_thenUpdatesState() async throws { + try await subject.setCategory(.playAndRecord, mode: .default, with: []) - func test_setCategory_categoryUpdatedOnAudioSession() throws { - // Given - rtcAudioSession.lockForConfiguration() - try subject.setCategory( - AVAudioSession.Category.playAndRecord.rawValue, - with: [.allowBluetooth] - ) - rtcAudioSession.unlockForConfiguration() + try await subject.overrideOutputAudioPort(.speaker) - // Then - XCTAssertEqual( - rtcAudioSession.category, - AVAudioSession.Category.playAndRecord.rawValue - ) - XCTAssertEqual( - rtcAudioSession.categoryOptions, - [.allowBluetooth] - ) + XCTAssertEqual(subject.state.overrideOutputPort, .speaker) } - // MARK: - setActive + // MARK: - Properties - func test_setActive_isActiveUpdatedOnAudioSession() throws { - // Given - rtcAudioSession.lockForConfiguration() - try subject.setActive(true) - rtcAudioSession.unlockForConfiguration() - - // Then - XCTAssertTrue(rtcAudioSession.isActive) + func test_isActive_returnsSourceValue() { + XCTAssertEqual(subject.isActive, rtcAudioSession.isActive) } - // MARK: - setConfiguration - - func test_setConfiguration_configurationUpdatedOnAudioSession() throws { - // Given - rtcAudioSession.lockForConfiguration() - let configuration = RTCAudioSessionConfiguration() - configuration.category = AVAudioSession.Category.playAndRecord.rawValue - configuration.categoryOptions = [.allowBluetooth] - configuration.mode = AVAudioSession.Mode.videoChat.rawValue - try subject.setConfiguration(configuration) - rtcAudioSession.unlockForConfiguration() - - // Then - XCTAssertEqual(rtcAudioSession.mode, AVAudioSession.Mode.videoChat.rawValue) - XCTAssertEqual( - 
rtcAudioSession.category, - AVAudioSession.Category.playAndRecord.rawValue - ) - XCTAssertEqual( - rtcAudioSession.categoryOptions, - [.allowBluetooth] - ) + func test_currentRoute_returnsSourceValue() { + XCTAssertEqual(subject.currentRoute, rtcAudioSession.currentRoute) } - // MARK: - updateConfiguration + func test_category_returnsStateCategory() { + XCTAssertEqual(subject.category, subject.state.category) + } - func test_updateConfiguration_executesBlockOnQueue() { - // Given - let expectation = self.expectation(description: "Configuration updated") + func test_useManualAudio_whenSet_updatesSourceValue() { + subject.useManualAudio = true + XCTAssertTrue(rtcAudioSession.useManualAudio) - // When - subject.updateConfiguration( - functionName: #function, - file: #file, - line: #line - ) { session in - try session.setMode(AVAudioSession.Mode.videoChat.rawValue) - expectation.fulfill() - } + subject.useManualAudio = false + XCTAssertFalse(rtcAudioSession.useManualAudio) + } - wait(for: [expectation], timeout: defaultTimeout) + func test_isAudioEnabled_whenSet_updatesSourceValue() { + subject.isAudioEnabled = true + XCTAssertTrue(rtcAudioSession.isAudioEnabled) - XCTAssertEqual(rtcAudioSession.mode, AVAudioSession.Mode.videoChat.rawValue) + subject.isAudioEnabled = false + XCTAssertFalse(rtcAudioSession.isAudioEnabled) } } diff --git a/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift b/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift index 51c257f8e..5085bfb19 100644 --- a/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift +++ b/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift @@ -11,6 +11,7 @@ final class StreamAudioRecorderTests: XCTestCase { private lazy var builder: AVAudioRecorderBuilder! = .init(cachedResult: mockAudioRecorder) private lazy var mockAudioSession: MockAudioSession! = .init() + private lazy var audioSession: StreamAudioSession! 
= .init(audioSession: mockAudioSession) private lazy var mockActiveCallProvider: MockStreamActiveCallProvider! = .init() private var mockAudioRecorder: MockAudioRecorder! private lazy var subject: StreamCallAudioRecorder! = .init(audioRecorderBuilder: builder) @@ -18,7 +19,7 @@ final class StreamAudioRecorderTests: XCTestCase { override func setUp() async throws { try await super.setUp() StreamActiveCallProviderKey.currentValue = mockActiveCallProvider - StreamActiveCallAudioSessionKey.currentValue = mockAudioSession + _ = audioSession mockAudioRecorder = try .init( url: URL(string: "test.wav")!, settings: AVAudioRecorderBuilder.defaultRecordingSettings @@ -118,7 +119,7 @@ final class StreamAudioRecorderTests: XCTestCase { mockAudioSession.stub(for: .requestRecordPermission, with: true) await setUpHasActiveCall(true) await subject.startRecording() - + await setUpHasActiveCall(false) try await assertRecording(false) @@ -184,7 +185,7 @@ final class StreamAudioRecorderTests: XCTestCase { // Mocks for unit testing -private class MockAudioRecorder: AVAudioRecorder { +private class MockAudioRecorder: AVAudioRecorder, @unchecked Sendable { private var _isRecoding = false override var isRecording: Bool { _isRecoding } @@ -213,16 +214,3 @@ private class MockStreamActiveCallProvider: StreamActiveCallProviding { _activeCallSubject.eraseToAnyPublisher() } } - -extension XCTestCase { - - func XCTAsyncUnwrap( - _ expression: @autoclosure () async throws -> T?, - _ message: @autoclosure () -> String = "", - file: StaticString = #filePath, - line: UInt = #line - ) async throws -> T { - let expressionResult = try await expression() - return try XCTUnwrap(expressionResult, message(), file: file, line: line) - } -} diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift index 2fa653c95..14567ca85 100644 --- 
a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift @@ -14,7 +14,7 @@ final class AudioMediaAdapter_Tests: XCTestCase { private lazy var mockPeerConnection: MockRTCPeerConnection! = .init() private lazy var spySubject: PassthroughSubject! = .init() private lazy var mockMediaAdapter: MockLocalMediaAdapter! = .init() - private lazy var audioSession: StreamAudioSessionAdapter! = .init() + private lazy var audioSession: StreamAudioSession! = .init() private lazy var subject: AudioMediaAdapter! = .init( sessionID: sessionId, peerConnection: mockPeerConnection, diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift index 1bc11899d..1e59dfe4e 100644 --- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift @@ -18,7 +18,7 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase, @unchecked Sendable { private lazy var mockPeerConnection: MockRTCPeerConnection! = .init() private lazy var mockSFUStack: MockSFUStack! = .init() private lazy var audioSession: MockAudioSession! = .init() - private lazy var audioSessionAdapter: StreamAudioSessionAdapter! = .init(audioSession) + private lazy var audioSessionAdapter: StreamAudioSession! = .init(audioSession: audioSession) private lazy var spySubject: PassthroughSubject! = .init() private lazy var subject: LocalAudioMediaAdapter! 
= .init( sessionID: sessionId, diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift index 2effe993c..547e90553 100644 --- a/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift @@ -14,7 +14,7 @@ final class RTCPeerConnectionCoordinator_Tests: XCTestCase { private lazy var mockPeerConnection: MockRTCPeerConnection! = .init() private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock() private lazy var mockSFUStack: MockSFUStack! = .init() - private lazy var audioSession: StreamAudioSessionAdapter! = .init() + private lazy var audioSession: StreamAudioSession! = .init() private lazy var spySubject: PassthroughSubject! = .init() private lazy var mockLocalMediaAdapterA: MockLocalMediaAdapter! = .init() private lazy var mockLocalMediaAdapterB: MockLocalMediaAdapter! = .init() diff --git a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift index 1004690d3..2fb5f6b15 100644 --- a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift @@ -40,7 +40,9 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { // MARK: - audioSession func test_audioSession_delegateWasSetAsExpected() async throws { - await assertTrueAsync(await subject.audioSession.delegate === subject) + await fulfillment { + await self.subject.audioSession.delegate === self.subject + } } // MARK: - setSessionID