Skip to content

Commit

Permalink
Apply fixes for when a device has no earPiece
Browse files Browse the repository at this point in the history
  • Loading branch information
ipavlidakis committed Feb 6, 2025
1 parent 4b7af4a commit fb7ad4a
Show file tree
Hide file tree
Showing 8 changed files with 109 additions and 67 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ public protocol AudioSessionProtocol: AnyObject {
/// A Boolean value indicating whether audio is enabled for the session.
var isAudioEnabled: Bool { get set }

var hasEarpiece: Bool { get }

/// Adds a delegate to receive updates about audio session events.
/// - Parameter delegate: The delegate conforming to `RTCAudioSessionDelegate`.
func add(_ delegate: RTCAudioSessionDelegate)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
//
// Copyright © 2025 Stream.io Inc. All rights reserved.
//

import AVFoundation
import Foundation
import StreamWebRTC

extension CallSettings {

    /// Derives the WebRTC audio session configuration that matches the
    /// current call settings.
    ///
    /// - `.playAndRecord` is selected whenever audio capture, the speaker,
    ///   or video is enabled; a receive-only session only needs `.playback`.
    /// - The mode mirrors the category: `.videoChat` when the speaker is on,
    ///   `.voiceChat` for earpiece-style calls, and `.default` otherwise.
    var audioSessionConfiguration: RTCAudioSessionConfiguration {
        // `audioOn`/`speakerOn`/`videoOn` are non-optional Booleans, so they
        // are used directly rather than compared with `== true`.
        let category: AVAudioSession.Category = audioOn || speakerOn || videoOn
            ? .playAndRecord
            : .playback

        let mode: AVAudioSession.Mode
        if category == .playAndRecord {
            mode = speakerOn ? .videoChat : .voiceChat
        } else {
            mode = .default
        }

        // Note: `.playAndRecord`/`.playback` here are the custom
        // `AVAudioSession.CategoryOptions` helpers declared in this module,
        // not the categories of the same name.
        let categoryOptions: AVAudioSession.CategoryOptions = category == .playAndRecord
            ? .playAndRecord
            : .playback

        let result = RTCAudioSessionConfiguration.webRTC()
        result.category = category.rawValue
        result.mode = mode.rawValue
        result.categoryOptions = categoryOptions

        return result
    }
}

extension RTCAudioSessionConfiguration: @unchecked Sendable {

    /// A human-readable summary of the configuration, used by the
    /// audio-session log statements.
    ///
    /// `category` and `mode` are already stored as raw `String` values on
    /// `RTCAudioSessionConfiguration`, so they are interpolated directly.
    /// Re-wrapping them in `AVAudioSession.Category(rawValue:)` /
    /// `AVAudioSession.Mode(rawValue:)` would interpolate the wrapper
    /// struct (reflection output) instead of the readable raw value.
    override open var description: String {
        let fields = [
            "category:\(category)",
            "mode:\(mode)",
            "categoryOptions:\(categoryOptions)"
        ].joined(separator: ", ")
        return "RTCAudioSessionConfiguration(\(fields))"
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,33 +4,6 @@

import StreamWebRTC

extension RTCAudioSessionConfiguration {
    /// The default WebRTC audio session configuration, tuned for two-way
    /// call audio (simultaneous playback and recording).
    static let `default`: RTCAudioSessionConfiguration = {
        let configuration = RTCAudioSessionConfiguration.webRTC()

        // `.playAndRecord` allows the session to capture microphone input
        // while also playing remote audio — required for two-way calls.
        configuration.category = AVAudioSession.Category.playAndRecord.rawValue

        // Video-chat mode tunes the session's signal processing for
        // call-style audio input/output.
        configuration.mode = AVAudioSession.Mode.videoChat.rawValue

        configuration.categoryOptions = .playAndRecord

        return configuration
    }()
}

extension AVAudioSession.CategoryOptions {

static var playAndRecord: AVAudioSession.CategoryOptions = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,10 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
/// that manages WebRTC audio settings.
private let audioSession: AudioSessionProtocol
private let serialQueue = SerialActorQueue()
private var hasBeenConfigured = false

/// The current active call settings, or `nil` if no active call is in session.
@Atomic private(set) var activeCallSettings: CallSettings?
@Atomic private(set) var activeCallSettings: CallSettings

private let canRecordSubject = PassthroughSubject<Bool, Never>()
var canRecordPublisher: AnyPublisher<Bool, Never> { canRecordSubject.eraseToAnyPublisher() }
Expand All @@ -34,8 +35,12 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
/// for WebRTC.
/// - Parameter audioSession: An `AudioSessionProtocol` instance. Defaults
/// to `StreamRTCAudioSession`.
required init(_ audioSession: AudioSessionProtocol = StreamRTCAudioSession()) {
required init(
_ audioSession: AudioSessionProtocol = StreamRTCAudioSession(),
callSettings: CallSettings
) {
self.audioSession = audioSession
activeCallSettings = callSettings
super.init()

/// Update the active call's `audioSession` to make available to other components.
Expand All @@ -44,24 +49,9 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
audioSession.add(self)
audioSession.useManualAudio = true
audioSession.isAudioEnabled = true

let configuration = RTCAudioSessionConfiguration.default
serialQueue.async {
await audioSession.updateConfiguration(
functionName: #function,
file: #fileID,
line: #line
) {
try $0.setConfiguration(configuration)
log.debug(
"AudioSession updated \(configuration)",
subsystems: .audioSession
)
}
}
}

func dismantle() {
nonisolated func dismantle() {
if StreamActiveCallAudioSessionKey.currentValue === self {
// Reset activeCall audioSession.
StreamActiveCallAudioSessionKey.currentValue = nil
Expand All @@ -77,19 +67,19 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
) {
let oldValue = activeCallSettings
activeCallSettings = settings
didUpdate(settings, oldValue: activeCallSettings)
didUpdate(settings, oldValue: oldValue)
}

func prepareForRecording() {
guard let activeCallSettings, !activeCallSettings.audioOn else {
guard !activeCallSettings.audioOn else {
return
}

let settings = activeCallSettings
.withUpdatedAudioState(true)
let oldValue = activeCallSettings
self.activeCallSettings = settings
didUpdate(settings, oldValue: activeCallSettings)
activeCallSettings = settings
didUpdate(settings, oldValue: oldValue)
}

func requestRecordPermission() async -> Bool {
Expand Down Expand Up @@ -127,10 +117,6 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
subsystems: .audioSession
)

guard let activeCallSettings else {
return
}

guard session.hasEarpiece else {
if activeCallSettings.speakerOn != session.currentRoute.isSpeaker {
delegate?.audioSessionAdapterDidUpdateCallSettings(
Expand Down Expand Up @@ -162,8 +148,28 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche

// MARK: - Private helpers

/// Applies the audio session configuration derived from the provided call
/// settings and records that the session has been configured.
///
/// - Parameter settings: The call settings whose derived
///   `audioSessionConfiguration` should be applied to the session.
private func configureAudioSession(settings: CallSettings) async {
    let configuration = settings.audioSessionConfiguration
    await audioSession.updateConfiguration(
        functionName: #function,
        file: #fileID,
        line: #line
    ) { [weak self] in
        // The adapter may be deallocated while the update is pending;
        // bail out rather than touching state on a released instance.
        guard let self else {
            return
        }

        try $0.setConfiguration(configuration)
        // Marks the initial configuration as done so that subsequent
        // settings changes perform incremental updates instead.
        hasBeenConfigured = true
        log.debug(
            "AudioSession was configured with \(configuration)",
            subsystems: .audioSession
        )
    }
}

private func didUpdate(
_ callSettings: CallSettings?,
_ callSettings: CallSettings,
oldValue: CallSettings?,
file: StaticString = #file,
functionName: StaticString = #function,
Expand All @@ -174,7 +180,12 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
return
}

if callSettings?.audioOn == false, oldValue?.audioOn == true {
guard hasBeenConfigured else {
await configureAudioSession(settings: callSettings)
return
}

if callSettings.audioOn == false, oldValue?.audioOn == true {
log.debug(
"Will defer execution until recording has stopped.",
subsystems: .audioSession,
Expand All @@ -185,21 +196,22 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
await deferExecutionUntilRecordingIsStopped()
}

let category: AVAudioSession.Category = callSettings?.audioOn == true || callSettings?
.speakerOn == true || callSettings?.videoOn == true
let category: AVAudioSession.Category = callSettings.audioOn
|| callSettings.speakerOn
|| callSettings.videoOn
? .playAndRecord
: .playback

let mode: AVAudioSession.Mode = category == .playAndRecord
? callSettings?.speakerOn == true ? .videoChat : .voiceChat
? callSettings.speakerOn == true ? .videoChat : .voiceChat
: .default

let categoryOptions: AVAudioSession.CategoryOptions = category == .playAndRecord
? .playAndRecord
: .playback

let overridePort: AVAudioSession.PortOverride? = category == .playAndRecord
? callSettings?.speakerOn == true ? .speaker : AVAudioSession.PortOverride.none
? callSettings.speakerOn == true ? .speaker : AVAudioSession.PortOverride.none
: nil

await audioSession.updateConfiguration(
Expand Down Expand Up @@ -234,7 +246,7 @@ final class StreamAudioSessionAdapter: NSObject, RTCAudioSessionDelegate, @unche
}

log.debug(
"AudioSession updated with callSettings: \(callSettings?.description ?? "nil")",
"AudioSession updated with callSettings: \(callSettings.description)",
subsystems: .audioSession,
functionName: functionName,
fileName: file,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ final class StreamRTCAudioSession: AudioSessionProtocol {
/// an external output, like Bluetooth or headphones.
var isUsingExternalOutput: Bool { currentRoute.isExternal }

var hasEarpiece: Bool { source.hasEarpiece }

/// A Boolean value indicating whether the audio session uses manual
/// audio routing.
var useManualAudio: Bool {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -370,12 +370,6 @@ extension WebRTCCoordinator.StateMachine.Stage {
return
}

context
.coordinator?
.stateAdapter
.audioSession
.didUpdateCallSettings(callSettings)

try await publisher.didUpdateCallSettings(callSettings)
log.debug("Publisher and AudioSession callSettings updated.", subsystems: .webRTC)
} catch {
Expand Down
8 changes: 7 additions & 1 deletion Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
let peerConnectionFactory: PeerConnectionFactory
let videoCaptureSessionProvider: VideoCaptureSessionProvider
let screenShareSessionProvider: ScreenShareSessionProvider
let audioSession: StreamAudioSessionAdapter = .init()
let audioSession: StreamAudioSessionAdapter = .init(callSettings: .init())

/// Published properties that represent different parts of the WebRTC state.
@Published private(set) var sessionID: String = UUID().uuidString
Expand Down Expand Up @@ -119,6 +119,12 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate {
self.screenShareSessionProvider = screenShareSessionProvider

audioSession.delegate = self
Task {
await $callSettings
.removeDuplicates()
.sink { [weak audioSession] in audioSession?.didUpdateCallSettings($0) }
.store(in: disposableBag)
}
}

deinit {
Expand Down
4 changes: 4 additions & 0 deletions StreamVideo.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -205,6 +205,7 @@
4049CE842BBBF8EF003D07D2 /* StreamAsyncImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4049CE832BBBF8EF003D07D2 /* StreamAsyncImage.swift */; };
404A5CFB2AD5648100EF1C62 /* DemoChatModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404A5CFA2AD5648100EF1C62 /* DemoChatModifier.swift */; };
404AD5202D5378BA00A820A4 /* AVAudioSession+HasEarpiece.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404AD51F2D5378BA00A820A4 /* AVAudioSession+HasEarpiece.swift */; };
404AD5222D53807B00A820A4 /* CallSettings+RTCAudioSessionConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404AD5212D53807B00A820A4 /* CallSettings+RTCAudioSessionConfiguration.swift */; };
404C27CB2BF2552800DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; };
404C27CC2BF2552900DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; };
404CAEE72B8F48F6007087BC /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; };
Expand Down Expand Up @@ -1669,6 +1670,7 @@
4049CE832BBBF8EF003D07D2 /* StreamAsyncImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAsyncImage.swift; sourceTree = "<group>"; };
404A5CFA2AD5648100EF1C62 /* DemoChatModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoChatModifier.swift; sourceTree = "<group>"; };
404AD51F2D5378BA00A820A4 /* AVAudioSession+HasEarpiece.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+HasEarpiece.swift"; sourceTree = "<group>"; };
404AD5212D53807B00A820A4 /* CallSettings+RTCAudioSessionConfiguration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CallSettings+RTCAudioSessionConfiguration.swift"; sourceTree = "<group>"; };
4059C3412AAF0CE40006928E /* DemoChatViewModel+Injection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "DemoChatViewModel+Injection.swift"; sourceTree = "<group>"; };
406128802CF32FEF007F5CDC /* SDPLineVisitor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SDPLineVisitor.swift; sourceTree = "<group>"; };
406128822CF33000007F5CDC /* SDPParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SDPParser.swift; sourceTree = "<group>"; };
Expand Down Expand Up @@ -3427,6 +3429,7 @@
40B284E02D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift */,
40B284E22D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift */,
404AD51F2D5378BA00A820A4 /* AVAudioSession+HasEarpiece.swift */,
404AD5212D53807B00A820A4 /* CallSettings+RTCAudioSessionConfiguration.swift */,
);
path = Extensions;
sourceTree = "<group>";
Expand Down Expand Up @@ -7209,6 +7212,7 @@
84DC38B829ADFCFD00946713 /* UpdateUserPermissionsResponse.swift in Sources */,
84DC38C029ADFCFD00946713 /* UserRequest.swift in Sources */,
84DC389629ADFCFD00946713 /* EndCallResponse.swift in Sources */,
404AD5222D53807B00A820A4 /* CallSettings+RTCAudioSessionConfiguration.swift in Sources */,
847BE09C29DADE0100B55D21 /* Call.swift in Sources */,
848CCCEF2AB8ED8F002E83A2 /* ThumbnailsSettings.swift in Sources */,
40C4DF4D2C1C2CD80035DBC2 /* DefaultParticipantAutoLeavePolicy.swift in Sources */,
Expand Down

0 comments on commit fb7ad4a

Please sign in to comment.