[Fix]AudioSession management via CallSettings #585

Open · wants to merge 2 commits into base: develop
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -10,6 +10,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- You can now provide the preferred Video stream codec to use [#583](https://github.com/GetStream/stream-video-swift/pull/583)

### 🐞 Fixed
- Toggling the speaker during a call wasn't always working. [#585](https://github.com/GetStream/stream-video-swift/pull/585)
- In some cases, call setup wasn't completed correctly when joining, which led to issues during the call (e.g. missing video tracks or mute state not updating). [#586](https://github.com/GetStream/stream-video-swift/pull/586)

# [1.13.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.13.0)
@@ -196,9 +196,6 @@ open class StreamCallAudioRecorder: @unchecked Sendable {
}

private func setUpAudioCaptureIfRequired() async throws -> AVAudioRecorder {
try audioSession.setCategory(.playAndRecord)
try audioSession.setActive(true, options: [])

guard
await audioSession.requestRecordPermission()
else {
253 changes: 213 additions & 40 deletions Sources/StreamVideo/WebRTC/AudioSession.swift
@@ -2,65 +2,233 @@
// Copyright © 2024 Stream.io Inc. All rights reserved.
//

import AVFoundation
import Combine
import Foundation
import StreamWebRTC

/// The `AudioSession` class manages the device's audio session for an application,
/// providing control over activation, mode configuration, and routing to speakers or in-ear speakers.
final class AudioSession {

private let speakerQueue = UnfairQueue()
private var _isSpeakerOn: Bool = false
private var isSpeakerOn: Bool {
get { speakerQueue.sync { _isSpeakerOn } }
set { speakerQueue.sync { _isSpeakerOn = newValue } }
}

private let isActiveQueue = UnfairQueue()
private var _isActive: Bool = false
var isActive: Bool {
get { isActiveQueue.sync { _isActive } }
set { isActiveQueue.sync { _isActive = newValue } }
}

private let audioSession = RTCAudioSession.sharedInstance()
private var configuration = RTCAudioSessionConfiguration.default

private var activeCallSettings: CallSettings?
private var routeChangeCancellable: AnyCancellable?
private let defaultCategoryOptions: AVAudioSession.CategoryOptions = [
.allowBluetooth,
.allowBluetoothA2DP
]

weak var delegate: AudioSessionDelegate?

init() {
audioSession.useManualAudio = true
audioSession.isAudioEnabled = true
configuration.categoryOptions = defaultCategoryOptions
configureRouteChangeListener()
do {
try audioSession.setCategory(.playAndRecord)
} catch {
log.error("Failed to set audio session category for playback and recording.", subsystems: .webRTC)
}
}

func didUpdate(_ callSettings: CallSettings) throws {
guard callSettings != activeCallSettings else { return }

if !isActive, callSettings.audioOutputOn {
let mode: AVAudioSession.Mode = callSettings.speakerOn ? .videoChat : .voiceChat
try activate(mode: mode)
try toggleSpeaker(callSettings.speakerOn)
} else if isActive, callSettings.audioOutputOn {
let mode: AVAudioSession.Mode = callSettings.speakerOn ? .videoChat : .voiceChat
try activate(mode: mode)
try toggleSpeaker(callSettings.speakerOn)
} else if isActive, !callSettings.audioOutputOn {
try deactivate()
} else {
/* No-op */
}

activeCallSettings = callSettings
log.debug(
"AudioSession updated with \(callSettings).",
subsystems: .webRTC
)
}

private func activate(mode: AVAudioSession.Mode) throws {
audioSession.lockForConfiguration()
defer { audioSession.unlockForConfiguration() }
configuration.mode = mode.rawValue
try audioSession.setConfiguration(configuration, active: true)
isActive = true
}

private func deactivate() throws {
audioSession.lockForConfiguration()
defer { audioSession.unlockForConfiguration() }
try audioSession.setConfiguration(configuration, active: false)
isActive = false
}

private func toggleSpeaker(_ isEnabled: Bool) throws {
guard isEnabled != isSpeakerOn else {
return
}

audioSession.lockForConfiguration()
defer { audioSession.unlockForConfiguration() }

configuration.categoryOptions = isEnabled
? defaultCategoryOptions.union(.defaultToSpeaker)
: defaultCategoryOptions
try audioSession.setConfiguration(configuration)
try audioSession.overrideOutputAudioPort(isEnabled ? .speaker : .none)
isSpeakerOn = isEnabled

log.debug(
"Attempted to set speakerOn:\(isEnabled) with categoryOptions:\(configuration.categoryOptions). Current route: \(audioSession.currentRoute).",
subsystems: .webRTC
)
}

private func configureRouteChangeListener() {
routeChangeCancellable = NotificationCenter
.default
.publisher(for: AVAudioSession.routeChangeNotification)
.compactMap { notification -> AVAudioSession.RouteChangeReason? in
guard
let userInfo = notification.userInfo,
let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue)
else { return nil }
return reason
}
.filter {
Set<AVAudioSession.RouteChangeReason>(
[
.newDeviceAvailable,
.oldDeviceUnavailable,
.routeConfigurationChange,
.categoryChange,
.wakeFromSleep
]
).contains($0)
}
.log(.debug) { "AudioSession route updated due to \($0)." }
.sink { [weak self] _ in self?.updateAudioRoute() }
}

private func updateAudioRoute() {
let currentRoute = audioSession.currentRoute
do {
if currentRoute.isExternal {
delegate?.audioSessionUpdated(self, speakerEnabled: false)
} else {
try toggleSpeaker(activeCallSettings?.speakerOn ?? false)
}
} catch {
log.error("Failed to update route for \(currentRoute.description).", subsystems: .webRTC, error: error)
}
}

}

extension AVAudioSessionRouteDescription {

private static let externalPorts: Set<AVAudioSession.Port> = [
.bluetoothA2DP, .bluetoothLE, .bluetoothHFP, .carAudio, .headphones
]

var isExternal: Bool {
outputs.map(\.portType).contains { Self.externalPorts.contains($0) }
}
}

extension AVAudioSession.RouteChangeReason: CustomStringConvertible {
public var description: String {
switch self {
case .unknown:
return ".unknown"
case .newDeviceAvailable:
return ".newDeviceAvailable"
case .oldDeviceUnavailable:
return ".oldDeviceUnavailable"
case .categoryChange:
return ".categoryChange"
case .override:
return ".override"
case .wakeFromSleep:
return ".wakeFromSleep"
case .noSuitableRouteForCategory:
return ".noSuitableRouteForCategory"
case .routeConfigurationChange:
return ".routeConfigurationChange"
@unknown default:
return "Unknown Reason"
}
}
}

extension AVAudioSession.CategoryOptions: CustomStringConvertible {
public var description: String {
var options: [String] = []

if contains(.mixWithOthers) {
options.append(".mixWithOthers")
}
if contains(.duckOthers) {
options.append(".duckOthers")
}
if contains(.allowBluetooth) {
options.append(".allowBluetooth")
}
if contains(.defaultToSpeaker) {
options.append(".defaultToSpeaker")
}
if contains(.interruptSpokenAudioAndMixWithOthers) {
options.append(".interruptSpokenAudioAndMixWithOthers")
}
if contains(.allowBluetoothA2DP) {
options.append(".allowBluetoothA2DP")
}
if contains(.allowAirPlay) {
options.append(".allowAirPlay")
}
if #available(iOS 14.5, *) {
if contains(.overrideMutedMicrophoneInterruption) {
options.append(".overrideMutedMicrophoneInterruption")
}
}

return options.isEmpty ? ".noOptions" : options.joined(separator: ", ")
}
}

extension AVAudioSessionPortDescription {
override public var description: String {
"<Port type:\(portType.rawValue) name:\(portName)>"
}
}

extension RTCAudioSessionConfiguration {

static let `default`: RTCAudioSessionConfiguration = {
let configuration = RTCAudioSessionConfiguration.webRTC()
var categoryOptions: AVAudioSession.CategoryOptions = [.allowBluetooth, .allowBluetoothA2DP]
@@ -70,3 +238,8 @@ extension RTCAudioSessionConfiguration {
return configuration
}()
}

protocol AudioSessionDelegate: AnyObject {

func audioSessionUpdated(_ audioSession: AudioSession, speakerEnabled: Bool)
}
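
For context, here is a minimal usage sketch of the reworked API. Only `AudioSession`, `didUpdate(_:)`, and `AudioSessionDelegate` come from this diff; the `CallSettingsObserver` wrapper, its publisher-based wiring, and its property names are illustrative assumptions rather than code from the PR.

import Combine

/// Hypothetical owner that forwards CallSettings changes into the session.
/// Everything here except the `AudioSession` API itself is illustrative.
final class CallSettingsObserver: AudioSessionDelegate {

    private let audioSession = AudioSession()
    private var cancellable: AnyCancellable?

    init(callSettingsPublisher: AnyPublisher<CallSettings, Never>) {
        audioSession.delegate = self

        // Whenever the call settings change (speaker toggled, audio output disabled, ...),
        // hand the new value to the session. It activates, deactivates, or re-routes
        // itself based on `audioOutputOn` and `speakerOn`.
        cancellable = callSettingsPublisher.sink { [weak self] settings in
            guard let self else { return }
            do {
                try self.audioSession.didUpdate(settings)
            } catch {
                log.error("Unable to apply \(settings) to the audio session.", subsystems: .webRTC, error: error)
            }
        }
    }

    // MARK: - AudioSessionDelegate

    /// Invoked when a route change forces the speaker off (e.g. headphones were plugged in),
    /// so the published CallSettings can be updated to match the actual route.
    func audioSessionUpdated(_ audioSession: AudioSession, speakerEnabled: Bool) {
        // Write `speakerEnabled` back into the call's CallSettings here.
    }
}

The delegate callback is the hook the route-change listener uses to force the speaker off whenever an external output (headphones, Bluetooth, CarPlay) becomes active.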
@@ -140,30 +140,6 @@ final class AudioMediaAdapter: MediaAdapting, @unchecked Sendable {
try await localMediaManager.didUpdateCallSettings(settings)
}

// MARK: - AudioSession

/// Updates the audio session state.
///
/// - Parameter isEnabled: Whether the audio session is enabled.
func didUpdateAudioSessionState(_ isEnabled: Bool) async {
await audioSession.setAudioSessionEnabled(isEnabled)
}

/// Updates the audio session speaker state.
///
/// - Parameters:
/// - isEnabled: Whether the speaker is enabled.
/// - audioSessionEnabled: Whether the audio session is enabled.
func didUpdateAudioSessionSpeakerState(
_ isEnabled: Bool,
with audioSessionEnabled: Bool
) async {
await audioSession.configure(
audioOn: audioSessionEnabled,
speakerOn: isEnabled
)
}

// MARK: - Observers

/// Adds a new audio stream and notifies observers.