Set automaticallyConfiguresApplicationAudioSession = false

This commit is contained in:
Marc Rousavy 2021-03-29 13:18:02 +02:00
parent 51580bc18a
commit cd180dc73b
2 changed files with 10 additions and 1 deletion

View File

@@ -20,13 +20,14 @@ extension CameraView {
let start = DispatchTime.now()
do {
try addAudioInput()
let audioSession = AVAudioSession.sharedInstance()
if audioSession.category != .playAndRecord {
// allow background music playback
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
}
// TODO: Use https://developer.apple.com/documentation/avfaudio/avaudiosession/3726094-setprefersnointerruptionsfromsys
audioSession.trySetAllowHaptics(true)
// activate current audio session because camera is active
try audioSession.setActive(true)
} catch let error as NSError {
@@ -38,6 +39,7 @@ extension CameraView {
}
self.removeAudioInput()
}
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
@@ -52,6 +54,7 @@ extension CameraView {
return
}
removeAudioInput()
captureSession.beginConfiguration()
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraError.device(.microphoneUnavailable)
@@ -72,6 +75,7 @@ extension CameraView {
guard let audioInput = audioDeviceInput else {
return
}
captureSession.beginConfiguration()
captureSession.removeInput(audioInput)
audioDeviceInput = nil
@@ -86,6 +90,7 @@ extension CameraView {
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
// Something interrupted our Audio Session, stop recording audio.

View File

@@ -35,6 +35,10 @@ extension CameraView {
captureSession.commitConfiguration()
}
// Disable automatic Audio Session configuration because we configure it in CameraView+AVAudioSession.swift (called before Camera gets activated)
captureSession.automaticallyConfiguresApplicationAudioSession = false
// If preset is set, use preset. Otherwise use format.
if let preset = self.preset {
var sessionPreset: AVCaptureSession.Preset?
do {