fix: Move Audio Input initialization shortly before startRecording (#159)
* rename
* Update AVAudioSession+updateCategory.swift
* fix bootstrap script
* Update CameraView+AVAudioSession.swift
* move audio input adding lower
* Activate AudioSession only when starting recording
* format
* Deactivate Audio Session
* remove audio input before deactivating audio session
* Update CameraView+AVAudioSession.swift
* log time
* Update CameraView+AVAudioSession.swift
* measure time with `measureElapsedTime`
* Update project.pbxproj
* only log in debug builds
* bootstrap with bridge (RNN new API)
* Mark two funcs as `@inlinable`
* format
* Update ReactLogger.swift
* Make audioWriter optional (allow videos without sound)
* only log frame drop reason in DEBUG
* Make audio writing entirely optional
* format
* Use function name as label for measureElapsedTime
* Update MeasureElapsedTime.swift
* Update MeasureElapsedTime.swift
* Mark AudioWriter as finished
* set `automaticallyConfiguresApplicationAudioSession` once
* Add JS console logging
* log to JS console for a few logs
* Update AVAudioSession+updateCategory.swift
* format
* Update JSConsoleHelper.mm
* catch log errors
* Update ReactLogger.swift
* fix docs
* Update RecordingSession.swift
* Immediately add audio input
* Update CameraView+AVCaptureSession.swift
* Update CameraView+AVCaptureSession.swift
* Update ReactLogger.swift
* immediately set audio session
* extract
* format
* Update TROUBLESHOOTING.mdx
* hmm
* Update AVAudioSession+updateCategory.swift
* Create secondary `AVCaptureSession` for audio
* Configure once, start stop on demand
* format
* fix audio notification interruptions
* docs
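In short, this change splits audio off the main capture session: a dedicated audio-only `AVCaptureSession` is configured once (audio input + audio data output), and the shared `AVAudioSession` is only activated, and the audio capture session only started, when a recording actually begins; it is stopped again afterwards. Below is a rough, self-contained sketch of that call pattern. The `AudioSessionController` class, its error handling, and the standard `setCategory`/`setActive` calls (in place of this repo's `updateCategory` helper and `invokeOnError`) are simplifications for illustration, not the code in this commit.

```swift
import AVFoundation

/// Sketch only: a simplified stand-in showing *when* the audio capture session
/// is configured, started and stopped. Names and error handling are assumptions,
/// not the actual CameraView implementation.
final class AudioSessionController {
  private let audioCaptureSession = AVCaptureSession()

  /// Called once up-front (e.g. when the view is initialized).
  /// Configures the audio-only capture session, but does not activate anything yet.
  func configureAudioSession() throws {
    audioCaptureSession.beginConfiguration()
    defer { audioCaptureSession.commitConfiguration() }

    // We manage the shared AVAudioSession ourselves.
    audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false

    guard let microphone = AVCaptureDevice.default(for: .audio) else { return }
    let input = try AVCaptureDeviceInput(device: microphone)
    if audioCaptureSession.canAddInput(input) {
      audioCaptureSession.addInput(input)
    }
  }

  /// Called right before a recording starts: activate the shared AVAudioSession
  /// (mixing with background music) and start pulling audio samples.
  func activateAudioSession() throws {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(.playAndRecord,
                            mode: .default,
                            options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
    try session.setActive(true)
    audioCaptureSession.startRunning()
  }

  /// Called after the recording stops: stop the audio session again so other
  /// apps (e.g. music players) get their audio back.
  func deactivateAudioSession() {
    audioCaptureSession.stopRunning()
    try? AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
  }
}
```

The intent (per the commit title and the "Activate AudioSession only when starting recording" commit) is that `startRecording` triggers `activateAudioSession()` on the audio queue shortly before writing begins, and `stopRecording` triggers `deactivateAudioSession()` once it ends, so configuring the camera no longer interrupts other apps' audio.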
@@ -14,71 +14,81 @@ import Foundation
 */

 extension CameraView {
   /**
-   Configures the Audio session to allow background-music playback while recording.
+   Configures the Audio Capture Session with an audio input and audio data output.
    */
   final func configureAudioSession() {
-    let start = DispatchTime.now()
     ReactLogger.log(level: .info, message: "Configuring Audio Session...")
+
+    audioCaptureSession.beginConfiguration()
+    defer {
+      audioCaptureSession.commitConfiguration()
+    }
+
+    audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
+
+    // Audio Input
     do {
-      try addAudioInput()
+      if let audioDeviceInput = self.audioDeviceInput {
+        audioCaptureSession.removeInput(audioDeviceInput)
+        self.audioDeviceInput = nil
+      }
+      ReactLogger.log(level: .info, message: "Adding Audio input...")
+      guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
+        return invokeOnError(.device(.microphoneUnavailable))
+      }
+      audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
+      guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
+        return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
+      }
+      audioCaptureSession.addInput(audioDeviceInput!)
+    } catch let error as NSError {
+      return invokeOnError(.device(.microphoneUnavailable), cause: error)
+    }
-      let audioSession = AVAudioSession.sharedInstance()
-      try audioSession.setCategoryIfNotSet(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
-      audioSession.trySetAllowHaptics(true)
+
+    // Audio Output
+    if let audioOutput = self.audioOutput {
+      audioCaptureSession.removeOutput(audioOutput)
+      self.audioOutput = nil
+    }
+    ReactLogger.log(level: .info, message: "Adding Audio Data output...")
+    audioOutput = AVCaptureAudioDataOutput()
+    guard audioCaptureSession.canAddOutput(audioOutput!) else {
+      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
+    }
+    audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
+    audioCaptureSession.addOutput(audioOutput!)
+  }
-      // activate current audio session because camera is active
-      try audioSession.setActive(true)
+
+  /**
+   Configures the Audio session and activates it. If the session was active it will shortly be deactivated before configuration.
+
+   The Audio Session will be configured to allow background music, haptics (vibrations) and system sound playback while recording.
+   Background audio is allowed to play on speakers or bluetooth speakers.
+   */
+  final func activateAudioSession() {
+    ReactLogger.log(level: .info, message: "Activating Audio Session...")
+
+    do {
+      try AVAudioSession.sharedInstance().updateCategory(AVAudioSession.Category.playAndRecord,
+                                                         options: [.mixWithOthers,
+                                                                   .allowBluetoothA2DP,
+                                                                   .defaultToSpeaker,
+                                                                   .allowAirPlay])
+      audioCaptureSession.startRunning()
     } catch let error as NSError {
       switch error.code {
       case 561_017_449:
         self.invokeOnError(.session(.audioInUseByOtherApp), cause: error)
       default:
-        self.invokeOnError(.session(.audioSessionSetupFailed(reason: error.description)), cause: error)
+        self.invokeOnError(.session(.audioSessionFailedToActivate), cause: error)
       }
-      self.removeAudioInput()
     }
-    let end = DispatchTime.now()
-    let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
-    ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
   }

-  /**
-   Configures the CaptureSession and adds the audio device if it has not already been added yet.
-   */
-  func addAudioInput() throws {
-    if audioDeviceInput != nil {
-      // we already added the audio device, don't add it again
-      return
-    }
-    removeAudioInput()
+  final func deactivateAudioSession() {
+    ReactLogger.log(level: .info, message: "Deactivating Audio Session...")
-    ReactLogger.log(level: .info, message: "Adding audio input...")
-    captureSession.beginConfiguration()
-    guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
-      throw CameraError.device(.microphoneUnavailable)
-    }
-    audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
-    guard captureSession.canAddInput(audioDeviceInput!) else {
-      throw CameraError.parameter(.unsupportedInput(inputDescriptor: "audio-input"))
-    }
-    captureSession.addInput(audioDeviceInput!)
-    captureSession.automaticallyConfiguresApplicationAudioSession = false
-    captureSession.commitConfiguration()
-  }
-
-  /**
-   Configures the CaptureSession and removes the audio device if it has been added before.
-   */
-  func removeAudioInput() {
-    guard let audioInput = audioDeviceInput else {
-      return
-    }
-
-    ReactLogger.log(level: .info, message: "Removing audio input...")
-    captureSession.beginConfiguration()
-    captureSession.removeInput(audioInput)
-    audioDeviceInput = nil
-    captureSession.commitConfiguration()
+
+    audioCaptureSession.stopRunning()
   }

   @objc
@@ -93,18 +103,21 @@ extension CameraView {
     switch type {
     case .began:
       // Something interrupted our Audio Session, stop recording audio.
-      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
-      removeAudioInput()
+      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!", alsoLogToJS: true)
     case .ended:
-      ReactLogger.log(level: .error, message: "The Audio Session interruption has ended.")
+      ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
       guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
       let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
       if options.contains(.shouldResume) {
-        ReactLogger.log(level: .error, message: "Resuming interrupted Audio Session...")
-        // restart audio session because interruption is over
-        configureAudioSession()
+        if isRecording {
+          audioQueue.async {
+            ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...", alsoLogToJS: true)
+            // restart audio session because interruption is over
+            self.activateAudioSession()
+          }
+        }
       } else {
-        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
+        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!", alsoLogToJS: true)
       }
     @unknown default: ()
     }