fix: Move Audio Input initialization shortly before startRecording (#159)

* rename

* Update AVAudioSession+updateCategory.swift

* fix bootstrap script

* Update CameraView+AVAudioSession.swift

* move audio input adding lower

* Activate AudioSession only when starting recording

* format

* Deactivate Audio Session

* remove audio input before deactivating audio session

* Update CameraView+AVAudioSession.swift

* log time

* Update CameraView+AVAudioSession.swift

* measure time with `measureElapsedTime`

* Update project.pbxproj

* only log in debug builds

* bootstrap with bridge (RNN new API)

* Mark two funcs as `@inlinable`

* format

* Update ReactLogger.swift

* Make audioWriter optional (allow videos without sound)

* only log frame drop reason in DEBUG

* Make audio writing entirely optional

* format

* Use function name as label for measureElapsedTime

* Update MeasureElapsedTime.swift

* Update MeasureElapsedTime.swift

* Mark AudioWriter as finished

* set `automaticallyConfiguresApplicationAudioSession` once

* Add JS console logging

* log to JS console for a few logs

* Update AVAudioSession+updateCategory.swift

* format

* Update JSConsoleHelper.mm

* catch log errors

* Update ReactLogger.swift

* fix docs

* Update RecordingSession.swift

* Immediately add audio input

* Update CameraView+AVCaptureSession.swift

* Update CameraView+AVCaptureSession.swift

* Update ReactLogger.swift

* immediately set audio session

* extract

* format

* Update TROUBLESHOOTING.mdx

* hmm

* Update AVAudioSession+updateCategory.swift

* Create secondary `AVCaptureSession` for audio

* Configure once, start/stop on demand

* format

* fix audio notification interruptions

* docs

Authored by Marc Rousavy on 2021-06-03 14:16:02 +02:00, committed by GitHub
parent 71730a73ef
commit eeb765f018
21 changed files with 420 additions and 212 deletions
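
In short, the change keeps the capture pipeline configured up front but ties the shared AVAudioSession's lifecycle to recording: it is activated on the audio queue right before the RecordingSession starts writing, and deactivated again in the session's completion handler. A minimal sketch of that pattern follows (illustrative only, not the library's exact code; the class, queue label and method names below are made up, while the real implementation lives in activateAudioSession()/deactivateAudioSession() on CameraView, as the diff below shows):

import AVFoundation

// Sketch: activate the shared audio session only while a recording is running,
// so mounting the camera alone never interrupts other apps' audio.
final class AudioSessionCoordinator {
  private let audioQueue = DispatchQueue(label: "com.example.camera.audio") // hypothetical label

  // Call right before the recording session starts writing samples.
  func activateForRecording() {
    audioQueue.async {
      do {
        let session = AVAudioSession.sharedInstance()
        // Category and options are illustrative; the library configures them
        // in AVAudioSession+updateCategory.swift.
        try session.setCategory(.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
        try session.setActive(true)
      } catch {
        print("Failed to activate audio session: \(error)")
      }
    }
  }

  // Call from the recording session's completion handler, after the
  // audio input has been removed from the capture session.
  func deactivateAfterRecording() {
    audioQueue.async {
      do {
        try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
      } catch {
        print("Failed to deactivate audio session: \(error)")
      }
    }
  }
}

As the commit list suggests, deferring activation like this keeps audio-session setup time out of camera startup and avoids pausing other apps' audio until the user actually starts a recording.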

@@ -13,9 +13,13 @@ private var hasLoggedFrameDropWarning = false
 // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
 extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
   /**
    Starts a video + audio recording with a custom Asset Writer.
    */
   func startRecording(options: NSDictionary, callback: @escaping RCTResponseSenderBlock) {
     cameraQueue.async {
+      ReactLogger.log(level: .info, message: "Starting Video recording...")
       do {
         let errorPointer = ErrorPointer(nilLiteral: ())
         guard let tempFilePath = RCTTempFilePath("mov", errorPointer) else {
@@ -41,6 +45,9 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
         let onFinish = { (status: AVAssetWriter.Status, error: Error?) -> Void in
           defer {
             self.recordingSession = nil
+            self.audioQueue.async {
+              self.deactivateAudioSession()
+            }
           }
           ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
           if let error = error {
@@ -58,16 +65,36 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
           }
         }
-        let videoSettings = self.videoOutput!.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
-        let audioSettings = self.audioOutput!.recommendedAudioSettingsForAssetWriter(writingTo: fileType) as? [String: Any]
-        self.recordingSession = try RecordingSession(url: tempURL,
-                                                     fileType: fileType,
-                                                     videoSettings: videoSettings ?? [:],
-                                                     audioSettings: audioSettings ?? [:],
-                                                     isVideoMirrored: self.videoOutput!.isMirrored,
-                                                     completion: onFinish)
-        self.isRecording = true
+        // Init Video
+        guard let videoOutput = self.videoOutput,
+              let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
+              !videoSettings.isEmpty else {
+          throw CameraError.capture(.createRecorderError(message: "Failed to get video settings!"))
+        }
+        self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
+                                                     isVideoMirrored: self.videoOutput!.isMirrored)
+        // Init Audio (optional, async)
+        self.audioQueue.async {
+          // Activate Audio Session (blocking)
+          self.activateAudioSession()
+          guard let recordingSession = self.recordingSession else {
+            // recording has already been cancelled
+            return
+          }
+          if let audioOutput = self.audioOutput,
+             let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) as? [String: Any] {
+            recordingSession.initializeAudioWriter(withSettings: audioSettings)
+          }
+          // Finally start recording, with or without audio.
+          recordingSession.start()
+          self.isRecording = true
+        }
       } catch EnumParserError.invalidValue {
         return callback([NSNull(), EnumParserError.invalidValue])
       } catch let error as NSError {
@@ -77,9 +104,9 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
   }
   func stopRecording(promise: Promise) {
-    isRecording = false
     cameraQueue.async {
+      self.isRecording = false
       withPromise(promise) {
         guard let recordingSession = self.recordingSession else {
           throw CameraError.capture(.noRecordingInProgress)
@@ -146,13 +173,16 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
   }
   public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
-    if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
-      let reason = findFrameDropReason(inBuffer: buffer)
-      // TODO: Show in React console?
-      ReactLogger.log(level: .warning, message: "Dropped a Frame. This might indicate that your Frame Processor is doing too much work. " +
-        "Either throttle the frame processor's frame rate, or optimize your frame processor's execution speed. Frame drop reason: \(reason)")
-      hasLoggedFrameDropWarning = true
-    }
+    #if DEBUG
+      if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
+        let reason = findFrameDropReason(inBuffer: buffer)
+        ReactLogger.log(level: .warning,
+                        message: "Dropped a Frame. This might indicate that your Frame Processor is doing too much work. " +
+                          "Either throttle the frame processor's frame rate, or optimize your frame processor's execution speed. Frame drop reason: \(reason)",
+                        alsoLogToJS: true)
+        hasLoggedFrameDropWarning = true
+      }
+    #endif
   }
   private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {