fix: Make recorder less error-prone (#189)

* Abort recording if it fails to start or receives no frames

* Activate Audio Session on `cameraQueue`

* Double-check stop recording in callback

* Only call callback once

* Format

* Add description to `.aborted` error

* Update RecordingSession.swift

* Update AVAudioSession+updateCategory.swift

* Rename serial dispatch queues
Marc Rousavy 2021-06-09 14:56:56 +02:00 committed by GitHub
parent 02168e1f28
commit 5919d46a46
7 changed files with 97 additions and 35 deletions

View File

@@ -188,6 +188,7 @@ enum CaptureError {
case invalidPhotoCodec
case videoNotEnabled
case photoNotEnabled
case aborted
case unknown(message: String? = nil)
var code: String {
@@ -210,6 +211,8 @@ enum CaptureError {
return "video-not-enabled"
case .photoNotEnabled:
return "photo-not-enabled"
case .aborted:
return "aborted"
case .unknown:
return "unknown"
}
@@ -235,6 +238,8 @@ enum CaptureError {
return "Video capture is disabled! Pass `video={true}` to enable video recordings."
case .photoNotEnabled:
return "Photo capture is disabled! Pass `photo={true}` to enable photo capture."
case .aborted:
return "The capture has been stopped before any input data arrived."
case let .unknown(message: message):
return message ?? "An unknown error occurred while capturing a video/photo."
}

View File

@@ -10,23 +10,26 @@ import Foundation
@objc
public class CameraQueues: NSObject {
/// The serial execution queue for the camera preview layer (input stream) as well as output processing of photos.
@objc public static let cameraQueue = DispatchQueue(label: "com.mrousavy.vision.camera-queue",
@objc public static let cameraQueue = DispatchQueue(label: "mrousavy/VisionCamera.main",
qos: .userInteractive,
attributes: [],
autoreleaseFrequency: .inherit,
target: nil)
/// The serial execution queue for output processing of videos as well as frame processors.
@objc public static let videoQueue = DispatchQueue(label: "com.mrousavy.vision.video-queue",
@objc public static let videoQueue = DispatchQueue(label: "mrousavy/VisionCamera.video",
qos: .userInteractive,
attributes: [],
autoreleaseFrequency: .inherit,
target: nil)
/// The serial execution queue for output processing of audio buffers.
@objc public static let audioQueue = DispatchQueue(label: "com.mrousavy.vision.audio-queue",
@objc public static let audioQueue = DispatchQueue(label: "mrousavy/VisionCamera.audio",
qos: .userInteractive,
attributes: [],
autoreleaseFrequency: .inherit,
target: nil)
}
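
For context, a minimal sketch (not part of this diff) of how these renamed serial queues are used; the closure bodies are illustrative placeholders:

```swift
import Foundation

// Camera configuration, photo capture and start/stop of recordings run on the camera queue.
CameraQueues.cameraQueue.async {
  // e.g. configure the AVCaptureSession or start a recording
}

// Per-frame video work (sample buffers, frame processors) runs on the video queue.
CameraQueues.videoQueue.async {
  // e.g. process a video CMSampleBuffer
}

// Audio-session activation and audio buffer handling run on the audio queue.
CameraQueues.audioQueue.async {
  // e.g. activate the AVAudioSession
}
```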

View File

@@ -71,9 +71,16 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
}
}
self.isRecording = false
ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
if let error = error as NSError? {
if error.domain == "capture/aborted" {
callback.reject(error: .capture(.aborted), cause: error)
} else {
callback.reject(error: .capture(.unknown(message: "An unknown recording error occurred! \(error.description)")), cause: error)
}
} else {
if status == .completed {
callback.resolve([
@@ -106,30 +113,25 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
// Init Audio (optional, async)
if enableAudio {
self.audioQueue.async {
// Activate Audio Session (blocking)
self.activateAudioSession()
guard let recordingSession = self.recordingSession else {
// recording has already been cancelled
return
}
if let audioOutput = self.audioOutput,
let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) as? [String: Any] {
recordingSession.initializeAudioWriter(withSettings: audioSettings)
self.recordingSession!.initializeAudioWriter(withSettings: audioSettings)
}
}
// Finally start recording, with or without audio.
recordingSession.start()
self.isRecording = true
// start recording session with or without audio.
do {
try self.recordingSession!.start()
} catch {
callback.reject(error: .capture(.createRecorderError(message: "RecordingSession failed to start writing.")))
return
}
} else {
// start recording session without audio.
self.recordingSession!.start()
self.isRecording = true
}
}
}
func stopRecording(promise: Promise) {
cameraQueue.async {

View File

@@ -11,7 +11,7 @@ import Foundation
extension AVAudioSession {
/**
Calls [setCategory] if the given category or options are not equal to the currently set category and options and reactivates the session.
Calls [setCategory] if the given category or options are not equal to the currently set category and options.
*/
func updateCategory(_ category: AVAudioSession.Category, options: AVAudioSession.CategoryOptions = []) throws {
if self.category != category || categoryOptions.rawValue != options.rawValue {
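
For reference, a hedged usage sketch of this helper; the category and options shown here are illustrative and not taken from this commit:

```swift
import AVFoundation

do {
  // Only calls setCategory (and therefore only disturbs the session) when the
  // requested category/options actually differ from what is currently set.
  try AVAudioSession.sharedInstance().updateCategory(.playAndRecord,
                                                     options: [.mixWithOthers, .allowBluetoothA2DP])
} catch {
  print("Failed to update AVAudioSession category: \(error)")
}
```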

View File

@@ -12,27 +12,38 @@ import Foundation
Represents a callback to JavaScript. Syntax is the same as with Promise.
*/
class Callback {
private var hasCalled = false
private let callback: RCTResponseSenderBlock
init(_ callback: @escaping RCTResponseSenderBlock) {
self.callback = callback
}
func reject(error: CameraError, cause: NSError?) {
guard !hasCalled else { return }
callback([NSNull(), makeReactError(error, cause: cause)])
hasCalled = true
}
func reject(error: CameraError) {
guard !hasCalled else { return }
reject(error: error, cause: nil)
hasCalled = true
}
func resolve(_ value: Any?) {
func resolve(_ value: Any) {
guard !hasCalled else { return }
callback([value, NSNull()])
hasCalled = true
}
func resolve() {
resolve(nil)
guard !hasCalled else { return }
resolve(NSNull())
hasCalled = true
}
// MARK: Private
private let callback: RCTResponseSenderBlock
}
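
To illustrate the "only call callback once" guarantee, here is a standalone sketch of the same `hasCalled` guard pattern, using a plain closure in place of React Native's `RCTResponseSenderBlock` (the names below are hypothetical):

```swift
import Foundation

// Standalone illustration of the hasCalled guard: only the first resolve/reject
// ever reaches the underlying callback; later calls are silently ignored.
final class OnceCallback {
  private var hasCalled = false
  private let callback: ([Any]) -> Void

  init(_ callback: @escaping ([Any]) -> Void) {
    self.callback = callback
  }

  func resolve(_ value: Any) {
    guard !hasCalled else { return }
    callback([value, NSNull()])
    hasCalled = true
  }

  func reject(message: String) {
    guard !hasCalled else { return }
    callback([NSNull(), message])
    hasCalled = true
  }
}

let callback = OnceCallback { args in print("invoked with \(args)") }
callback.resolve("file:///tmp/video.mp4")     // invoked
callback.reject(message: "stopped too early") // no-op: already resolved
```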

View File

@@ -16,6 +16,12 @@ enum BufferType {
case video
}
// MARK: - RecordingSessionError
enum RecordingSessionError: Error {
case failedToStartSession
}
// MARK: - RecordingSession
class RecordingSession {
@@ -59,6 +65,9 @@ class RecordingSession {
}
}
/**
Initializes an AssetWriter for video frames (CMSampleBuffers).
*/
func initializeVideoWriter(withSettings settings: [String: Any], isVideoMirrored: Bool) {
guard !settings.isEmpty else {
ReactLogger.log(level: .error, message: "Tried to initialize Video Writer with empty settings!", alsoLogToJS: true)
@@ -83,6 +92,9 @@ class RecordingSession {
ReactLogger.log(level: .info, message: "Initialized Video AssetWriter.")
}
/**
Initializes an AssetWriter for audio frames (CMSampleBuffers).
*/
func initializeAudioWriter(withSettings settings: [String: Any]) {
guard !settings.isEmpty else {
ReactLogger.log(level: .error, message: "Tried to initialize Audio Writer with empty settings!", alsoLogToJS: true)
@@ -99,15 +111,34 @@ class RecordingSession {
ReactLogger.log(level: .info, message: "Initialized Audio AssetWriter.")
}
func start() {
assetWriter.startWriting()
/**
Start the Asset Writer(s). If the AssetWriter failed to start, an error will be thrown.
*/
func start() throws {
ReactLogger.log(level: .info, message: "Starting Asset Writer(s)...")
let success = assetWriter.startWriting()
if !success {
ReactLogger.log(level: .error, message: "Failed to start Asset Writer(s)!")
throw RecordingSessionError.failedToStartSession
}
initialTimestamp = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 1_000_000_000)
assetWriter.startSession(atSourceTime: initialTimestamp!)
ReactLogger.log(level: .info, message: "Started RecordingSession at \(initialTimestamp!.seconds) seconds.")
}
/**
Appends a new CMSampleBuffer to the Asset Writer. Use bufferType to specify if this is a video or audio frame.
The timestamp parameter represents the presentation timestamp of the buffer, which should be synchronized across video and audio frames.
*/
func appendBuffer(_ buffer: CMSampleBuffer, type bufferType: BufferType, timestamp: CMTime) {
guard assetWriter.status == .writing else {
ReactLogger.log(level: .error, message: "Frame arrived, but AssetWriter status is \(assetWriter.status.descriptor)!")
return
}
if !CMSampleBufferDataIsReady(buffer) {
ReactLogger.log(level: .error, message: "Frame arrived, but sample buffer is not ready!")
return
}
guard let initialTimestamp = initialTimestamp else {
@@ -138,7 +169,7 @@ class RecordingSession {
bufferAdaptor.append(imageBuffer, withPresentationTime: timestamp)
if !hasWrittenFirstVideoFrame {
hasWrittenFirstVideoFrame = true
ReactLogger.log(level: .warning, message: "VideoWriter: First frame arrived \((timestamp - initialTimestamp).seconds) seconds late.")
ReactLogger.log(level: .warning, message: "VideoWriter: First frame arrived \((initialTimestamp - timestamp).seconds) seconds late.")
}
case .audio:
guard let audioWriter = audioWriter else {
@@ -156,16 +187,25 @@ class RecordingSession {
}
if assetWriter.status == .failed {
// TODO: Should I call the completion handler or is this instance still valid?
ReactLogger.log(level: .error,
message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")",
alsoLogToJS: true)
finish()
}
}
/**
Marks the AssetWriters as finished and stops writing frames. The callback will be invoked either with an error or the status "success".
*/
func finish() {
ReactLogger.log(level: .info, message: "Finishing Recording with AssetWriter status \"\(assetWriter.status.descriptor)\"...")
if assetWriter.status == .writing {
if !hasWrittenFirstVideoFrame {
let error = NSError(domain: "capture/aborted",
code: 1,
userInfo: [NSLocalizedDescriptionKey: "Stopped Recording Session too early, no frames have been recorded!"])
completionHandler(.failed, error)
} else if assetWriter.status == .writing {
bufferAdaptor?.assetWriterInput.markAsFinished()
audioWriter?.markAsFinished()
assetWriter.finishWriting {
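
Putting the changed pieces together, a minimal lifecycle sketch (illustrative only, not code from this commit) of how a caller drives the updated `RecordingSession` API:

```swift
import AVFoundation

// Hypothetical driver: initialize a writer, start (now throwing), append frames, finish.
func record(session: RecordingSession,
            videoSettings: [String: Any],
            buffer: CMSampleBuffer,
            timestamp: CMTime) {
  session.initializeVideoWriter(withSettings: videoSettings, isVideoMirrored: false)
  do {
    // start() now throws RecordingSessionError.failedToStartSession instead of failing silently.
    try session.start()
  } catch {
    return
  }
  session.appendBuffer(buffer, type: .video, timestamp: timestamp)
  // finish() invokes the completion handler with .completed, .failed, or the
  // "capture/aborted" error if no video frame was ever written.
  session.finish()
}
```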

View File

@@ -39,6 +39,7 @@ export type CaptureError =
| 'capture/capture-type-not-supported'
| 'capture/video-not-enabled'
| 'capture/photo-not-enabled'
| 'capture/aborted'
| 'capture/unknown';
export type SystemError = 'system/no-camera-manager';
export type UnknownError = 'unknown/unknown';