fix: Fully synchronize buffers in RecordingSession to account for late-running frames (#2206)

* fix: Fully synchronize `RecordingSession` to account for late Frames
* Restructure Code
* Update RecordingSession.swift
* Fix last audio timestamp not being used
* fix: Remove `capture/aborted` error
* Update RecordingSession.swift
* Don't log for every Frame
* Format
This commit is contained in:
parent: 29e649937e
commit: 1767e6e881
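The core idea of the change: the RecordingSession is now anchored to the capture session's clock. start(clock:) records the clock's current time as the start timestamp, stop(clock:) records it as the stop timestamp, and every incoming buffer is only written if its presentation timestamp falls inside that window, so frames captured before the start are dropped and late frames that arrive after stop() was requested can still be written as the closing frames. A minimal, self-contained sketch of that windowing logic (illustrative only, not the library's actual types; all names below are hypothetical):

    import CoreMedia

    // Sketch of a clock-anchored recording window, assuming a single track.
    struct RecordingWindow {
        private(set) var startTime: CMTime?
        private(set) var stopTime: CMTime?
        private var wroteClosingFrame = false

        mutating func start(clock: CMClock) {
            // Frames with timestamps earlier than this are "late" leftovers and get dropped.
            startTime = CMClockGetTime(clock)
        }

        mutating func requestStop(clock: CMClock) {
            // The first frame at/after this time becomes the last frame that is written.
            stopTime = CMClockGetTime(clock)
        }

        mutating func shouldWrite(timestamp: CMTime) -> Bool {
            guard let startTime = startTime else { return false } // not started yet
            if timestamp < startTime { return false }             // captured before start (pipeline delay)
            guard let stopTime = stopTime else { return true }    // still recording normally
            if timestamp < stopTime { return true }               // late frame that still belongs to the video
            if wroteClosingFrame { return false }                 // closing frame already written
            wroteClosingFrame = true                              // write exactly one frame at/after stop
            return true
        }
    }

In the actual diff below this bookkeeping is done per track (hasWrittenLastVideoFrame / hasWrittenLastAudioFrame), so both the video and the audio input receive one closing buffer before the writer is finished.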
@@ -128,7 +128,8 @@ extension CameraSession {
       recordingSession.initializeVideoWriter(withSettings: videoSettings)
 
       // start recording session with or without audio.
-      try recordingSession.startAssetWriter()
+      // Use Video [AVCaptureSession] clock as a timebase - all other sessions (here; audio) have to be synced to that Clock.
+      try recordingSession.start(clock: self.captureSession.clock)
       self.recordingSession = recordingSession
       self.isRecording = true
 
@@ -150,13 +151,13 @@ extension CameraSession {
    */
   func stopRecording(promise: Promise) {
     CameraQueues.cameraQueue.async {
-      self.isRecording = false
-
       withPromise(promise) {
         guard let recordingSession = self.recordingSession else {
           throw CameraError.capture(.noRecordingInProgress)
         }
-        recordingSession.finish()
+        // Use Video [AVCaptureSession] clock as a timebase - all other sessions (here; audio) have to be synced to that Clock.
+        recordingSession.stop(clock: self.captureSession.clock)
+        // There might be late frames, so maybe we need to still provide more Frames to the RecordingSession. Let's keep isRecording true for now.
         return nil
       }
     }
@@ -273,11 +273,11 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
     switch captureOutput {
     case is AVCaptureVideoDataOutput:
       // Write the Video Buffer to the .mov/.mp4 file, this is the first timestamp if nothing has been recorded yet
-      recordingSession.appendBuffer(sampleBuffer, type: .video)
+      recordingSession.appendBuffer(sampleBuffer, clock: captureSession.clock, type: .video)
     case is AVCaptureAudioDataOutput:
       // Synchronize the Audio Buffer with the Video Session's time because it's two separate AVCaptureSessions
       audioCaptureSession.synchronizeBuffer(sampleBuffer, toSession: captureSession)
-      recordingSession.appendBuffer(sampleBuffer, type: .audio)
+      recordingSession.appendBuffer(sampleBuffer, clock: audioCaptureSession.clock, type: .audio)
     default:
       break
     }
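For context, this switch runs inside the shared sample-buffer delegate callback that both data outputs deliver into. A hypothetical, stripped-down version of such a delegate (not the library's real class) looks like this:

    import AVFoundation

    // Hypothetical minimal delegate: the video and the audio data output both call
    // the same captureOutput(_:didOutput:from:) method, and the receiver branches on
    // which output delivered the buffer - exactly what the switch in the diff above does.
    final class BufferForwarder: NSObject,
                                 AVCaptureVideoDataOutputSampleBufferDelegate,
                                 AVCaptureAudioDataOutputSampleBufferDelegate {
        var onBuffer: ((CMSampleBuffer, AVCaptureOutput) -> Void)?

        func captureOutput(_ output: AVCaptureOutput,
                           didOutput sampleBuffer: CMSampleBuffer,
                           from connection: AVCaptureConnection) {
            onBuffer?(sampleBuffer, output)
        }
    }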
@@ -18,17 +18,32 @@ enum BufferType {
 
 // MARK: - RecordingSession
 
 /**
  A [RecordingSession] class that can record video and audio [CMSampleBuffers] from [AVCaptureVideoDataOutput] and
  [AVCaptureAudioDataOutput] into a .mov/.mp4 file using [AVAssetWriter].
+
+ It also synchronizes buffers to the CMTime by the CaptureSession so that late frames are removed from the beginning and added
+ towards the end (useful e.g. for videoStabilization).
  */
 class RecordingSession {
   private let assetWriter: AVAssetWriter
   private var audioWriter: AVAssetWriterInput?
   private var videoWriter: AVAssetWriterInput?
   private let completionHandler: (RecordingSession, AVAssetWriter.Status, Error?) -> Void
 
-  private var initialTimestamp: CMTime?
-  private var latestTimestamp: CMTime?
-  private var hasStartedWritingSession = false
-  private var hasWrittenFirstVideoFrame = false
+  private var startTimestamp: CMTime?
+  private var stopTimestamp: CMTime?
+
+  private var lastWrittenTimestamp: CMTime?
+
+  private var isFinishing = false
+  private var hasWrittenLastVideoFrame = false
+  private var hasWrittenLastAudioFrame = false
+
+  private let lock = DispatchSemaphore(value: 1)
+
+  // If we are waiting for late frames and none actually arrive, we force stop the session after the given timeout.
+  private let automaticallyStopTimeoutSeconds = 4.0
 
   /**
    Gets the file URL of the recorded video.
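The new lock property is a plain DispatchSemaphore with an initial value of 1 used as a mutex: start, stop and finish take it with wait() and release it in a defer block. A small standalone illustration of that pattern (the Counter type and its names are illustrative, not from the diff):

    import Dispatch

    // DispatchSemaphore(value: 1) acting as a mutual-exclusion lock:
    // wait() takes the lock, signal() in the defer releases it on every exit path.
    final class Counter {
        private let lock = DispatchSemaphore(value: 1)
        private var value = 0

        func increment() -> Int {
            lock.wait()
            defer { lock.signal() }
            value += 1
            return value
        }
    }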
@@ -41,11 +56,11 @@ class RecordingSession {
    Get the duration (in seconds) of the recorded video.
    */
   var duration: Double {
-    guard let latestTimestamp = latestTimestamp,
-          let initialTimestamp = initialTimestamp else {
+    guard let lastWrittenTimestamp = lastWrittenTimestamp,
+          let startTimestamp = startTimestamp else {
       return 0.0
     }
-    return (latestTimestamp - initialTimestamp).seconds
+    return (lastWrittenTimestamp - startTimestamp).seconds
   }
 
   init(url: URL,
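duration is now the distance between the session's start timestamp and the last buffer that was actually written. A standalone illustration of the CMTime subtraction and .seconds conversion it relies on (the values are made up):

    import CoreMedia

    // CMTime arithmetic used by `duration`: subtract two timestamps and read the result back as seconds.
    let startTimestamp = CMTime(value: 900, timescale: 600)         // 1.5 s
    let lastWrittenTimestamp = CMTime(value: 4_200, timescale: 600) // 7.0 s
    let duration = (lastWrittenTimestamp - startTimestamp).seconds  // 5.5
    print("duration:", duration)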
@@ -109,24 +124,80 @@ class RecordingSession {
   }
 
   /**
-   Start the Asset Writer(s). If the AssetWriter failed to start, an error will be thrown.
+   Start the RecordingSession using the current time of the provided synchronization clock.
+   All buffers passed to [append] must be synchronized to this Clock.
    */
-  func startAssetWriter() throws {
+  func start(clock: CMClock) throws {
+    lock.wait()
+    defer {
+      lock.signal()
+    }
+
     ReactLogger.log(level: .info, message: "Starting Asset Writer(s)...")
 
     let success = assetWriter.startWriting()
-    if success {
-      ReactLogger.log(level: .info, message: "Asset Writer(s) started!")
-    } else {
+    guard success else {
       ReactLogger.log(level: .error, message: "Failed to start Asset Writer(s)!")
       throw CameraError.capture(.createRecorderError(message: "Failed to start Asset Writer(s)!"))
     }
+
+    ReactLogger.log(level: .info, message: "Asset Writer(s) started!")
+
+    // Get the current time of the AVCaptureSession.
+    // Note: The current time might be more advanced than this buffer's timestamp, for example if the video
+    // pipeline had some additional delay in processing the buffer (aka it is late) - eg because of Video Stabilization (~1s delay).
+    let currentTime = CMClockGetTime(clock)
+
+    // Start the session at the given time. Frames with earlier timestamps (e.g. late frames) will be dropped.
+    assetWriter.startSession(atSourceTime: currentTime)
+    startTimestamp = currentTime
+    ReactLogger.log(level: .info, message: "Started RecordingSession at time: \(currentTime.seconds)")
   }
 
+  /**
+   Requests the RecordingSession to stop writing frames at the current time of the provided synchronization clock.
+   The RecordingSession will continue to write video frames and audio frames for a little longer if there was a delay
+   in the video pipeline (e.g. caused by video stabilization) to avoid the video cutting off late frames.
+   Once all late frames have been captured (or an artificial abort timeout has been triggered), the [completionHandler] will be called.
+   */
+  func stop(clock: CMClock) {
+    lock.wait()
+    defer {
+      lock.signal()
+    }
+
+    // Current time of the synchronization clock (e.g. from [AVCaptureSession]) - this marks the end of the video.
+    let currentTime = CMClockGetTime(clock)
+
+    // Request a stop at the given time. Frames with later timestamps (e.g. early frames, while we are waiting for late frames) will be dropped.
+    stopTimestamp = currentTime
+    ReactLogger.log(level: .info,
+                    message: "Requesting stop at \(currentTime.seconds) seconds for AssetWriter with status \"\(assetWriter.status.descriptor)\"...")
+
+    // Start a timeout that will force-stop the session if none of the late frames actually arrive
+    CameraQueues.cameraQueue.asyncAfter(deadline: .now() + automaticallyStopTimeoutSeconds) {
+      if !self.isFinishing {
+        ReactLogger.log(level: .error, message: "Waited \(self.automaticallyStopTimeoutSeconds) seconds but no late Frames came in, aborting capture...")
+        self.finish()
+      }
+    }
+  }
+
   /**
-   Appends a new CMSampleBuffer to the Asset Writer. Use bufferType to specify if this is a video or audio frame.
+   Appends a new CMSampleBuffer to the Asset Writer.
+   - Use clock to specify the CMClock instance this CMSampleBuffer uses for relative time
+   - Use bufferType to specify if this is a video or audio frame.
    */
-  func appendBuffer(_ buffer: CMSampleBuffer, type bufferType: BufferType) {
+  func appendBuffer(_ buffer: CMSampleBuffer, clock _: CMClock, type bufferType: BufferType) {
+    // 1. Check if the data is even ready
+    guard let startTimestamp = startTimestamp else {
+      // Session not yet started
+      return
+    }
+    guard !isFinishing else {
+      // Session is already finishing, can't write anything more
+      return
+    }
     guard assetWriter.status == .writing else {
       ReactLogger.log(level: .error, message: "Frame arrived, but AssetWriter status is \(assetWriter.status.descriptor)!")
       return
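Note that stop(clock:) does not finish the writer immediately: it only records the stop timestamp and schedules a watchdog so the recording cannot hang forever if the expected late frames never arrive. The watchdog pattern in isolation, as a hedged standalone sketch (queue, parameter names and closures are illustrative):

    import Dispatch

    // Watchdog sketch: if the normal completion path has not run after the timeout,
    // force it. Mirrors the asyncAfter block in stop(clock:) above.
    func scheduleForceStop(on queue: DispatchQueue,
                           timeoutSeconds: Double,
                           isAlreadyFinishing: @escaping () -> Bool,
                           forceFinish: @escaping () -> Void) {
        queue.asyncAfter(deadline: .now() + timeoutSeconds) {
            if !isAlreadyFinishing() {
                forceFinish()
            }
        }
    }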
@@ -136,82 +207,98 @@ class RecordingSession {
       return
     }
 
-    switch bufferType {
-    case .video:
-      guard let videoWriter = videoWriter else {
-        ReactLogger.log(level: .error, message: "Video Frame arrived but VideoWriter was nil!")
-        return
-      }
-      if !videoWriter.isReadyForMoreMediaData {
-        ReactLogger.log(level: .warning,
-                        message: "The Video AVAssetWriterInput was not ready for more data! Is your frame rate too high?")
-        return
-      }
-      let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer)
-      // Start the writing session before we write the first video frame
-      if !hasStartedWritingSession {
-        initialTimestamp = timestamp
-        assetWriter.startSession(atSourceTime: timestamp)
-        ReactLogger.log(level: .info, message: "Started RecordingSession at \(timestamp.seconds) seconds.")
-        hasStartedWritingSession = true
-      }
-      // Write Video Buffer!
-      videoWriter.append(buffer)
-      // Update state
-      latestTimestamp = timestamp
-      if !hasWrittenFirstVideoFrame {
-        hasWrittenFirstVideoFrame = true
-      }
-    case .audio:
-      guard let audioWriter = audioWriter else {
-        ReactLogger.log(level: .error, message: "Audio Frame arrived but AudioWriter was nil!")
-        return
-      }
-      if !audioWriter.isReadyForMoreMediaData {
-        return
-      }
-      if !hasWrittenFirstVideoFrame || !hasStartedWritingSession {
-        // first video frame has not been written yet, so skip this audio frame.
-        return
-      }
-      // Write Audio Sample!
-      audioWriter.append(buffer)
-    }
-
-    // If we failed to write the frames, stop the Recording
+    // 2. Check the timing of the buffer and make sure it's within our session start and stop times
+    let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer)
+    if timestamp < startTimestamp {
+      // Don't write this Frame, it was captured before we even started recording.
+      // The reason this can happen is because the capture pipeline can have a delay, e.g. because of stabilization.
+      let delay = CMTimeSubtract(startTimestamp, timestamp)
+      ReactLogger.log(level: .info, message: "Capture Pipeline has a delay of \(delay.seconds) seconds. Skipping this late Frame...")
+      return
+    }
+    if let stopTimestamp = stopTimestamp,
+       timestamp >= stopTimestamp {
+      // This Frame is exactly at, or after the point in time when RecordingSession.stop() has been called.
+      // Consider this the last Frame we write
+      switch bufferType {
+      case .video:
+        if hasWrittenLastVideoFrame {
+          // already wrote last Video Frame before, so skip this one.
+          return
+        }
+        hasWrittenLastVideoFrame = true // flip to true, then write it
+      case .audio:
+        if hasWrittenLastAudioFrame {
+          // already wrote last Audio Frame before, so skip this one.
+          return
+        }
+        hasWrittenLastAudioFrame = true // flip to true, then write it
+      }
+    }
+
+    // 3. Actually write the Buffer to the AssetWriter
+    let writer = getAssetWriter(forType: bufferType)
+    guard writer.isReadyForMoreMediaData else {
+      ReactLogger.log(level: .warning, message: "\(bufferType) AssetWriter is not ready for more data, dropping this Frame...")
+      return
+    }
+    writer.append(buffer)
+    lastWrittenTimestamp = timestamp
+
+    // 4. If we failed to write the frames, stop the Recording
     if assetWriter.status == .failed {
       ReactLogger.log(level: .error,
                       message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")")
       finish()
     }
+
+    // 5. If we finished writing both the last video and audio buffers, finish the recording
+    if hasWrittenLastAudioFrame && hasWrittenLastVideoFrame {
+      ReactLogger.log(level: .info, message: "Successfully appended last \(bufferType) Buffer (at \(timestamp.seconds) seconds), finishing RecordingSession...")
+      finish()
+    }
   }
 
+  private func getAssetWriter(forType type: BufferType) -> AVAssetWriterInput {
+    switch type {
+    case .video:
+      guard let videoWriter = videoWriter else {
+        fatalError("Tried to append to a Video Buffer, which was nil!")
+      }
+      return videoWriter
+    case .audio:
+      guard let audioWriter = audioWriter else {
+        fatalError("Tried to append to a Audio Buffer, which was nil!")
+      }
+      return audioWriter
+    }
+  }
+
   /**
-   Marks the AssetWriters as finished and stops writing frames. The callback will be invoked either with an error or the status "success".
+   Stops the AssetWriters and calls the completion callback.
    */
-  func finish() {
-    ReactLogger.log(level: .info, message: "Finishing Recording with AssetWriter status \"\(assetWriter.status.descriptor)\"...")
-
-    if isFinishing {
+  private func finish() {
+    lock.wait()
+    defer {
+      lock.signal()
+    }
+
+    ReactLogger.log(level: .info, message: "Stopping AssetWriter with status \"\(assetWriter.status.descriptor)\"...")
+
+    guard !isFinishing else {
       ReactLogger.log(level: .warning, message: "Tried calling finish() twice while AssetWriter is still writing!")
       return
     }
 
-    if !hasWrittenFirstVideoFrame {
-      let error = NSError(domain: "capture/aborted",
-                          code: 1,
-                          userInfo: [NSLocalizedDescriptionKey: "Stopped Recording Session too early, no frames have been recorded!"])
-      completionHandler(self, .failed, error)
-    } else if assetWriter.status == .writing {
-      isFinishing = true
-      videoWriter?.markAsFinished()
-      audioWriter?.markAsFinished()
-      assetWriter.finishWriting {
-        self.isFinishing = false
-        self.completionHandler(self, self.assetWriter.status, self.assetWriter.error)
-      }
-    } else {
-      completionHandler(self, assetWriter.status, assetWriter.error)
+    guard assetWriter.status == .writing else {
+      completionHandler(self, assetWriter.status, assetWriter.error)
+      return
+    }
+
+    isFinishing = true
+    videoWriter?.markAsFinished()
+    audioWriter?.markAsFinished()
+    assetWriter.finishWriting {
+      self.completionHandler(self, self.assetWriter.status, self.assetWriter.error)
     }
   }
 
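finish() ends with the standard AVAssetWriter teardown sequence: mark every input as finished, then call finishWriting and report the final status and error from its completion handler. A compact standalone version of that sequence (a sketch assuming a writer whose inputs have all been added already; the helper name is illustrative):

    import AVFoundation

    // Standard AVAssetWriter teardown: mark inputs finished, then finishWriting
    // and hand the final status/error to the caller.
    func teardown(_ writer: AVAssetWriter,
                  completion: @escaping (AVAssetWriter.Status, Error?) -> Void) {
        writer.inputs.forEach { $0.markAsFinished() }
        writer.finishWriting {
            completion(writer.status, writer.error)
        }
    }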
@@ -10,7 +10,10 @@ import AVFoundation
 import Foundation
 
 extension AVCaptureSession {
-  private var clock: CMClock {
+  /**
+   Returns the clock that is used by this AVCaptureSession.
+   */
+  var clock: CMClock {
     if #available(iOS 15.4, *), let synchronizationClock {
       return synchronizationClock
     }
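The clock property prefers synchronizationClock, which only exists from iOS 15.4; the lines that follow in the file (not shown in this hunk) presumably fall back to an older clock. A hedged sketch of such an availability-based selection, written as a free function with an assumed masterClock fallback:

    import AVFoundation
    import CoreMedia

    // Assumed fallback chain: synchronizationClock (iOS 15.4+), then the deprecated
    // masterClock, then the host time clock as a last resort. The exact fallback used
    // by the real extension is not part of this hunk.
    func sessionClock(of session: AVCaptureSession) -> CMClock {
        if #available(iOS 15.4, *), let clock = session.synchronizationClock {
            return clock
        }
        return session.masterClock ?? CMClockGetHostTimeClock()
    }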
@@ -24,7 +27,6 @@ extension AVCaptureSession {
   func synchronizeBuffer(_ buffer: CMSampleBuffer, toSession to: AVCaptureSession) {
     let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer)
     let synchronizedTimestamp = CMSyncConvertTime(timestamp, from: clock, to: to.clock)
-    ReactLogger.log(level: .info, message: "Synchronized Timestamp \(timestamp.seconds) -> \(synchronizedTimestamp.seconds)")
     CMSampleBufferSetOutputPresentationTimeStamp(buffer, newValue: synchronizedTimestamp)
   }
 }
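synchronizeBuffer() retimes an audio buffer from the audio session's clock onto the video session's clock so both tracks share one timeline; per the commit message, the per-frame log line was removed because it fired for every buffer. A tiny standalone sanity check of the CMSync conversion primitive it uses (host clock only, so the conversion is the identity):

    import CoreMedia

    // CMSyncConvertTime maps a CMTime from one clock to another; converting within
    // the same clock returns the same time, which makes for an easy sanity check.
    let hostClock = CMClockGetHostTimeClock()
    let now = CMClockGetTime(hostClock)
    let converted = CMSyncConvertTime(now, from: hostClock, to: hostClock)
    assert(converted == now)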