react-native-vision-camera/package/ios/Core/CameraSession+Video.swift
Marc Rousavy cd0b413706
feat: New Core/ library (#1975)
Moves everything Camera-related into `core/` / `Core/` so that it is better encapsulated from React Native.

Benefits:

1. Code is much better organized. It should be easier for collaborators now, and it is a cleaner codebase for me.
2. Locking is now fully atomic: the session can only be configured through a lock/Mutex, and multiple changes can be batched into a single commit (see the sketch after this list).
    * On iOS, this makes Camera startup time **MUCH** faster; I measured a speedup from **1.5 seconds** down to only **240 milliseconds**, since we now only lock/commit once! 🚀 
    * On Android, this fixes a few out-of-sync/concurrency issues like "Capture Request contains unconfigured Input/Output Surface!", since it is now a single lock operation! 💪 
3. It is now easier to integrate VisionCamera outside of React Native (e.g. native iOS apps, NativeScript, Flutter, etc.)
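
To make point 2 concrete (the sketch referenced above), here is a minimal example of what a batched configuration looks like at the call site. It follows the `configure { config in ... }` pattern used in `CameraSession+Video.swift` below; only `torch` is confirmed by this file, while the `.on` case and the `zoom` property are illustrative assumptions:

```swift
// Hypothetical call site: every mutation inside the closure is applied under
// a single lock/commit instead of locking the session once per property.
cameraSession.configure { config in
  config.torch = .on // set the same way in startRecording below (`.on` case assumed)
  config.zoom = 2.0  // illustrative assumption, not a confirmed property name
}
```

Because the closure is the only way to mutate the session, concurrent writers serialize on the same lock, which is what removes the out-of-sync errors mentioned in point 2.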

With this PR, VisionCamera V3 is up to **7x** faster than V2.
2023-10-13 18:33:20 +02:00


//
// CameraSession+Video.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit

extension CameraSession {
  /**
   Starts a video + audio recording with a custom Asset Writer.
   */
  func startRecording(options: RecordVideoOptions,
                      onVideoRecorded: @escaping (_ video: Video) -> Void,
                      onError: @escaping (_ error: CameraError) -> Void) {
    // Run on Camera Queue
    CameraQueues.cameraQueue.async {
      ReactLogger.log(level: .info, message: "Starting Video recording...")

      if options.flash != .off {
        // use the torch as the video's flash
        self.configure { config in
          config.torch = options.flash
        }
      }

      // Get Video Output
      guard let videoOutput = self.videoOutput else {
        if self.configuration?.video == .disabled {
          onError(.capture(.videoNotEnabled))
        } else {
          onError(.session(.cameraNotReady))
        }
        return
      }

      let enableAudio = self.configuration?.audio != .disabled

      // Callback for when the recording ends
      let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
        defer {
          // Disable Audio Session again
          if enableAudio {
            CameraQueues.audioQueue.async {
              self.deactivateAudioSession()
            }
          }
          // Reset flash
          if options.flash != .off {
            // Set torch mode back to what it was before if we used it for the video flash.
            self.configure { config in
              let torch = self.configuration?.torch ?? .off
              config.torch = torch
            }
          }
        }

        self.recordingSession = nil
        self.isRecording = false
        ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")

        if let error = error as NSError? {
          ReactLogger.log(level: .error, message: "RecordingSession Error \(error.code): \(error.description)")
          // Something went wrong, we have an error
          if error.domain == "capture/aborted" {
            onError(.capture(.aborted))
          } else {
            onError(.capture(.unknown(message: "An unknown recording error occurred! \(error.code) \(error.description)")))
          }
        } else {
          if status == .completed {
            // Recording was successfully saved
            let video = Video(path: recordingSession.url.absoluteString,
                              duration: recordingSession.duration)
            onVideoRecorded(video)
          } else {
            // Recording wasn't saved and we don't have an error either.
            onError(.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
          }
        }
      }

      // Create temporary file
      let errorPointer = ErrorPointer(nilLiteral: ())
      let fileExtension = options.fileType.descriptor ?? "mov"
      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
        let message = errorPointer?.pointee?.description
        onError(.capture(.createTempFileError(message: message)))
        return
      }

      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
      let tempURL = URL(string: "file://\(tempFilePath)")!

      let recordingSession: RecordingSession
      do {
        recordingSession = try RecordingSession(url: tempURL,
                                                fileType: options.fileType,
                                                completion: onFinish)
      } catch let error as NSError {
        onError(.capture(.createRecorderError(message: error.description)))
        return
      }
      self.recordingSession = recordingSession

      // Init Video
      guard var videoSettings = self.recommendedVideoSettings(videoOutput: videoOutput,
                                                              fileType: options.fileType,
                                                              videoCodec: options.codec),
            !videoSettings.isEmpty else {
        onError(.capture(.createRecorderError(message: "Failed to get video settings!")))
        return
      }
      ReactLogger.log(level: .trace, message: "Recommended Video Settings: \(videoSettings.description)")

      // Custom Video Bit Rate
      if let videoBitRate = options.bitRate {
        // Convert from Mbps -> bps
        let bitsPerSecond = videoBitRate * 1_000_000
        videoSettings[AVVideoCompressionPropertiesKey] = [
          AVVideoAverageBitRateKey: NSNumber(value: bitsPerSecond),
        ]
      }

      // get pixel format (420f, 420v, x420)
      let pixelFormat = videoOutput.pixelFormat
      recordingSession.initializeVideoWriter(withSettings: videoSettings,
                                             pixelFormat: pixelFormat)

      // Enable/Activate Audio Session (optional)
      if enableAudio {
        if let audioOutput = self.audioOutput {
          // Activate Audio Session asynchronously
          CameraQueues.audioQueue.async {
            do {
              try self.activateAudioSession()
            } catch {
              self.onConfigureError(error)
            }
          }

          // Initialize audio asset writer
          let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: options.fileType)
          recordingSession.initializeAudioWriter(withSettings: audioSettings)
        }
      }

      // start recording session with or without audio.
      do {
        try recordingSession.startAssetWriter()
        self.isRecording = true
      } catch let error as NSError {
        onError(.capture(.createRecorderError(message: "RecordingSession failed to start asset writer. \(error.description)")))
        return
      }
    }
  }

  /**
   Stops an active recording.
   */
  func stopRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      self.isRecording = false

      withPromise(promise) {
        guard let recordingSession = self.recordingSession else {
          throw CameraError.capture(.noRecordingInProgress)
        }
        recordingSession.finish()
        return nil
      }
    }
  }

  /**
   Pauses an active recording.
   */
  func pauseRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      withPromise(promise) {
        guard self.recordingSession != nil else {
          // there's no active recording!
          throw CameraError.capture(.noRecordingInProgress)
        }
        self.isRecording = false
        return nil
      }
    }
  }

  /**
   Resumes an active but paused recording.
   */
  func resumeRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      withPromise(promise) {
        guard self.recordingSession != nil else {
          // there's no active recording!
          throw CameraError.capture(.noRecordingInProgress)
        }
        self.isRecording = true
        return nil
      }
    }
  }

  private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput,
                                        fileType: AVFileType,
                                        videoCodec: AVVideoCodecType?) -> [String: Any]? {
    if let videoCodec = videoCodec {
      return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec, assetWriterOutputFileType: fileType)
    } else {
      return videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
    }
  }
}
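
For context, a minimal sketch of how a caller would drive these methods. The closure signatures match `startRecording` above, but the caller name and the way `options: RecordVideoOptions` is constructed are assumptions, since neither appears in this file:

```swift
// Hypothetical usage, assuming `options: RecordVideoOptions` was already built
// (e.g. from the JS-side record options) and `cameraSession` is a CameraSession.
cameraSession.startRecording(
  options: options,
  onVideoRecorded: { video in
    // `video.path` and `video.duration` come from the RecordingSession above.
    print("Recorded \(video.duration)s video at \(video.path)")
  },
  onError: { error in
    print("Recording failed: \(error)")
  }
)
```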