//
//  CameraSession+Video.swift
//  VisionCamera
//
//  Created by Marc Rousavy on 11.10.23.
//  Copyright © 2023 mrousavy. All rights reserved.
//
|
import AVFoundation
|
|
|
|
import Foundation
|
|
|
|
import UIKit
|
|
|
|
|
|
|
|
extension CameraSession {
|
|
|
|
/**
 Starts a video + audio recording with a custom Asset Writer.

 - Parameters:
   - options: Recording options (file type, flash mode, ...).
   - onVideoRecorded: Called exactly once with the resulting `Video` when the recording finished successfully.
   - onError: Called with a `CameraError` if the recording could not be started or failed while running.
 */
func startRecording(options: RecordVideoOptions,
                    onVideoRecorded: @escaping (_ video: Video) -> Void,
                    onError: @escaping (_ error: CameraError) -> Void) {
  // Run on Camera Queue
  CameraQueues.cameraQueue.async {
    let start = DispatchTime.now()
    ReactLogger.log(level: .info, message: "Starting Video recording...")

    if options.flash != .off {
      // use the torch as the video's flash
      self.configure { config in
        config.torch = options.flash
      }
    }

    // Get Video Output
    guard let videoOutput = self.videoOutput else {
      if self.configuration?.video == .disabled {
        onError(.capture(.videoNotEnabled))
      } else {
        onError(.session(.cameraNotReady))
      }
      return
    }

    // Audio is recorded unless it was explicitly disabled in the session configuration.
    let enableAudio = self.configuration?.audio != .disabled

    // Callback for when the recording ends (successfully, with an error, or aborted).
    let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
      defer {
        // Disable Audio Session again
        if enableAudio {
          CameraQueues.audioQueue.async {
            self.deactivateAudioSession()
          }
        }
        // Reset flash
        if options.flash != .off {
          // Set torch mode back to what it was before if we used it for the video flash.
          self.configure { config in
            let torch = self.configuration?.torch ?? .off
            config.torch = torch
          }
        }
      }

      self.isRecording = false
      self.recordingSession = nil
      ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")

      if let error = error as NSError? {
        ReactLogger.log(level: .error, message: "RecordingSession Error \(error.code): \(error.description)")
        // Something went wrong, we have an error
        if error.domain == "capture/aborted" {
          onError(.capture(.aborted))
        } else {
          // FIX: typo "occured" -> "occurred" in the emitted error message.
          onError(.capture(.unknown(message: "An unknown recording error occurred! \(error.code) \(error.description)")))
        }
      } else {
        if status == .completed {
          // Recording was successfully saved
          let video = Video(path: recordingSession.url.absoluteString,
                            duration: recordingSession.duration)
          onVideoRecorded(video)
        } else {
          // Recording wasn't saved and we don't have an error either.
          onError(.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
        }
      }
    }

    // Create temporary file
    let errorPointer = ErrorPointer(nilLiteral: ())
    let fileExtension = options.fileType.descriptor ?? "mov"
    guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
      let message = errorPointer?.pointee?.description
      onError(.capture(.createTempFileError(message: message)))
      return
    }

    ReactLogger.log(level: .info, message: "Will record to temporary file: \(tempFilePath)")
    // FIX: was `URL(string: "file://\(tempFilePath)")!` — that initializer returns nil
    // (and the force-unwrap crashes) for paths containing spaces or other characters
    // that are invalid in a URL string. `URL(fileURLWithPath:)` handles them correctly.
    let tempURL = URL(fileURLWithPath: tempFilePath)

    do {
      // Create RecordingSession for the temp file
      let recordingSession = try RecordingSession(url: tempURL,
                                                  fileType: options.fileType,
                                                  completion: onFinish)

      // Init Audio + Activate Audio Session (optional)
      if enableAudio,
         let audioOutput = self.audioOutput,
         let audioInput = self.audioDeviceInput {
        ReactLogger.log(level: .trace, message: "Enabling Audio for Recording...")
        // Activate Audio Session asynchronously
        CameraQueues.audioQueue.async {
          do {
            try self.activateAudioSession()
          } catch {
            self.onConfigureError(error)
          }
        }

        // Initialize audio asset writer
        let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: options.fileType)
        recordingSession.initializeAudioWriter(withSettings: audioSettings,
                                               format: audioInput.device.activeFormat.formatDescription)
      }

      // Init Video
      let videoSettings = try videoOutput.recommendedVideoSettings(forOptions: options)
      recordingSession.initializeVideoWriter(withSettings: videoSettings)

      // start recording session with or without audio.
      // Use Video [AVCaptureSession] clock as a timebase - all other sessions (here; audio) have to be synced to that Clock.
      try recordingSession.start(clock: self.captureSession.clock)
      self.recordingSession = recordingSession
      self.isRecording = true

      let end = DispatchTime.now()
      // FIX: typo "RecordingSesssion" -> "RecordingSession" in the log message.
      ReactLogger.log(level: .info, message: "RecordingSession started in \(Double(end.uptimeNanoseconds - start.uptimeNanoseconds) / 1_000_000)ms!")
    } catch let error as NSError {
      if let error = error as? CameraError {
        onError(error)
      } else {
        onError(.capture(.createRecorderError(message: "RecordingSession failed with unknown error: \(error.description)")))
      }
      return
    }
  }
}
|
|
|
|
|
|
|
|
/**
 Stops an active recording.

 Resolves the given Promise once the stop has been requested; rejects it with
 `CameraError.capture(.noRecordingInProgress)` if no recording is currently active.
 */
func stopRecording(promise: Promise) {
  CameraQueues.cameraQueue.async {
    withPromise(promise) {
      // Reject early if nothing is being recorded right now.
      if let activeSession = self.recordingSession {
        // Use Video [AVCaptureSession] clock as a timebase - all other sessions (here; audio) have to be synced to that Clock.
        activeSession.stop(clock: self.captureSession.clock)
        // There might be late frames, so maybe we need to still provide more Frames to the RecordingSession. Let's keep isRecording true for now.
        return nil
      }
      throw CameraError.capture(.noRecordingInProgress)
    }
  }
}
|
|
|
|
|
|
|
|
/**
 Pauses an active recording.

 Rejects the Promise with `CameraError.capture(.noRecordingInProgress)` if there is
 no active recording session; otherwise flips `isRecording` off so no new frames
 are appended until the recording is resumed.
 */
func pauseRecording(promise: Promise) {
  CameraQueues.cameraQueue.async {
    withPromise(promise) {
      // there's no active recording!
      if self.recordingSession == nil {
        throw CameraError.capture(.noRecordingInProgress)
      }
      // Stop appending frames; the session itself stays alive.
      self.isRecording = false
      return nil
    }
  }
}
|
|
|
|
|
|
|
|
/**
 Resumes an active, but paused recording.

 Rejects the Promise with `CameraError.capture(.noRecordingInProgress)` if there is
 no active recording session; otherwise flips `isRecording` back on so frames are
 appended to the session again.
 */
func resumeRecording(promise: Promise) {
  CameraQueues.cameraQueue.async {
    withPromise(promise) {
      // there's no active recording!
      if self.recordingSession == nil {
        throw CameraError.capture(.noRecordingInProgress)
      }
      // Start appending frames to the existing session again.
      self.isRecording = true
      return nil
    }
  }
}
|
|
|
|
}
|