2021-02-19 08:28:05 -07:00
|
|
|
//
|
|
|
|
// CameraView+RecordVideo.swift
|
2021-06-21 14:42:46 -06:00
|
|
|
// mrousavy
|
2021-02-19 08:28:05 -07:00
|
|
|
//
|
|
|
|
// Created by Marc Rousavy on 16.12.20.
|
2021-06-01 05:07:57 -06:00
|
|
|
// Copyright © 2020 mrousavy. All rights reserved.
|
2021-02-19 08:28:05 -07:00
|
|
|
//
|
|
|
|
|
|
|
|
import AVFoundation
|
|
|
|
|
2021-06-11 13:06:19 -06:00
|
|
|
// One-shot guard flags so each frame-drop warning below is logged at most once
// per process lifetime.
// NOTE(review): these are read/written from capture-delegate callbacks without
// synchronization — worst case appears to be a duplicated log line; confirm the
// delegate queues make stronger guarantees unnecessary.
private var hasLoggedVideoFrameDropWarning = false
private var hasLoggedFrameProcessorFrameDropWarning = false
|
2021-05-06 06:11:55 -06:00
|
|
|
|
|
|
|
// MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
|
|
|
|
|
|
|
|
extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
|
2021-06-03 06:16:02 -06:00
|
|
|
  /**
   Starts a video + audio recording with a custom Asset Writer.

   Runs asynchronously on `cameraQueue`. Because React Native does not allow a
   native method to expose both a callback and a Promise (see TODO below), every
   success and failure path is reported through the JS `callback`.
   */
  func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
    cameraQueue.async {
      ReactLogger.log(level: .info, message: "Starting Video recording...")
      // Wrap the raw RCT callback; presumably `Callback` guards against invoking
      // the JS function more than once — TODO confirm.
      let callback = Callback(jsCallbackFunc)

      // Container format defaults to QuickTime (.mov) unless overridden via options["fileType"].
      var fileType = AVFileType.mov
      if let fileTypeOption = options["fileType"] as? String {
        guard let parsed = try? AVFileType(withString: fileTypeOption) else {
          callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption)))
          return
        }
        fileType = parsed
      }

      // Create a temp file whose extension matches the chosen container.
      let errorPointer = ErrorPointer(nilLiteral: ())
      let fileExtension = fileType.descriptor ?? "mov"
      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
        callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee)
        return
      }

      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
      // NOTE(review): force-unwrap assumes RCTTempFilePath always returns a
      // string that forms a valid file URL — TODO confirm.
      let tempURL = URL(string: "file://\(tempFilePath)")!

      if let flashMode = options["flash"] as? String {
        // use the torch as the video's flash
        self.setTorchMode(flashMode)
      }

      // Distinguish "camera not configured yet" from "video disabled via props".
      guard let videoOutput = self.videoOutput else {
        if self.video?.boolValue == true {
          callback.reject(error: .session(.cameraNotReady))
          return
        } else {
          callback.reject(error: .capture(.videoNotEnabled))
          return
        }
      }
      guard let videoInput = self.videoDeviceInput else {
        callback.reject(error: .session(.cameraNotReady))
        return
      }

      // TODO: The startRecording() func cannot be async because RN doesn't allow
      // both a callback and a Promise in a single function. Wait for TurboModules?
      // This means that any errors that occur in this function have to be delegated through
      // the callback, but I'd prefer for them to throw for the original function instead.

      let enableAudio = self.audio?.boolValue == true

      // Completion handler invoked by RecordingSession when writing ends.
      // Captures `self` strongly; NOTE(review): confirm RecordingSession releases
      // this closure after completion so the view is not retained indefinitely.
      let onFinish = { (status: AVAssetWriter.Status, error: Error?) in
        // Tear-down runs on every exit path of this closure, *after* the
        // callback below has fired (so the session is still readable there).
        defer {
          self.recordingSession = nil
          if enableAudio {
            self.audioQueue.async {
              self.deactivateAudioSession()
            }
          }
        }

        self.isRecording = false
        ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")

        if let error = error as NSError? {
          if error.domain == "capture/aborted" {
            callback.reject(error: .capture(.aborted), cause: error)
          } else {
            callback.reject(error: .capture(.unknown(message: "An unknown recording error occured! \(error.description)")), cause: error)
          }
        } else {
          if status == .completed {
            // Force-unwrap is safe here: the `defer` above nils the session only
            // after this closure body returns.
            callback.resolve([
              "path": self.recordingSession!.url.absoluteString,
              "duration": self.recordingSession!.duration,
            ])
          } else {
            callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
          }
        }
      }

      do {
        self.recordingSession = try RecordingSession(url: tempURL,
                                                     fileType: fileType,
                                                     completion: onFinish)
      } catch let error as NSError {
        callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error)
        return
      }

      // Init Video
      guard let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
            !videoSettings.isEmpty else {
        callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
        return
      }
      // get pixel format (420f, 420v)
      let pixelFormat = CMFormatDescriptionGetMediaSubType(videoInput.device.activeFormat.formatDescription)
      self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
                                                   pixelFormat: pixelFormat)

      // Init Audio (optional, async)
      if enableAudio {
        // Activate Audio Session (blocking)
        self.activateAudioSession()

        // Audio is best-effort: if no audio output/settings are available the
        // recording proceeds video-only.
        if let audioOutput = self.audioOutput,
           let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) as? [String: Any] {
          self.recordingSession!.initializeAudioWriter(withSettings: audioSettings)
        }
      }

      // start recording session with or without audio.
      do {
        try self.recordingSession!.start()
      } catch {
        callback.reject(error: .capture(.createRecorderError(message: "RecordingSession failed to start writing.")))
        return
      }
      // Flip last, so captureOutput(_:didOutput:from:) only starts appending
      // buffers once the writer is live.
      self.isRecording = true
    }
  }
|
|
|
|
|
|
|
|
func stopRecording(promise: Promise) {
|
2021-05-06 06:11:55 -06:00
|
|
|
cameraQueue.async {
|
2021-06-03 06:16:02 -06:00
|
|
|
self.isRecording = false
|
|
|
|
|
2021-05-06 06:11:55 -06:00
|
|
|
withPromise(promise) {
|
|
|
|
guard let recordingSession = self.recordingSession else {
|
|
|
|
throw CameraError.capture(.noRecordingInProgress)
|
|
|
|
}
|
|
|
|
recordingSession.finish()
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: Implement for JS
|
|
|
|
func pauseRecording(promise: Promise) {
|
|
|
|
cameraQueue.async {
|
2021-02-19 08:28:05 -07:00
|
|
|
withPromise(promise) {
|
2021-05-06 06:11:55 -06:00
|
|
|
if self.isRecording {
|
|
|
|
self.isRecording = false
|
|
|
|
return nil
|
|
|
|
} else {
|
|
|
|
throw CameraError.capture(.noRecordingInProgress)
|
2021-02-19 08:28:05 -07:00
|
|
|
}
|
2021-05-06 06:11:55 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: Implement for JS
|
|
|
|
func resumeRecording(promise: Promise) {
|
|
|
|
cameraQueue.async {
|
|
|
|
withPromise(promise) {
|
|
|
|
if !self.isRecording {
|
|
|
|
self.isRecording = true
|
|
|
|
return nil
|
|
|
|
} else {
|
|
|
|
throw CameraError.capture(.noRecordingInProgress)
|
2021-02-19 08:28:05 -07:00
|
|
|
}
|
2021-05-06 06:11:55 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-02-19 08:28:05 -07:00
|
|
|
|
2021-05-06 06:11:55 -06:00
|
|
|
  /**
   Delegate entry point for every video/audio sample buffer produced by the
   capture sessions. Handles two independent concerns:
   1. appending buffers to the active RecordingSession while `isRecording`, and
   2. dispatching throttled Frame Processor calls for video buffers.
   */
  public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
    // Video Recording runs in the same queue
    if isRecording {
      guard let recordingSession = recordingSession else {
        invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
        return
      }

      switch captureOutput {
      case is AVCaptureVideoDataOutput:
        // Video buffers are appended with their own presentation timestamp.
        recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
      case is AVCaptureAudioDataOutput:
        // Audio comes from a separate capture session, so convert its timestamp
        // into the video session's clock domain before appending.
        // NOTE(review): the force-unwrapped masterClocks assume both sessions are
        // running whenever audio buffers arrive — TODO confirm.
        let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                                          from: audioCaptureSession.masterClock!,
                                          to: captureSession.masterClock!)
        recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
      default:
        break
      }
    }

    // TODO: resize using VideoToolbox (VTPixelTransferSession)

    if let frameProcessor = frameProcessorCallback,
       captureOutput is AVCaptureVideoDataOutput {
      // check if last frame was x nanoseconds ago, effectively throttling FPS
      let diff = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorCall.uptimeNanoseconds
      let secondsPerFrame = 1.0 / frameProcessorFps.doubleValue
      let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0

      if diff > UInt64(nanosecondsPerFrame) {
        if !isRunningFrameProcessor {
          // we're not in the middle of executing the Frame Processor, so prepare for next call.
          // Copy the buffer so the Frame Processor can outlive this delegate
          // callback without holding a buffer from the capture pipeline's pool.
          var bufferCopy: CMSampleBuffer?
          CMSampleBufferCreateCopy(allocator: kCFAllocatorDefault,
                                   sampleBuffer: sampleBuffer,
                                   sampleBufferOut: &bufferCopy)
          if let bufferCopy = bufferCopy {
            // successfully copied buffer, dispatch frame processor call.
            CameraQueues.frameProcessorQueue.async {
              // NOTE(review): isRunningFrameProcessor is written on this queue
              // but read above on the capture queue without synchronization —
              // worst case looks like an extra dropped or overlapping call;
              // confirm this is acceptable.
              self.isRunningFrameProcessor = true
              let frame = Frame(buffer: bufferCopy, orientation: self.bufferOrientation)
              frameProcessor(frame)
              self.isRunningFrameProcessor = false
            }
            lastFrameProcessorCall = DispatchTime.now()
          } else {
            // failed to create a buffer copy.
            ReactLogger.log(level: .error, message: "Failed to copy buffer! Frame Processor cannot be called.", alsoLogToJS: true)
          }
        } else {
          // we're still in the middle of executing a Frame Processor for a previous frame, notify user about dropped frame.
          if !hasLoggedFrameProcessorFrameDropWarning {
            ReactLogger.log(level: .warning,
                            message: "Your Frame Processor took so long to execute that a frame was dropped. " +
                              "Either throttle your Frame Processor's frame rate using the `frameProcessorFps` prop, or optimize " +
                              "it's execution speed. (This warning will only be shown once)",
                            alsoLogToJS: true)
            hasLoggedFrameProcessorFrameDropWarning = true
          }
        }
      }
    }
  }
|
|
|
|
|
2021-06-07 05:08:40 -06:00
|
|
|
#if DEBUG
|
|
|
|
public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
|
2021-06-11 13:06:19 -06:00
|
|
|
if !hasLoggedVideoFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
|
2021-06-03 06:16:02 -06:00
|
|
|
let reason = findFrameDropReason(inBuffer: buffer)
|
|
|
|
ReactLogger.log(level: .warning,
|
2021-06-11 13:06:19 -06:00
|
|
|
message: "Dropped a Frame - This might indicate that your frame rate is higher than the phone can currently process. " +
|
|
|
|
"Throttle the Camera frame rate using the `fps` prop and make sure the device stays in optimal condition for recording. " +
|
|
|
|
"Frame drop reason: \(reason). (This warning will only be shown once)",
|
2021-06-03 06:16:02 -06:00
|
|
|
alsoLogToJS: true)
|
2021-06-11 13:06:19 -06:00
|
|
|
hasLoggedVideoFrameDropWarning = true
|
2021-06-03 06:16:02 -06:00
|
|
|
}
|
2021-06-07 05:08:40 -06:00
|
|
|
}
|
2021-05-06 06:11:55 -06:00
|
|
|
|
2021-06-07 05:08:40 -06:00
|
|
|
private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {
|
|
|
|
var mode: CMAttachmentMode = 0
|
|
|
|
guard let reason = CMGetAttachment(buffer,
|
|
|
|
key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
|
|
|
|
attachmentModeOut: &mode) else {
|
|
|
|
return "unknown"
|
|
|
|
}
|
|
|
|
return String(describing: reason)
|
2021-02-19 08:28:05 -07:00
|
|
|
}
|
2021-06-07 05:08:40 -06:00
|
|
|
#endif
|
2021-06-09 02:57:05 -06:00
|
|
|
|
|
|
|
/**
|
|
|
|
Gets the orientation of the CameraView's images (CMSampleBuffers).
|
|
|
|
*/
|
|
|
|
var bufferOrientation: UIImage.Orientation {
|
|
|
|
guard let cameraPosition = videoDeviceInput?.device.position else {
|
|
|
|
return .up
|
|
|
|
}
|
|
|
|
|
|
|
|
switch UIDevice.current.orientation {
|
|
|
|
case .portrait:
|
|
|
|
return cameraPosition == .front ? .leftMirrored : .right
|
|
|
|
|
|
|
|
case .landscapeLeft:
|
|
|
|
return cameraPosition == .front ? .downMirrored : .up
|
|
|
|
|
|
|
|
case .portraitUpsideDown:
|
|
|
|
return cameraPosition == .front ? .rightMirrored : .left
|
|
|
|
|
|
|
|
case .landscapeRight:
|
|
|
|
return cameraPosition == .front ? .upMirrored : .down
|
|
|
|
|
|
|
|
case .unknown, .faceUp, .faceDown:
|
|
|
|
fallthrough
|
|
|
|
@unknown default:
|
|
|
|
return .up
|
|
|
|
}
|
|
|
|
}
|
2021-02-19 08:28:05 -07:00
|
|
|
}
|