//
//  CameraView+RecordVideo.swift
//  mrousavy
//
//  Created by Marc Rousavy on 16.12.20.
//  Copyright © 2020 mrousavy. All rights reserved.
//
import AVFoundation
|
|
|
|
|
2021-05-06 06:11:55 -06:00
|
|
|
// MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
|
|
|
|
|
|
|
|
extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
|
2021-06-03 06:16:02 -06:00
|
|
|
/**
 Starts a video + audio recording with a custom Asset Writer.

 - Parameter options: Recording options from JS (`fileType`, `flash`, `videoCodec`).
 - Parameter jsCallbackFunc: JS callback invoked exactly once with either the
   resulting `{ path, duration }` payload or an error (see TODO below for why
   this is a callback and not a Promise).
 */
func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
  cameraQueue.async {
    ReactLogger.log(level: .info, message: "Starting Video recording...")
    let callback = Callback(jsCallbackFunc)

    // Parse the requested container format (defaults to QuickTime .mov).
    var fileType = AVFileType.mov
    if let fileTypeOption = options["fileType"] as? String {
      guard let parsed = try? AVFileType(withString: fileTypeOption) else {
        callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption)))
        return
      }
      fileType = parsed
    }

    // Create a temporary file to record into.
    let errorPointer = ErrorPointer(nilLiteral: ())
    let fileExtension = fileType.descriptor ?? "mov"
    guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
      callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee)
      return
    }

    ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
    // Fix: build the file URL with fileURLWithPath instead of force-unwrapping
    // URL(string: "file://..."). A path containing spaces or other characters
    // that are invalid in a URL string would have returned nil and crashed here.
    let tempURL = URL(fileURLWithPath: tempFilePath)

    if let flashMode = options["flash"] as? String {
      // use the torch as the video's flash
      self.setTorchMode(flashMode)
    }

    guard let videoOutput = self.videoOutput else {
      if self.video?.boolValue == true {
        callback.reject(error: .session(.cameraNotReady))
        return
      } else {
        callback.reject(error: .capture(.videoNotEnabled))
        return
      }
    }
    guard let videoInput = self.videoDeviceInput else {
      callback.reject(error: .session(.cameraNotReady))
      return
    }

    // TODO: The startRecording() func cannot be async because RN doesn't allow
    // both a callback and a Promise in a single function. Wait for TurboModules?
    // This means that any errors that occur in this function have to be delegated through
    // the callback, but I'd prefer for them to throw for the original function instead.

    let enableAudio = self.audio?.boolValue == true

    // Invoked once the AVAssetWriter finishes (completed, failed or cancelled).
    // Always tears down the audio session and torch state (defer), then
    // resolves/rejects the JS callback exactly once.
    let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
      defer {
        if enableAudio {
          self.audioQueue.async {
            self.deactivateAudioSession()
          }
        }
        if options["flash"] != nil {
          // Set torch mode back to what it was before if we used it for the video flash.
          self.setTorchMode(self.torch)
        }
      }
      self.recordingSession = nil
      self.isRecording = false
      ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")

      if let error = error as NSError? {
        if error.domain == "capture/aborted" {
          callback.reject(error: .capture(.aborted), cause: error)
        } else {
          callback.reject(error: .capture(.unknown(message: "An unknown recording error occured! \(error.description)")), cause: error)
        }
      } else {
        if status == .completed {
          callback.resolve([
            "path": recordingSession.url.absoluteString,
            "duration": recordingSession.duration,
          ])
        } else {
          callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
        }
      }
    }

    let recordingSession: RecordingSession
    do {
      recordingSession = try RecordingSession(url: tempURL,
                                              fileType: fileType,
                                              completion: onFinish)
    } catch let error as NSError {
      callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error)
      return
    }
    self.recordingSession = recordingSession

    // Optional codec override (e.g. "h264" / "hevc") from JS.
    var videoCodec: AVVideoCodecType?
    if let codecString = options["videoCodec"] as? String {
      videoCodec = AVVideoCodecType(withString: codecString)
    }

    // Init Video
    guard let videoSettings = self.recommendedVideoSettings(videoOutput: videoOutput, fileType: fileType, videoCodec: videoCodec),
          !videoSettings.isEmpty else {
      callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
      return
    }
    // get pixel format (420f, 420v, x420)
    let pixelFormat = CMFormatDescriptionGetMediaSubType(videoInput.device.activeFormat.formatDescription)
    recordingSession.initializeVideoWriter(withSettings: videoSettings,
                                           pixelFormat: pixelFormat)

    // Init Audio (optional, async)
    if enableAudio {
      // Activate Audio Session (blocking)
      self.activateAudioSession()

      if let audioOutput = self.audioOutput,
         let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) {
        recordingSession.initializeAudioWriter(withSettings: audioSettings)
      }
    }

    // start recording session with or without audio.
    do {
      try recordingSession.startAssetWriter()
    } catch let error as NSError {
      callback.reject(error: .capture(.createRecorderError(message: "RecordingSession failed to start asset writer.")), cause: error)
      return
    }
    self.isRecording = true
  }
}
|
|
|
|
|
|
|
|
/**
 Stops the current recording and finalizes the asset writer.
 Rejects the Promise with `.noRecordingInProgress` if no recording is active.
 */
func stopRecording(promise: Promise) {
  cameraQueue.async {
    self.isRecording = false

    withPromise(promise) {
      if let session = self.recordingSession {
        session.finish()
        return nil
      }
      throw CameraError.capture(.noRecordingInProgress)
    }
  }
}
|
|
|
|
|
|
|
|
/**
 Pauses the current recording: incoming sample buffers are no longer appended
 while `isRecording` is false, but the RecordingSession stays alive.
 Rejects the Promise with `.noRecordingInProgress` if no recording is active.
 */
func pauseRecording(promise: Promise) {
  cameraQueue.async {
    withPromise(promise) {
      if self.recordingSession == nil {
        // there's no active recording!
        throw CameraError.capture(.noRecordingInProgress)
      }
      self.isRecording = false
      return nil
    }
  }
}
|
|
|
|
|
|
|
|
/**
 Resumes a previously paused recording by re-enabling buffer appending.
 Rejects the Promise with `.noRecordingInProgress` if no recording is active.
 */
func resumeRecording(promise: Promise) {
  cameraQueue.async {
    withPromise(promise) {
      if self.recordingSession == nil {
        // there's no active recording!
        throw CameraError.capture(.noRecordingInProgress)
      }
      self.isRecording = true
      return nil
    }
  }
}
|
2021-02-19 08:28:05 -07:00
|
|
|
|
2021-05-06 06:11:55 -06:00
|
|
|
/**
 Sample-buffer delegate callback for both the video and the audio data output.

 While `isRecording` is true, routes each buffer into the active
 `RecordingSession`; afterwards, video buffers are additionally handed to the
 JS Frame Processor (if one is registered).
 */
public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
  if isRecording {
    // Write Video / Audio frame to file
    guard let recordingSession = recordingSession else {
      invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
      return
    }
    // Dispatch on the concrete output type to tag the buffer as video or audio.
    switch captureOutput {
    case is AVCaptureVideoDataOutput:
      recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    case is AVCaptureAudioDataOutput:
      // Audio buffers are timestamped against the audio session's clock;
      // convert to the video capture session's clock so A/V stay in sync.
      // NOTE(review): masterClock is force-unwrapped — assumes both sessions
      // are running when buffers arrive; verify against session lifecycle.
      let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                                        from: audioCaptureSession.masterClock!,
                                        to: captureSession.masterClock!)
      recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
    default:
      break
    }
  }

  // Frame Processors only receive video frames, never audio buffers.
  if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
    // Call the JavaScript Frame Processor func (worklet)
    let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
    frameProcessor(frame)
  }
}
|
|
|
|
|
2021-12-30 02:47:23 -07:00
|
|
|
/**
 Returns the capture output's recommended asset-writer video settings.

 - Parameter videoOutput: The video data output to query.
 - Parameter fileType: The container format the asset writer will write to.
 - Parameter videoCodec: Optional codec override; when nil, the output's
   default codec recommendation for `fileType` is used.
 - Returns: A settings dictionary for `AVAssetWriterInput`, or nil if the
   output cannot recommend settings for this combination.
 */
private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput, fileType: AVFileType, videoCodec: AVVideoCodecType?) -> [String: Any]? {
  // Fix idiom: bind the optional instead of nil-checking and force-unwrapping.
  if let videoCodec = videoCodec {
    return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec, assetWriterOutputFileType: fileType)
  } else {
    return videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
  }
}
|
|
|
|
|
2021-06-09 02:57:05 -06:00
|
|
|
/**
 Gets the orientation of the CameraView's images (CMSampleBuffers).

 Derived from the current physical device orientation, mirrored when the
 front camera is active. Defaults to `.up` when no camera is attached or the
 device orientation is unknown / face-up / face-down.
 */
var bufferOrientation: UIImage.Orientation {
  guard let cameraPosition = videoDeviceInput?.device.position else {
    return .up
  }
  let isFrontCamera = cameraPosition == .front

  switch UIDevice.current.orientation {
  case .portrait:
    return isFrontCamera ? .leftMirrored : .right
  case .landscapeLeft:
    return isFrontCamera ? .downMirrored : .up
  case .portraitUpsideDown:
    return isFrontCamera ? .rightMirrored : .left
  case .landscapeRight:
    return isFrontCamera ? .upMirrored : .down
  case .unknown, .faceUp, .faceDown:
    fallthrough
  @unknown default:
    return .up
  }
}
|
2021-02-19 08:28:05 -07:00
|
|
|
}
|