try: Improvements from WWDC 2021 1:1 workshop (#197)

* perf: Automatically determine the pixel format depending on the active format (more efficient video recording 🚀)
* perf: Skip the `AVAssetWriter` transform by correctly orienting the video output connection directly
* feat: Support camera flipping while recording
* feat: Run frame processors on a separate queue, avoiding stutters in video recordings
* feat: Automatically drop late frame processor frames

commit 9c579c65aa (parent 26cf21ff5f)
@@ -151,10 +151,8 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {

   //#region Tap Gesture
   const onDoubleTap = useCallback(() => {
-    // TODO: (MARC) Allow switching camera (back <-> front) while recording and stich videos together!
-    if (isPressingButton.value) return;
     onFlipCameraPressed();
-  }, [isPressingButton, onFlipCameraPressed]);
+  }, [onFlipCameraPressed]);
   //#endregion

   //#region Effects
@@ -17,13 +17,20 @@ public class CameraQueues: NSObject {
                                                      autoreleaseFrequency: .inherit,
                                                      target: nil)

-  /// The serial execution queue for output processing of videos as well as frame processors.
+  /// The serial execution queue for output processing of videos for recording.
   @objc public static let videoQueue = DispatchQueue(label: "mrousavy/VisionCamera.video",
                                                      qos: .userInteractive,
                                                      attributes: [],
                                                      autoreleaseFrequency: .inherit,
                                                      target: nil)

+  /// The serial execution queue for output processing of videos for frame processing.
+  @objc public static let frameProcessorQueue = DispatchQueue(label: "mrousavy/VisionCamera.frame-processor",
+                                                              qos: .userInteractive,
+                                                              attributes: [],
+                                                              autoreleaseFrequency: .inherit,
+                                                              target: nil)
+
   /// The serial execution queue for output processing of audio buffers.
   @objc public static let audioQueue = DispatchQueue(label: "mrousavy/VisionCamera.audio",
                                                      qos: .userInteractive,
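Splitting recording and frame processing onto two separate serial queues is what the stutter fix rests on: a slow frame processor can now only delay the next frame processor invocation, never the buffer writes on the video queue. A minimal sketch of that idea (queue labels here are illustrative, not the library's):

```swift
import Dispatch

// Two independent serial queues: blocking one never blocks the other.
let recordingQueue = DispatchQueue(label: "example/video", qos: .userInteractive)
let processingQueue = DispatchQueue(label: "example/frame-processor", qos: .userInteractive)

recordingQueue.async {
  // append the sample buffer to the AVAssetWriter -- stays real-time
}
processingQueue.async {
  // run an arbitrarily slow frame processor -- only other frame
  // processor calls queue up behind it
}
```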
@@ -133,11 +133,9 @@ extension CameraView {
       return
     }
     videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
-    videoOutput!.alwaysDiscardsLateVideoFrames = true
+    videoOutput!.alwaysDiscardsLateVideoFrames = false
     captureSession.addOutput(videoOutput!)
-    if videoDeviceInput!.device.position == .front {
-      videoOutput!.mirror()
-    }
+    videoOutput!.setOrientation(forCameraPosition: videoDeviceInput!.device.position)
   }

   invokeOnInitialized()
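Two things change here: with `alwaysDiscardsLateVideoFrames = false`, AVFoundation queues late frames instead of silently dropping them, so the recording receives every frame (late frames for the frame processor are now dropped manually instead, see below); and the front-camera-only `mirror()` call is replaced by `setOrientation(forCameraPosition:)`, which configures mirroring and orientation on the connection for any camera position. A small sketch of the output setup, assuming only AVFoundation:

```swift
import AVFoundation

// Sketch: configure a video data output the way the new code does.
let videoOutput = AVCaptureVideoDataOutput()
// false = queue late frames rather than drop them; frames arriving while
// a frame processor is still busy are dropped manually instead.
videoOutput.alwaysDiscardsLateVideoFrames = false
```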
@@ -8,7 +8,8 @@

 import AVFoundation

-private var hasLoggedFrameDropWarning = false
+private var hasLoggedVideoFrameDropWarning = false
+private var hasLoggedFrameProcessorFrameDropWarning = false

 // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate

@@ -54,6 +55,10 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
         return
       }
     }
+    guard let videoInput = self.videoDeviceInput else {
+      callback.reject(error: .session(.cameraNotReady))
+      return
+    }

     // TODO: The startRecording() func cannot be async because RN doesn't allow
     // both a callback and a Promise in a single function. Wait for TurboModules?
@@ -108,8 +113,10 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
       callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
       return
     }
+    // get pixel format (420f, 420v)
+    let pixelFormat = CMFormatDescriptionGetMediaSubType(videoInput.device.activeFormat.formatDescription)
     self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
-                                                 isVideoMirrored: self.videoOutput!.isMirrored)
+                                                 pixelFormat: pixelFormat)

     // Init Audio (optional, async)
     if enableAudio {
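The pixel format is no longer hard-coded: it is read straight off the device's active format, so the writer matches what the camera natively outputs (typically the FourCC `420v`, video-range bi-planar 4:2:0, or `420f`, full range). A sketch of reading and decoding that value, with helper names that are illustrative only:

```swift
import AVFoundation
import CoreMedia

// Read the pixel format FourCC off a capture device's active format.
func pixelFormat(of device: AVCaptureDevice) -> OSType {
  return CMFormatDescriptionGetMediaSubType(device.activeFormat.formatDescription)
}

// A FourCC is four ASCII bytes packed into a UInt32; decode it for logging.
func fourCCString(_ code: OSType) -> String {
  let bytes: [UInt8] = [24, 16, 8, 0].map { UInt8((code >> $0) & 0xFF) }
  return String(bytes: bytes, encoding: .ascii) ?? "????"
}
```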
@@ -196,30 +203,60 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
       }
     }

-    if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
+    // TODO: resize using VideoToolbox (VTPixelTransferSession)
+
+    if let frameProcessor = frameProcessorCallback,
+       captureOutput is AVCaptureVideoDataOutput {
       // check if last frame was x nanoseconds ago, effectively throttling FPS
       let diff = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorCall.uptimeNanoseconds
       let secondsPerFrame = 1.0 / frameProcessorFps.doubleValue
       let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0

       if diff > UInt64(nanosecondsPerFrame) {
-        let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
-        frameProcessor(frame)
-        lastFrameProcessorCall = DispatchTime.now()
+        if !isRunningFrameProcessor {
+          // we're not in the middle of executing the Frame Processor, so prepare for next call.
+          var bufferCopy: CMSampleBuffer?
+          CMSampleBufferCreateCopy(allocator: kCFAllocatorDefault,
+                                   sampleBuffer: sampleBuffer,
+                                   sampleBufferOut: &bufferCopy)
+          if let bufferCopy = bufferCopy {
+            // successfully copied buffer, dispatch frame processor call.
+            CameraQueues.frameProcessorQueue.async {
+              self.isRunningFrameProcessor = true
+              let frame = Frame(buffer: bufferCopy, orientation: self.bufferOrientation)
+              frameProcessor(frame)
+              self.isRunningFrameProcessor = false
+            }
+            lastFrameProcessorCall = DispatchTime.now()
+          } else {
+            // failed to create a buffer copy.
+            ReactLogger.log(level: .error, message: "Failed to copy buffer! Frame Processor cannot be called.", alsoLogToJS: true)
+          }
+        } else {
+          // we're still in the middle of executing a Frame Processor for a previous frame, notify user about dropped frame.
+          if !hasLoggedFrameProcessorFrameDropWarning {
+            ReactLogger.log(level: .warning,
+                            message: "Your Frame Processor took so long to execute that a frame was dropped. " +
+                              "Either throttle your Frame Processor's frame rate using the `frameProcessorFps` prop, or optimize " +
+                              "it's execution speed. (This warning will only be shown once)",
+                            alsoLogToJS: true)
+            hasLoggedFrameProcessorFrameDropWarning = true
+          }
+        }
       }
     }
   }

 #if DEBUG
   public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
-    if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
+    if !hasLoggedVideoFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
       let reason = findFrameDropReason(inBuffer: buffer)
       ReactLogger.log(level: .warning,
-                      message: "Dropped a Frame - This might indicate that your Frame Processor is doing too much work. " +
-                        "Either throttle the frame processor's frame rate using the `frameProcessorFps` prop, or optimize " +
-                        "your frame processor's execution speed. Frame drop reason: \(reason)",
+                      message: "Dropped a Frame - This might indicate that your frame rate is higher than the phone can currently process. " +
+                        "Throttle the Camera frame rate using the `fps` prop and make sure the device stays in optimal condition for recording. " +
+                        "Frame drop reason: \(reason). (This warning will only be shown once)",
                       alsoLogToJS: true)
-      hasLoggedFrameDropWarning = true
+      hasLoggedVideoFrameDropWarning = true
     }
   }

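The delegate now throttles by timestamp, skips frames while a frame processor is still executing, and copies the sample buffer before hopping to the frame processor queue, so AVFoundation can recycle the original buffer immediately. The same pattern in isolation, as a self-contained sketch with illustrative names:

```swift
import CoreMedia
import Dispatch

// Sketch of the drop-late-frames pattern (names are illustrative, not the
// library's API): throttle by timestamp, skip frames while busy, and copy
// the buffer before crossing queues.
final class FrameThrottler {
  private let queue = DispatchQueue(label: "example/frame-processor")
  private var lastCall = DispatchTime.now()
  private var isRunning = false

  func process(_ sampleBuffer: CMSampleBuffer, fps: Double, handler: @escaping (CMSampleBuffer) -> Void) {
    let nanosecondsPerFrame = 1_000_000_000.0 / fps
    guard DispatchTime.now().uptimeNanoseconds - lastCall.uptimeNanoseconds > UInt64(nanosecondsPerFrame),
          !isRunning else {
      return // throttled, or the previous frame is still being processed: drop this frame
    }
    var copy: CMSampleBuffer?
    CMSampleBufferCreateCopy(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleBufferOut: &copy)
    guard let copy = copy else { return }
    lastCall = .now()
    queue.async {
      self.isRunning = true
      handler(copy)
      self.isRunning = false
    }
  }
}
```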
@@ -100,6 +100,9 @@ public final class CameraView: UIView {
   internal let videoQueue = CameraQueues.videoQueue
   internal let audioQueue = CameraQueues.audioQueue

+  /// Specifies whether the frameProcessor() function is currently executing. used to drop late frames.
+  internal var isRunningFrameProcessor = false
+
   var isRunning: Bool {
     return captureSession.isRunning
   }
@@ -24,7 +24,7 @@ final class CameraViewManager: RCTViewManager {

     // Install Frame Processor bindings and setup Runtime
     if enableFrameProcessors {
-      CameraQueues.videoQueue.async {
+      CameraQueues.frameProcessorQueue.async {
         self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge)
         self.bridge.runOnJS {
           self.runtimeManager!.installFrameProcessorBindings()
@@ -13,7 +13,9 @@ extension AVAssetWriterInputPixelBufferAdaptor {
   /**
    Convenience initializer to extract correct attributes from the given videoSettings.
    */
-  convenience init(assetWriterInput: AVAssetWriterInput, withVideoSettings videoSettings: [String: Any]) {
+  convenience init(assetWriterInput: AVAssetWriterInput,
+                   withVideoSettings videoSettings: [String: Any],
+                   pixelFormat: OSType) {
     var attributes: [String: Any] = [:]

     if let width = videoSettings[AVVideoWidthKey] as? NSNumber,
@@ -22,8 +24,7 @@ extension AVAssetWriterInputPixelBufferAdaptor {
       attributes[kCVPixelBufferHeightKey as String] = height as CFNumber
     }

-    // TODO: Is "Bi-Planar Y'CbCr 8-bit 4:2:0 full-range" the best CVPixelFormatType? How can I find natively supported ones?
-    attributes[kCVPixelBufferPixelFormatTypeKey as String] = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+    attributes[kCVPixelBufferPixelFormatTypeKey as String] = pixelFormat

     self.init(assetWriterInput: assetWriterInput, sourcePixelBufferAttributes: attributes)
   }
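This answers the TODO removed above: the "natively supported" pixel format is whatever the active capture format reports, so the adaptor's source attributes now carry it instead of a hard-coded `kCVPixelFormatType_420YpCbCr8BiPlanarFullRange`. A sketch of what the extended initializer assembles, with a hypothetical helper name:

```swift
import AVFoundation
import CoreVideo

// Illustrative only: build a pixel buffer adaptor whose source attributes
// match the camera's native pixel format, avoiding a format conversion.
func makeAdaptor(input: AVAssetWriterInput, width: Int, height: Int, pixelFormat: OSType) -> AVAssetWriterInputPixelBufferAdaptor {
  let attributes: [String: Any] = [
    kCVPixelBufferWidthKey as String: width,
    kCVPixelBufferHeightKey as String: height,
    kCVPixelBufferPixelFormatTypeKey as String: pixelFormat,
  ]
  return AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: attributes)
}
```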
@@ -12,6 +12,7 @@ extension AVCapturePhotoOutput {
   func mirror() {
     connections.forEach { connection in
       if connection.isVideoMirroringSupported {
+        connection.automaticallyAdjustsVideoMirroring = false
         connection.isVideoMirrored = true
       }
     }
ios/Extensions/AVCaptureVideoDataOutput+mirror.swift (deleted file, 25 lines)
@@ -1,25 +0,0 @@
-//
-//  AVCaptureVideoDataOutput+mirror.swift
-//  Cuvent
-//
-//  Created by Marc Rousavy on 18.01.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-
-extension AVCaptureVideoDataOutput {
-  func mirror() {
-    connections.forEach { connection in
-      if connection.isVideoMirroringSupported {
-        connection.isVideoMirrored = true
-      }
-    }
-  }
-
-  var isMirrored: Bool {
-    return connections.contains { connection in
-      connection.isVideoMirrored
-    }
-  }
-}
ios/Extensions/AVCaptureVideoDataOutput+setOrientation.swift (new file, 24 lines)
@@ -0,0 +1,24 @@
+//
+//  AVCaptureVideoDataOutput+setOrientation.swift
+//  Cuvent
+//
+//  Created by Marc Rousavy on 18.01.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+
+extension AVCaptureVideoDataOutput {
+  func setOrientation(forCameraPosition position: AVCaptureDevice.Position) {
+    let isMirrored = position == .front
+    connections.forEach { connection in
+      if connection.isVideoMirroringSupported {
+        connection.automaticallyAdjustsVideoMirroring = false
+        connection.isVideoMirrored = isMirrored
+      }
+      if connection.isVideoOrientationSupported {
+        connection.videoOrientation = .portrait
+      }
+    }
+  }
+}
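Note the ordering inside the new extension: AVFoundation documents that assigning `isVideoMirrored` throws while `automaticallyAdjustsVideoMirroring` is still enabled, hence it is disabled first. Because the connection is mirrored and rotated to portrait before buffers are captured, recorded frames already arrive upright, which is what lets the `AVAssetWriterInput` rotation transform be deleted in RecordingSession below. A hypothetical call-site sketch:

```swift
import AVFoundation

// Illustrative only: orient the output for whichever device is active.
func attach(_ output: AVCaptureVideoDataOutput, to session: AVCaptureSession, device: AVCaptureDevice) {
  guard session.canAddOutput(output) else { return }
  session.addOutput(output)
  // connections only exist once the output is attached to the session
  output.setOrientation(forCameraPosition: device.position)
}
```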
@@ -43,7 +43,7 @@
 // Forward declarations for the Swift classes
 __attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
 @interface CameraQueues : NSObject
-@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
+@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull frameProcessorQueue;
 @end
 __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
 @interface CameraView : UIView
@@ -153,7 +153,7 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
   auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
   auto view = static_cast<CameraView*>(anonymousView);

-  dispatch_async(CameraQueues.videoQueue, [worklet, view, self]() {
+  dispatch_async(CameraQueues.frameProcessorQueue, [worklet, view, self]() {
     NSLog(@"FrameProcessorBindings: Converting worklet to Objective-C callback...");
     auto& rt = *runtimeManager->runtime;
     auto function = worklet->getValue(rt).asObject(rt).asFunction(rt);
@@ -16,10 +16,7 @@ FrameProcessorCallback convertJSIFunctionToFrameProcessorCallback(jsi::Runtime &
   __block auto cb = value.getFunction(runtime);

   return ^(Frame* frame) {
-#if DEBUG
-    std::chrono::steady_clock::time_point begin = std::chrono::steady_clock::now();
-#endif

     auto frameHostObject = std::make_shared<FrameHostObject>(frame);
     try {
       cb.call(runtime, jsi::Object::createFromHostObject(runtime, frameHostObject));
@@ -27,14 +24,6 @@ FrameProcessorCallback convertJSIFunctionToFrameProcessorCallback(jsi::Runtime &
       NSLog(@"Frame Processor threw an error: %s", jsError.getMessage().c_str());
     }
-
-#if DEBUG
-    std::chrono::steady_clock::time_point end = std::chrono::steady_clock::now();
-    auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - begin).count();
-    if (duration > 100) {
-      NSLog(@"Warning: Frame Processor function took %lld ms to execute. This blocks the video queue from recording, optimize your frame processor!", duration);
-    }
-#endif

     // Manually free the buffer because:
     // 1. we are sure we don't need it anymore, the frame processor worklet has finished executing.
     // 2. we don't know when the JS runtime garbage collects this object, it might be holding it for a few more frames
@@ -68,7 +68,7 @@ class RecordingSession {
   /**
    Initializes an AssetWriter for video frames (CMSampleBuffers).
    */
-  func initializeVideoWriter(withSettings settings: [String: Any], isVideoMirrored: Bool) {
+  func initializeVideoWriter(withSettings settings: [String: Any], pixelFormat: OSType) {
     guard !settings.isEmpty else {
       ReactLogger.log(level: .error, message: "Tried to initialize Video Writer with empty settings!", alsoLogToJS: true)
       return
@@ -81,14 +81,10 @@ class RecordingSession {
     let videoWriter = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
     videoWriter.expectsMediaDataInRealTime = true
-
-    if isVideoMirrored {
-      videoWriter.transform = CGAffineTransform(rotationAngle: -(.pi / 2))
-    } else {
-      videoWriter.transform = CGAffineTransform(rotationAngle: .pi / 2)
-    }

     assetWriter.add(videoWriter)
-    bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriter, withVideoSettings: settings)
+    bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriter,
+                                                         withVideoSettings: settings,
+                                                         pixelFormat: pixelFormat)
     ReactLogger.log(level: .info, message: "Initialized Video AssetWriter.")
   }

@@ -29,7 +29,7 @@
 		B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */; };
 		B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */; };
 		B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; };
-		B887519225E0102000DB86D6 /* AVCaptureVideoDataOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516B25E0102000DB86D6 /* AVCaptureVideoDataOutput+mirror.swift */; };
+		B887519225E0102000DB86D6 /* AVCaptureVideoDataOutput+setOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516B25E0102000DB86D6 /* AVCaptureVideoDataOutput+setOrientation.swift */; };
 		B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516E25E0102000DB86D6 /* MakeReactError.swift */; };
 		B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; };
 		B887519625E0102000DB86D6 /* Promise.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517025E0102000DB86D6 /* Promise.swift */; };
@@ -106,7 +106,7 @@
 		B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput+mirror.swift"; sourceTree = "<group>"; };
 		B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+matchesFilter.swift"; sourceTree = "<group>"; };
 		B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+toDictionary.swift"; sourceTree = "<group>"; };
-		B887516B25E0102000DB86D6 /* AVCaptureVideoDataOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoDataOutput+mirror.swift"; sourceTree = "<group>"; };
+		B887516B25E0102000DB86D6 /* AVCaptureVideoDataOutput+setOrientation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoDataOutput+setOrientation.swift"; sourceTree = "<group>"; };
 		B887516E25E0102000DB86D6 /* MakeReactError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MakeReactError.swift; sourceTree = "<group>"; };
 		B887516F25E0102000DB86D6 /* ReactLogger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReactLogger.swift; sourceTree = "<group>"; };
 		B887517025E0102000DB86D6 /* Promise.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Promise.swift; sourceTree = "<group>"; };
@@ -203,7 +203,7 @@
 				B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */,
 				B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */,
 				B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */,
-				B887516B25E0102000DB86D6 /* AVCaptureVideoDataOutput+mirror.swift */,
+				B887516B25E0102000DB86D6 /* AVCaptureVideoDataOutput+setOrientation.swift */,
 				B887516225E0102000DB86D6 /* Collection+safe.swift */,
 			);
 			path = Extensions;
@@ -407,7 +407,7 @@
 				B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
 				B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
 				B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
-				B887519225E0102000DB86D6 /* AVCaptureVideoDataOutput+mirror.swift in Sources */,
+				B887519225E0102000DB86D6 /* AVCaptureVideoDataOutput+setOrientation.swift in Sources */,
 				B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
 				B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
 				B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,