feat: Complete iOS Codebase rewrite (#1647)

* Make Frame Processors an extra subspec

* Update VisionCamera.podspec

* Make optional

* Make VisionCamera compile without Skia

* Fix

* Add skia again

* Update VisionCamera.podspec

* Make VisionCamera build without Frame Processors

* Rename error to `system/frame-processors-unavailable`

* Fix Frame Processor returning early

* Remove `preset`, FP partial rewrite

* Only warn on frame drop

* Fix wrong queue

* fix: Run on CameraQueue again

* Update CameraView.swift

* fix: Activate audio session asynchronously on audio queue

* Update CameraView+RecordVideo.swift

* Update PreviewView.h

* Cleanups

* Cleanup

* fix cast

* feat: Add LiDAR Depth Camera support

* Upgrade Ruby

* Add vector icons type

* Update Gemfile.lock

* fix: Stop queues on deinit

* Also load `builtInTrueDepthCamera`

* Update CameraViewManager.swift

* Update SkImageHelpers.mm

* Extract FrameProcessorCallback to FrameProcessor

Holds more context now :)

* Rename to .m

* fix: Add `RCTLog` import

* Create SkiaFrameProcessor

* Update CameraBridge.h

* Call Frame Processor

* Fix defines

* fix: Allow deleting callback funcs

* fix Skia build

* batch

* Just call `setSkiaFrameProcessor`

* Rewrite in Swift

* Pass `SkiaRenderer`

* Fix Import

* Move `PreviewView` to Swift

* Fix Layer

* Set Skia Canvas to Frame Host Object

* Make `DrawableFrameHostObject` subclass

* Fix TS types

* Use same MTLDevice and apply scale

* Make getter

* Extract `setTorch` and `Preview`

* fix: Fix nil metal device

* Don't wait for session stop in deinit

* Use main pixel ratio

* Use unique_ptr for Render Contexts

* fix: Fix SkiaPreviewDisplayLink broken after deinit

* inline `getTextureCache`

* Update CameraPage.tsx

* chore: Format iOS

* perf: Allow MTLLayer to be optimized for only frame buffers

* Add RN Video types

* fix: Fix Frame Processors if guard

* Find nodeModules recursively

* Create `Frame.isDrawable`

* Add `cocoapods-check` dependency
Marc Rousavy
2023-07-20 15:30:04 +02:00
committed by GitHub
parent 5fb594ce6b
commit 375e894038
78 changed files with 1278 additions and 1245 deletions

View File

@@ -13,13 +13,10 @@
#import <React/RCTViewManager.h>
#import <React/RCTUIManager.h>
#import <React/RCTFPSGraph.h>
#import <React/RCTLog.h>
#import "FrameProcessorCallback.h"
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import "FrameProcessor.h"
#import "FrameProcessorRuntimeManager.h"
#import "Frame.h"
#import "RCTBridge+runOnJS.h"
#import "JSConsoleHelper.h"
@interface CameraBridge: RCTViewManager
@end
#endif

View File

@@ -113,7 +113,6 @@ enum FormatError {
case invalidHdr
case invalidFormat
case invalidColorSpace(colorSpace: String)
case invalidPreset(preset: String)
var code: String {
switch self {
@@ -123,8 +122,6 @@ enum FormatError {
return "invalid-fps"
case .invalidHdr:
return "invalid-hdr"
case .invalidPreset:
return "invalid-preset"
case .invalidColorSpace:
return "invalid-color-space"
}
@@ -141,8 +138,6 @@ enum FormatError {
case let .invalidColorSpace(colorSpace):
return "The currently selected format does not support the colorSpace \"\(colorSpace)\"! " +
"Make sure you select a format which `colorSpaces` includes \"\(colorSpace)\"!"
case let .invalidPreset(preset):
return "The preset \"\(preset)\" is not available for the current camera device."
}
}
}
@@ -256,6 +251,8 @@ enum CaptureError {
enum SystemError: String {
case noManager = "no-camera-manager"
case skiaUnavailable = "skia-unavailable"
case frameProcessorsUnavailable = "frame-processors-unavailable"
var code: String {
return rawValue
@@ -265,6 +262,10 @@ enum SystemError: String {
switch self {
case .noManager:
return "No Camera Manager was found."
case .skiaUnavailable:
return "Skia Integration is unavailable - is @shopify/react-native-skia installed?"
case .frameProcessorsUnavailable:
return "Frame Processors are unavailable - is react-native-worklets installed?"
}
}
}
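For reference, a quick hedged sketch of how the new error resolves at runtime (assuming the switch above backs a `message` property; the `system/` domain prefix mentioned in the commit message is presumably added by the enclosing CameraError type):

    let error = SystemError.frameProcessorsUnavailable
    print(error.code)    // "frame-processors-unavailable" (JS sees "system/frame-processors-unavailable")
    print(error.message) // "Frame Processors are unavailable - is react-native-worklets installed?"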

View File

@@ -72,7 +72,7 @@ extension CameraView {
invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
return
}
audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
audioOutput!.setSampleBufferDelegate(self, queue: CameraQueues.audioQueue)
audioCaptureSession.addOutput(audioOutput!)
}
}
@@ -135,7 +135,7 @@ extension CameraView {
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
if isRecording {
audioQueue.async {
CameraQueues.audioQueue.async {
ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
// restart audio session because interruption is over
self.activateAudioSession()
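All audio work now funnels through the shared static queues instead of per-view instance properties. A minimal sketch of what the CameraQueues helper could look like — the class name and the three static properties are taken from this commit, but the queue labels and QoS classes are assumptions:

    import Foundation

    @objc
    public class CameraQueues: NSObject {
      /// Serial queue for configuring and controlling the capture session. (label/QoS assumed)
      public static let cameraQueue = DispatchQueue(label: "mrousavy/VisionCamera.main", qos: .userInteractive)
      /// Serial queue on which video sample buffers are delivered. (label/QoS assumed)
      public static let videoQueue = DispatchQueue(label: "mrousavy/VisionCamera.video", qos: .userInteractive)
      /// Serial queue for audio session activation and audio sample buffers. (label/QoS assumed)
      public static let audioQueue = DispatchQueue(label: "mrousavy/VisionCamera.audio", qos: .userInteractive)
    }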

View File

@@ -39,28 +39,6 @@ extension CameraView {
captureSession.commitConfiguration()
}
// If preset is set, use preset. Otherwise use format.
if let preset = preset {
var sessionPreset: AVCaptureSession.Preset?
do {
sessionPreset = try AVCaptureSession.Preset(withString: preset)
} catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
invokeOnError(.parameter(.unsupportedOS(unionName: "Preset", receivedValue: preset, supportedOnOs: os)))
return
} catch {
invokeOnError(.parameter(.invalid(unionName: "Preset", receivedValue: preset)))
return
}
if sessionPreset != nil {
if captureSession.canSetSessionPreset(sessionPreset!) {
captureSession.sessionPreset = sessionPreset!
} else {
// non-fatal error, so continue with configuration
invokeOnError(.format(.invalidPreset(preset: preset)))
}
}
}
// pragma MARK: Capture Session Inputs
// Video Input
do {
@@ -132,7 +110,7 @@ extension CameraView {
invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
return
}
videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
videoOutput!.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
videoOutput!.alwaysDiscardsLateVideoFrames = false
if previewType == "skia" {
@@ -273,7 +251,7 @@ extension CameraView {
if isActive {
// restart capture session after an error occurred
cameraQueue.async {
CameraQueues.cameraQueue.async {
self.captureSession.startRunning()
}
}

View File

@@ -23,7 +23,7 @@ extension CameraView {
}
/// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system
func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
private func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
guard let previewView = previewView else {
invokeOnError(.session(.cameraNotReady))
return .zero
@@ -53,7 +53,7 @@ extension CameraView {
}
/// Converts a Point in the UI View Layer to a Point in the Camera Device Sensor coordinate system (x: [0..1], y: [0..1])
func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
private func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero

View File

@@ -0,0 +1,57 @@
//
// CameraView+Preview.swift
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
extension CameraView {
#if VISION_CAMERA_ENABLE_SKIA
@objc
func getSkiaRenderer() -> SkiaRenderer {
if skiaRenderer == nil {
skiaRenderer = SkiaRenderer()
}
return skiaRenderer!
}
#endif
public func setupPreviewView() {
if previewType == "skia" {
// Skia Preview View allows user to draw onto a Frame in a Frame Processor
#if VISION_CAMERA_ENABLE_SKIA
if previewView is SkiaPreviewView { return }
previewView?.removeFromSuperview()
previewView = SkiaPreviewView(frame: frame, skiaRenderer: getSkiaRenderer())
#else
invokeOnError(.system(.skiaUnavailable))
return
#endif
} else {
// Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
if previewView is NativePreviewView { return }
previewView?.removeFromSuperview()
previewView = NativePreviewView(frame: frame, session: captureSession)
}
addSubview(previewView!)
}
internal func setupFpsGraph() {
#if DEBUG
if enableFpsGraph {
if fpsGraph != nil { return }
fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
fpsGraph!.layer.zPosition = 9999.0
addSubview(fpsGraph!)
} else {
fpsGraph?.removeFromSuperview()
fpsGraph = nil
}
#endif
}
}

View File

@@ -15,7 +15,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
Starts a video + audio recording with a custom Asset Writer.
*/
func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
cameraQueue.async {
CameraQueues.cameraQueue.async {
ReactLogger.log(level: .info, message: "Starting Video recording...")
let callback = Callback(jsCallbackFunc)
@@ -67,7 +67,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
defer {
if enableAudio {
self.audioQueue.async {
CameraQueues.audioQueue.async {
self.deactivateAudioSession()
}
}
@@ -127,10 +127,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
recordingSession.initializeVideoWriter(withSettings: videoSettings,
pixelFormat: pixelFormat)
// Init Audio (optional, async)
// Init Audio (optional)
if enableAudio {
// Activate Audio Session (blocking)
self.activateAudioSession()
// Activate Audio Session asynchronously
CameraQueues.audioQueue.async {
self.activateAudioSession()
}
if let audioOutput = self.audioOutput,
let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) {
@@ -150,7 +152,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func stopRecording(promise: Promise) {
cameraQueue.async {
CameraQueues.cameraQueue.async {
self.isRecording = false
withPromise(promise) {
@@ -164,7 +166,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func pauseRecording(promise: Promise) {
cameraQueue.async {
CameraQueues.cameraQueue.async {
withPromise(promise) {
guard self.recordingSession != nil else {
// there's no active recording!
@@ -177,7 +179,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func resumeRecording(promise: Promise) {
cameraQueue.async {
CameraQueues.cameraQueue.async {
withPromise(promise) {
guard self.recordingSession != nil else {
// there's no active recording!
@@ -190,23 +192,15 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
// Draw Frame to Preview View Canvas (and call Frame Processor)
if captureOutput is AVCaptureVideoDataOutput {
if let previewView = previewView as? PreviewSkiaView {
// Render to Skia PreviewView
previewView.drawFrame(sampleBuffer) { canvas in
// Call JS Frame Processor before passing Frame to GPU - allows user to draw
guard let frameProcessor = self.frameProcessorCallback else { return }
let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
frameProcessor(frame, canvas)
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
if captureOutput is AVCaptureVideoDataOutput {
if let frameProcessor = frameProcessor {
// Call Frame Processor
let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
frameProcessor.call(frame)
}
} else {
// Call JS Frame Processor. User cannot draw, since we don't have a Skia Canvas.
guard let frameProcessor = frameProcessorCallback else { return }
let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
frameProcessor(frame, nil)
}
}
#endif
// Record Video Frame/Audio Sample to File
if isRecording {
@@ -220,8 +214,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
case is AVCaptureAudioDataOutput:
let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
from: audioCaptureSession.masterClock!,
to: captureSession.masterClock!)
from: audioCaptureSession.masterClock ?? CMClockGetHostTimeClock(),
to: captureSession.masterClock ?? CMClockGetHostTimeClock())
recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
default:
break
@@ -253,7 +247,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
/**
Gets the orientation of the CameraView's images (CMSampleBuffers).
*/
var bufferOrientation: UIImage.Orientation {
private var bufferOrientation: UIImage.Orientation {
guard let cameraPosition = videoDeviceInput?.device.position else {
return .up
}
@@ -261,16 +255,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
switch outputOrientation {
case .portrait:
return cameraPosition == .front ? .leftMirrored : .right
case .landscapeLeft:
return cameraPosition == .front ? .downMirrored : .up
case .portraitUpsideDown:
return cameraPosition == .front ? .rightMirrored : .left
case .landscapeRight:
return cameraPosition == .front ? .upMirrored : .down
case .unknown:
return .up
@unknown default:

View File

@@ -8,23 +8,9 @@
import AVFoundation
// MARK: - TakePhotoOptions
struct TakePhotoOptions {
init(fromDictionary dictionary: NSDictionary) {
if let videoCodec = dictionary.value(forKey: "videoCodec") as? String {
self.videoCodec = AVVideoCodecType(withString: videoCodec)
}
qualityPrioritization = dictionary.value(forKey: "qualityPrioritization") as? String
}
var videoCodec: AVVideoCodecType?
var qualityPrioritization: String?
}
extension CameraView {
func takePhoto(options: NSDictionary, promise: Promise) {
cameraQueue.async {
CameraQueues.cameraQueue.async {
guard let photoOutput = self.photoOutput,
let videoDeviceInput = self.videoDeviceInput else {
if self.photo?.boolValue == true {

View File

@@ -0,0 +1,51 @@
//
// CameraView+Torch.swift
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
extension CameraView {
final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
invokeOnError(.session(.cameraNotReady))
return
}
guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
return
}
if !captureSession.isRunning {
torchMode = .off
}
if device.torchMode == torchMode {
// no need to run the whole lock/unlock bs
return
}
if !device.hasTorch || !device.isTorchAvailable {
if torchMode == .off {
// ignore it, when it's off and not supported, it's off.
return
} else {
// torch mode is .auto or .on, but no torch is available.
invokeOnError(.device(.torchUnavailable))
return
}
}
do {
try device.lockForConfiguration()
device.torchMode = torchMode
if torchMode == .on {
try device.setTorchModeOn(level: 1.0)
}
device.unlockForConfiguration()
} catch let error as NSError {
invokeOnError(.device(.configureError), cause: error)
return
}
}
}
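For context, the call site later in this diff re-applies torch on the shared camera queue shortly after startRunning(), since the session ignores torch changes while it is still warming up:

    CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
      self.setTorchMode(self.torch) // `torch` is the string prop: "off" | "on" | "auto"
    }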

View File

@@ -23,7 +23,6 @@ private let propsThatRequireReconfiguration = ["cameraId",
"enableDepthData",
"enableHighQualityPhotos",
"enablePortraitEffectsMatteDelivery",
"preset",
"photo",
"video",
"enableFrameProcessor",
@@ -37,14 +36,11 @@ private let propsThatRequireDeviceReconfiguration = ["fps",
public final class CameraView: UIView {
// pragma MARK: React Properties
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighQualityPhotos: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
// use cases
@objc var photo: NSNumber? // nullable bool
@objc var video: NSNumber? // nullable bool
@@ -85,27 +81,26 @@ public final class CameraView: UIView {
// Capture Session
internal let captureSession = AVCaptureSession()
internal let audioCaptureSession = AVCaptureSession()
// Inputs
// Inputs & Outputs
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var videoOutput: AVCaptureVideoDataOutput?
internal var audioOutput: AVCaptureAudioDataOutput?
// CameraView+RecordView (+ FrameProcessorDelegate.mm)
// CameraView+RecordView (+ Frame Processor)
internal var isRecording = false
internal var recordingSession: RecordingSession?
@objc public var frameProcessorCallback: FrameProcessorCallback?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
@objc public var frameProcessor: FrameProcessor?
#endif
#if VISION_CAMERA_ENABLE_SKIA
internal var skiaRenderer: SkiaRenderer?
#endif
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
internal let cameraQueue = CameraQueues.cameraQueue
internal let videoQueue = CameraQueues.videoQueue
internal let audioQueue = CameraQueues.audioQueue
internal var previewView: UIView?
internal var previewView: PreviewView?
#if DEBUG
internal var fpsGraph: RCTFPSGraph?
#endif
@@ -165,10 +160,7 @@ public final class CameraView: UIView {
if newSuperview != nil {
if !isMounted {
isMounted = true
guard let onViewReady = onViewReady else {
return
}
onViewReady(nil)
onViewReady?(nil)
}
}
}
@@ -180,36 +172,6 @@ public final class CameraView: UIView {
}
}
func setupPreviewView() {
if previewType == "skia" {
// Skia Preview View allows user to draw onto a Frame in a Frame Processor
if previewView is PreviewSkiaView { return }
previewView?.removeFromSuperview()
previewView = PreviewSkiaView(frame: frame)
} else {
// Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
if previewView is PreviewView { return }
previewView?.removeFromSuperview()
previewView = PreviewView(frame: frame, session: captureSession)
}
addSubview(previewView!)
}
func setupFpsGraph() {
#if DEBUG
if enableFpsGraph {
if fpsGraph != nil { return }
fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
fpsGraph!.layer.zPosition = 9999.0
addSubview(fpsGraph!)
} else {
fpsGraph?.removeFromSuperview()
fpsGraph = nil
}
#endif
}
// pragma MARK: Props updating
override public final func didSetProps(_ changedProps: [String]!) {
ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
@@ -246,8 +208,8 @@ public final class CameraView: UIView {
shouldReconfigureDevice ||
shouldUpdateVideoStabilization ||
shouldUpdateOrientation {
// Video Configuration
cameraQueue.async {
CameraQueues.cameraQueue.async {
// Video Configuration
if shouldReconfigure {
self.configureCaptureSession()
}
@@ -285,7 +247,7 @@ public final class CameraView: UIView {
// This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
if shouldUpdateTorch {
self.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
self.setTorchMode(self.torch)
}
}
@@ -293,52 +255,13 @@ public final class CameraView: UIView {
// Audio Configuration
if shouldReconfigureAudioSession {
audioQueue.async {
CameraQueues.audioQueue.async {
self.configureAudioSession()
}
}
}
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
invokeOnError(.session(.cameraNotReady))
return
}
guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
return
}
if !captureSession.isRunning {
torchMode = .off
}
if device.torchMode == torchMode {
// no need to run the whole lock/unlock bs
return
}
if !device.hasTorch || !device.isTorchAvailable {
if torchMode == .off {
// ignore it, when it's off and not supported, it's off.
return
} else {
// torch mode is .auto or .on, but no torch is available.
invokeOnError(.device(.torchUnavailable))
return
}
}
do {
try device.lockForConfiguration()
device.torchMode = torchMode
if torchMode == .on {
try device.setTorchModeOn(level: 1.0)
}
device.unlockForConfiguration()
} catch let error as NSError {
invokeOnError(.device(.configureError), cause: error)
return
}
}
@objc
func onOrientationChanged() {
updateOrientation()

View File

@@ -40,7 +40,6 @@ RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(colorSpace, NSString);
RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(preset, NSString);
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(previewType, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);

View File

@@ -13,7 +13,9 @@ import Foundation
final class CameraViewManager: RCTViewManager {
// pragma MARK: Properties
private var runtimeManager: FrameProcessorRuntimeManager?
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
private var runtimeManager: FrameProcessorRuntimeManager?
#endif
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
@@ -31,10 +33,14 @@ final class CameraViewManager: RCTViewManager {
@objc
final func installFrameProcessorBindings() -> NSNumber {
// Runs on JS Thread
runtimeManager = FrameProcessorRuntimeManager()
runtimeManager!.installFrameProcessorBindings()
return true as NSNumber
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
// Runs on JS Thread
runtimeManager = FrameProcessorRuntimeManager()
runtimeManager!.installFrameProcessorBindings()
return true as NSNumber
#else
return false as NSNumber
#endif
}
@objc
@@ -101,15 +107,10 @@ final class CameraViewManager: RCTViewManager {
@objc
final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
withPromise(resolve: resolve, reject: reject) {
let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(), mediaType: .video, position: .unspecified)
let devices = discoverySession.devices.filter {
if #available(iOS 11.1, *) {
// exclude the true-depth camera. The True-Depth camera has YUV and Infrared, can't take photos!
return $0.deviceType != .builtInTrueDepthCamera
}
return true
}
return devices.map {
let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(),
mediaType: .video,
position: .unspecified)
return discoverySession.devices.map {
return [
"id": $0.uniqueID,
"devices": $0.physicalDevices.map(\.deviceType.descriptor),
@@ -171,6 +172,7 @@ final class CameraViewManager: RCTViewManager {
private func getCameraView(withTag tag: NSNumber) -> CameraView {
// swiftlint:disable force_cast
return bridge.uiManager.view(forReactTag: tag) as! CameraView
// swiftlint:enable force_cast
}
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
@@ -180,9 +182,6 @@ final class CameraViewManager: RCTViewManager {
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
if #available(iOS 11.1, *) {
deviceTypes.append(.builtInTrueDepthCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)

View File

@@ -12,6 +12,6 @@ extension FourCharCode {
s.append(String(UnicodeScalar((self >> 16) & 255)!))
s.append(String(UnicodeScalar((self >> 8) & 255)!))
s.append(String(UnicodeScalar(self & 255)!))
return (s)
return s
}
}

View File

@@ -14,9 +14,9 @@
@interface Frame : NSObject
- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation;
- (instancetype _Nonnull) initWithBuffer:(CMSampleBufferRef _Nonnull)buffer orientation:(UIImageOrientation)orientation;
@property (nonatomic, readonly) CMSampleBufferRef buffer;
@property (nonatomic, readonly) CMSampleBufferRef _Nonnull buffer;
@property (nonatomic, readonly) UIImageOrientation orientation;
@end

View File

@@ -11,11 +11,11 @@
#import <CoreMedia/CMSampleBuffer.h>
@implementation Frame {
CMSampleBufferRef buffer;
CMSampleBufferRef _Nonnull buffer;
UIImageOrientation orientation;
}
- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation {
- (instancetype) initWithBuffer:(CMSampleBufferRef _Nonnull)buffer orientation:(UIImageOrientation)orientation {
self = [super init];
if (self) {
_buffer = buffer;

View File

@@ -13,17 +13,11 @@
#import "Frame.h"
#import "SkCanvas.h"
#import "JsiSkCanvas.h"
using namespace facebook;
class JSI_EXPORT FrameHostObject: public jsi::HostObject {
public:
explicit FrameHostObject(Frame* frame): frame(frame) {}
explicit FrameHostObject(Frame* frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
frame(frame), canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
@@ -31,5 +25,4 @@ public:
public:
Frame* frame;
std::shared_ptr<RNSkia::JsiSkCanvas> canvas;
};

View File

@@ -11,8 +11,6 @@
#import <jsi/jsi.h>
#import "WKTJsiHostObject.h"
#import "SkCanvas.h"
#import "../Skia Render Layer/SkImageHelpers.h"
#import "../../cpp/JSITypedArray.h"
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
@@ -24,6 +22,7 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isDrawable")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
@@ -31,27 +30,10 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
// Skia
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (canvas != nullptr) {
auto canvasPropNames = canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
@@ -80,7 +62,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
incrementRefCount);
}
if (name == "decrementRefCount") {
auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame Buffer.
@@ -92,31 +73,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
decrementRefCount);
}
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
// convert CMSampleBuffer to SkImage
auto context = canvas->getCanvas()->recordingContext();
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
// draw SkImage
if (count > 0) {
// ..with paint/shader
auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
auto paint = paintHostObject->getObject();
canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
} else {
// ..without paint/shader
canvas->getCanvas()->drawImage(image, 0, 0);
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "toArrayBuffer") {
auto toArrayBuffer = JSI_HOST_FUNCTION_LAMBDA {
auto pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
@@ -146,6 +102,9 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
if (name == "isDrawable") {
return jsi::Value(false);
}
if (name == "isValid") {
auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
return jsi::Value(isValid);
@@ -206,11 +165,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Value((double) planesCount);
}
if (canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
return canvas->get(runtime, propName);
}
// fallback to base implementation
return HostObject::get(runtime, propName);
}

View File

@@ -0,0 +1,33 @@
//
// FrameProcessorContext.h
// VisionCamera
//
// Created by Marc Rousavy on 13.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "Frame.h"
#ifdef __cplusplus
#import "WKTJsiWorklet.h"
#import <jsi/jsi.h>
#import "FrameHostObject.h"
#import <memory.h>
#endif
@interface FrameProcessor : NSObject
#ifdef __cplusplus
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet;
- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject;
#endif
- (void)call:(Frame* _Nonnull)frame;
@end

View File

@@ -0,0 +1,61 @@
//
// FrameProcessor.mm
// VisionCamera
//
// Created by Marc Rousavy on 13.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "FrameProcessor.h"
#import <memory>
#import <jsi/jsi.h>
#import "WKTJsiWorklet.h"
#import "FrameHostObject.h"
using namespace facebook;
@implementation FrameProcessor {
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
}
- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet {
if (self = [super init]) {
_workletContext = context;
_workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
}
return self;
}
- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject {
// Call the Frame Processor on the Worklet Runtime
jsi::Runtime& runtime = _workletContext->getWorkletRuntime();
try {
// Wrap HostObject as JSI Value
auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
jsi::Value jsValue(std::move(argument));
// Call the Worklet with the Frame JS Host Object as an argument
_workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
} catch (jsi::JSError& jsError) {
// JS Error occurred, print it to console.
auto message = jsError.getMessage();
_workletContext->invokeOnJsThread([message](jsi::Runtime& jsRuntime) {
auto logFn = jsRuntime.global().getPropertyAsObject(jsRuntime, "console").getPropertyAsFunction(jsRuntime, "error");
logFn.call(jsRuntime, jsi::String::createFromUtf8(jsRuntime, "Frame Processor threw an error: " + message));
});
}
}
- (void)call:(Frame* _Nonnull)frame {
// Create the Frame Host Object wrapping the internal Frame
auto frameHostObject = std::make_shared<FrameHostObject>(frame);
[self callWithFrameHostObject:frameHostObject];
}
@end

View File

@@ -1,14 +0,0 @@
//
// FrameProcessorCallback.h
// VisionCamera
//
// Created by Marc Rousavy on 11.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import "Frame.h"
typedef void (^FrameProcessorCallback) (Frame* frame, void* skCanvas);

View File

@@ -20,13 +20,13 @@
/// Get the name of the Frame Processor Plugin.
/// This will be exposed to JS under the `FrameProcessorPlugins` Proxy object.
- (NSString *)name;
- (NSString * _Nonnull)name;
/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
/// Make sure your code is optimized, as this is a hot path.
- (id) callback:(Frame*)frame withArguments:(NSArray<id>*)arguments;
- (id _Nullable) callback:(Frame* _Nonnull)frame withArguments:(NSArray<id>* _Nullable)arguments;
/// Register the given plugin in the Plugin Registry. This should be called on App Startup.
+ (void) registerPlugin:(FrameProcessorPlugin*)plugin;
+ (void) registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
@end

View File

@@ -18,13 +18,13 @@
return nil;
}
- (id)callback:(Frame *)frame withArguments:(NSArray<id> *)arguments {
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSArray<id>* _Nullable)arguments {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin \"%@\" does not override the `callback(frame:withArguments:)` method!", [self name]];
return nil;
}
+ (void)registerPlugin:(FrameProcessorPlugin *)plugin {
+ (void)registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin {
[FrameProcessorPluginRegistry addFrameProcessorPlugin:plugin];
}
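With the nullability annotations in place, a plugin can be written in Swift. A hypothetical example — the class name, plugin name, and bridging-header setup are illustrative, not part of this commit:

    @objc(ExampleDetectorPlugin)
    public class ExampleDetectorPlugin: FrameProcessorPlugin {
      // Exposed to JS under the `FrameProcessorPlugins` Proxy object as `detect_example`.
      public override func name() -> String {
        return "detect_example"
      }

      public override func callback(_ frame: Frame, withArguments arguments: [Any]?) -> Any? {
        // Hot path: inspect frame.buffer (a CMSampleBuffer) and return a JS-convertible value.
        return nil
      }
    }

    // Registered once on app startup:
    // FrameProcessorPlugin.registerPlugin(ExampleDetectorPlugin())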

View File

@@ -15,6 +15,6 @@
@interface FrameProcessorPluginRegistry : NSObject
+ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins;
+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin*)plugin;
+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
@end

View File

@@ -13,8 +13,6 @@
@interface FrameProcessorRuntimeManager : NSObject
- (instancetype) init;
- (void) installFrameProcessorBindings;
@end

View File

@@ -10,6 +10,7 @@
#import "FrameProcessorRuntimeManager.h"
#import "FrameProcessorPluginRegistry.h"
#import "FrameProcessorPlugin.h"
#import "FrameProcessor.h"
#import "FrameHostObject.h"
#import <memory>
@@ -21,15 +22,15 @@
#import <ReactCommon/RCTTurboModuleManager.h>
#import "WKTJsiWorkletContext.h"
#import "WKTJsiWorkletApi.h"
#import "WKTJsiWorklet.h"
#import "WKTJsiHostObject.h"
#import "FrameProcessorUtils.h"
#import "FrameProcessorCallback.h"
#import "../React Utils/JSIUtils.h"
#import "../../cpp/JSITypedArray.h"
#if VISION_CAMERA_ENABLE_SKIA
#import "../Skia Render Layer/SkiaFrameProcessor.h"
#endif
// Forward declarations for the Swift classes
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues : NSObject
@@ -37,21 +38,15 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@end
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView : UIView
@property (nonatomic, copy) FrameProcessorCallback _Nullable frameProcessorCallback;
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
- (SkiaRenderer* _Nonnull)getSkiaRenderer;
@end
@implementation FrameProcessorRuntimeManager {
// Running Frame Processors on camera's video thread (synchronously)
// Separate Camera Worklet Context
std::shared_ptr<RNWorklet::JsiWorkletContext> workletContext;
}
- (instancetype)init {
if (self = [super init]) {
// Initialize self
}
return self;
}
- (void) setupWorkletContext:(jsi::Runtime&)runtime {
NSLog(@"FrameProcessorBindings: Creating Worklet Context...");
@@ -136,7 +131,7 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
// HostObject that attaches the cache to the lifecycle of the Runtime. On Runtime destroy, we destroy the cache.
auto propNameCacheObject = std::make_shared<vision::InvalidateCacheOnDestroy>(jsiRuntime);
jsiRuntime.global().setProperty(jsiRuntime,
"__visionCameraPropNameCache",
"__visionCameraArrayBufferCache",
jsi::Object::createFromHostObject(jsiRuntime, propNameCacheObject));
// Install the Worklet Runtime in the main React JS Runtime
@@ -148,14 +143,30 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
NSLog(@"FrameProcessorBindings: Setting new frame processor...");
auto viewTag = arguments[0].asNumber();
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, arguments[1]);
auto object = arguments[1].asObject(runtime);
auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, object.getProperty(runtime, "frameProcessor"));
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
auto callback = convertWorkletToFrameProcessorCallback(self->workletContext->getWorkletRuntime(), worklet);
view.frameProcessorCallback = callback;
if (frameProcessorType == "frame-processor") {
view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:self->workletContext
worklet:worklet];
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:self->workletContext
worklet:worklet
skiaRenderer:skiaRenderer];
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
});
return jsi::Value::undefined();
@@ -175,10 +186,8 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
if (!currentBridge) return;
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
if (!anonymousView) return;
auto view = static_cast<CameraView*>(anonymousView);
view.frameProcessorCallback = nil;
view.frameProcessor = nil;
});
return jsi::Value::undefined();

View File

@@ -1,25 +0,0 @@
//
// FrameProcessorUtils.h
// VisionCamera
//
// Created by Marc Rousavy on 15.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <React/RCTBridgeModule.h>
#import "FrameProcessorCallback.h"
#ifndef __cplusplus
#error FrameProcessorUtils.h has to be compiled with C++!
#endif
#import <jsi/jsi.h>
#import "WKTJsiWorklet.h"
#import <memory>
using namespace facebook;
FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet);

View File

@@ -1,72 +0,0 @@
//
// FrameProcessorUtils.m
// VisionCamera
//
// Created by Marc Rousavy on 15.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#import "FrameProcessorUtils.h"
#import <chrono>
#import <memory>
#import <regex>
#import "FrameHostObject.h"
#import "Frame.h"
#import <React/RCTBridge.h>
#import <React/RCTBridge+Private.h>
#import "JSConsoleHelper.h"
#import <ReactCommon/RCTTurboModule.h>
#import "WKTJsiWorklet.h"
#import "RNSkPlatformContext.h"
#import "RNSkiOSPlatformContext.h"
#import "JsiSkCanvas.h"
FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet) {
// Wrap Worklet call in invoker
auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
// Create cached Skia Canvas object
auto skiaPlatformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(&runtime, RCTBridge.currentBridge);
auto canvasHostObject = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
// Converts a Worklet to a callable Objective-C block function
return ^(Frame* frame, void* skiaCanvas) {
try {
// create HostObject which holds the Frame
auto frameHostObject = std::make_shared<FrameHostObject>(frame);
// Update cached Canvas object
if (skiaCanvas != nullptr) {
canvasHostObject->setCanvas((SkCanvas*)skiaCanvas);
frameHostObject->canvas = canvasHostObject;
} else {
frameHostObject->canvas = nullptr;
}
auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
jsi::Value jsValue(std::move(argument));
// Call the Worklet with the Frame JS Host Object as an argument
workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
// After the sync Frame Processor finished executing, remove the Canvas on that Frame instance. It can no longer draw.
frameHostObject->canvas = nullptr;
} catch (jsi::JSError& jsError) {
// JS Error occurred, print it to console.
auto stack = std::regex_replace(jsError.getStack(), std::regex("\n"), "\n ");
auto message = [NSString stringWithFormat:@"Frame Processor threw an error: %s\nIn: %s", jsError.getMessage().c_str(), stack.c_str()];
RCTBridge* bridge = [RCTBridge currentBridge];
if (bridge != nil && bridge.jsCallInvoker != nullptr) {
bridge.jsCallInvoker->invokeAsync([bridge, message]() {
auto logFn = [JSConsoleHelper getLogFunctionForBridge:bridge];
logFn(RCTLogLevelError, message);
});
} else {
NSLog(@"%@", message);
}
}
};
}

View File

@@ -0,0 +1,35 @@
//
// PreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
class NativePreviewView: PreviewView {
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
// swiftlint:enable force_cast
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
init(frame: CGRect, session: AVCaptureSession) {
super.init(frame: frame)
videoPreviewLayer.session = session
videoPreviewLayer.videoGravity = .resizeAspectFill
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented!")
}
}

View File

@@ -1,55 +0,0 @@
//
// AVCaptureSession.Preset+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
extension AVCaptureSession.Preset {
init(withString string: String) throws {
switch string {
case "cif-352x288":
self = .cif352x288
return
case "hd-1280x720":
self = .hd1280x720
return
case "hd-1920x1080":
self = .hd1920x1080
return
case "hd-3840x2160":
self = .hd4K3840x2160
return
case "high":
self = .high
return
case "iframe-1280x720":
self = .iFrame1280x720
return
case "iframe-960x540":
self = .iFrame960x540
return
case "input-priority":
self = .inputPriority
return
case "low":
self = .low
return
case "medium":
self = .medium
return
case "photo":
self = .photo
return
case "vga-640x480":
self = .vga640x480
return
default:
throw EnumParserError.invalidValue
}
}
}

View File

@@ -2,33 +2,11 @@
// PreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
class PreviewView: UIView {
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
init(frame: CGRect, session: AVCaptureSession) {
super.init(frame: frame)
videoPreviewLayer.session = session
videoPreviewLayer.videoGravity = .resizeAspectFill
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented!")
}
}
class PreviewView: UIView {}
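The now-empty base class exists so CameraView can hold a single `previewView: PreviewView?` (see CameraView.swift above) and swap implementations at runtime; presumably both concrete previews subclass it:

    // NativePreviewView: PreviewView — AVCaptureVideoPreviewLayer-backed (see PreviewView.swift above)
    // SkiaPreviewView: PreviewView   — Skia/Metal-backed, only compiled when VISION_CAMERA_ENABLE_SKIA is set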

View File

@@ -1,20 +0,0 @@
//
// JSConsoleHelper.h
// VisionCamera
//
// Created by Marc Rousavy on 02.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <React/RCTBridge.h>
#import <React/RCTLog.h>
@interface JSConsoleHelper : NSObject
typedef void (^ConsoleLogFunction) (RCTLogLevel level, NSString* message);
+ (ConsoleLogFunction) getLogFunctionForBridge:(RCTBridge*)bridge;
@end

View File

@@ -1,60 +0,0 @@
//
// JSConsoleHelper.mm
// VisionCamera
//
// Created by Marc Rousavy on 02.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "JSConsoleHelper.h"
#import <React/RCTBridge.h>
#import <ReactCommon/RCTTurboModule.h>
#import <React/RCTBridge+Private.h>
#import <jsi/jsi.h>
#import "RCTBridge+runOnJS.h"
@implementation JSConsoleHelper
+ (const char *) getLogFunctionNameForLogLevel:(RCTLogLevel)level {
switch (level) {
case RCTLogLevelTrace:
return "trace";
case RCTLogLevelInfo:
return "log";
case RCTLogLevelWarning:
return "warn";
case RCTLogLevelError:
case RCTLogLevelFatal:
return "error";
}
}
+ (ConsoleLogFunction) getLogFunctionForBridge:(RCTBridge*)bridge {
RCTCxxBridge *cxxBridge = (RCTCxxBridge *)bridge;
if (!cxxBridge.runtime) {
return nil;
}
facebook::jsi::Runtime* jsiRuntime = (facebook::jsi::Runtime*)cxxBridge.runtime;
return ^(RCTLogLevel level, NSString* message) {
[bridge runOnJS:^{
if (jsiRuntime != nullptr) {
facebook::jsi::Runtime& runtime = *jsiRuntime;
auto logFunctionName = [JSConsoleHelper getLogFunctionNameForLogLevel:level];
try {
auto console = runtime.global().getPropertyAsObject(runtime, "console");
auto log = console.getPropertyAsFunction(runtime, logFunctionName);
log.call(runtime, facebook::jsi::String::createFromAscii(runtime, [message UTF8String]));
} catch (facebook::jsi::JSError& jsError) {
NSLog(@"%@", message);
NSLog(@"Failed to call `console.%s`: %s", logFunctionName, jsError.getMessage().c_str());
}
}
}];
};
}
@end

View File

@@ -20,6 +20,7 @@
#import <ReactCommon/CallInvoker.h>
#import <React/RCTBridge.h>
#import <ReactCommon/TurboModuleUtils.h>
#import <ReactCommon/RCTBlockGuard.h>
#import "../Frame Processor/Frame.h"
#import "../Frame Processor/FrameHostObject.h"
@@ -173,6 +174,13 @@ id convertJSIValueToObjCObject(jsi::Runtime &runtime, const jsi::Value &value, s
RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime &runtime, const jsi::Function &value, std::shared_ptr<CallInvoker> jsInvoker)
{
auto weakWrapper = CallbackWrapper::createWeak(value.getFunction(runtime), runtime, jsInvoker);
RCTBlockGuard *blockGuard = [[RCTBlockGuard alloc] initWithCleanup:^() {
auto strongWrapper = weakWrapper.lock();
if (strongWrapper) {
strongWrapper->destroy();
}
}];
BOOL __block wrapperWasCalled = NO;
RCTResponseSenderBlock callback = ^(NSArray *responses) {
if (wrapperWasCalled) {
@@ -184,7 +192,7 @@ RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime &runtime, const
return;
}
strongWrapper->jsInvoker().invokeAsync([weakWrapper, responses]() {
strongWrapper->jsInvoker().invokeAsync([weakWrapper, responses, blockGuard]() {
auto strongWrapper2 = weakWrapper.lock();
if (!strongWrapper2) {
return;
@@ -194,6 +202,9 @@ RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime &runtime, const
strongWrapper2->callback().call(strongWrapper2->runtime(), args, static_cast<size_t>(responses.count));
strongWrapper2->destroy();
delete[] args;
// Delete the CallbackWrapper when the block gets dealloced without being invoked.
(void)blockGuard;
});
wrapperWasCalled = YES;

View File

@@ -1,18 +0,0 @@
//
// RCTBridge+runOnJS.h
// VisionCamera
//
// Created by Marc Rousavy on 23.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
@interface RCTBridge (RunOnJS)
- (void) runOnJS:(void (^)(void))block NS_SWIFT_NAME( runOnJS(_:) );
@end

View File

@@ -1,23 +0,0 @@
//
// RCTBridge+runOnJS.mm
// VisionCamera
//
// Created by Marc Rousavy on 23.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#import "RCTBridge+runOnJS.h"
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
#import <ReactCommon/RCTTurboModule.h>
@implementation RCTBridge (RunOnJS)
- (void) runOnJS:(void (^)())block {
auto callInvoker = [self jsCallInvoker];
callInvoker->invokeAsync([block]() {
block();
});
}
@end

View File

@@ -0,0 +1,35 @@
//
// DrawableFrameHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <jsi/jsi.h>
#import "../Frame Processor/FrameHostObject.h"
#import "../Frame Processor/Frame.h"
#import <CoreMedia/CMSampleBuffer.h>
#import "SkCanvas.h"
#import "JsiSkCanvas.h"
using namespace facebook;
class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
public:
explicit DrawableFrameHostObject(Frame* frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
FrameHostObject(frame), _canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
void invalidateCanvas();
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};

View File

@@ -0,0 +1,83 @@
//
// DrawableFrameHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "DrawableFrameHostObject.h"
#import "SkCanvas.h"
#import "SkImageHelpers.h"
std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
auto result = FrameHostObject::getPropertyNames(rt);
// Skia - Render Frame
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (_canvas != nullptr) {
auto canvasPropNames = _canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (_canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
// convert CMSampleBuffer to SkImage
auto context = _canvas->getCanvas()->recordingContext();
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
// draw SkImage
if (count > 0) {
// ..with paint/shader
auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
auto paint = paintHostObject->getObject();
_canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
} else {
// ..without paint/shader
_canvas->getCanvas()->drawImage(image, 0, 0);
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "isDrawable") {
return jsi::Value(_canvas != nullptr);
}
if (_canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
auto result = _canvas->get(runtime, propName);
if (!result.isUndefined()) {
return result;
}
}
// fallback to base implementation
return FrameHostObject::get(runtime, propName);
}
void DrawableFrameHostObject::invalidateCanvas() {
_canvas = nullptr;
}

View File

@@ -1,26 +0,0 @@
//
// PreviewSkiaView.h
// VisionCamera
//
// Created by Marc Rousavy on 17.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef PreviewSkiaView_h
#define PreviewSkiaView_h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "FrameProcessorCallback.h"
typedef void (^DrawCallback) (void* _Nonnull skCanvas);
@interface PreviewSkiaView: UIView
// Call to pass a new Frame to be drawn by the Skia Canvas
- (void) drawFrame:(_Nonnull CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback;
@end
#endif /* PreviewSkiaView_h */

View File

@@ -1,60 +0,0 @@
//
// PreviewSkiaView.mm
// VisionCamera
//
// Created by Marc Rousavy on 17.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "PreviewSkiaView.h"
#import <Foundation/Foundation.h>
#import "SkiaMetalCanvasProvider.h"
#include <include/core/SkCanvas.h>
#include <exception>
#include <string>
#if SHOW_FPS
#import <React/RCTFPSGraph.h>
#endif
@implementation PreviewSkiaView {
std::shared_ptr<SkiaMetalCanvasProvider> _canvasProvider;
}
- (void)drawFrame:(CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback {
if (_canvasProvider == nullptr) {
throw std::runtime_error("Cannot draw new Frame to Canvas when SkiaMetalCanvasProvider is null!");
}
_canvasProvider->renderFrameToCanvas(buffer, ^(SkCanvas* canvas) {
callback((void*)canvas);
});
}
- (void) willMoveToSuperview:(UIView *)newWindow {
if (newWindow == NULL) {
// Remove implementation view when the parent view is not set
if (_canvasProvider != nullptr) {
[_canvasProvider->getLayer() removeFromSuperlayer];
_canvasProvider = nullptr;
}
} else {
// Create implementation view when the parent view is set
if (_canvasProvider == nullptr) {
_canvasProvider = std::make_shared<SkiaMetalCanvasProvider>();
[self.layer addSublayer: _canvasProvider->getLayer()];
_canvasProvider->start();
}
}
}
- (void) layoutSubviews {
if (_canvasProvider != nullptr) {
_canvasProvider->setSize(self.bounds.size.width, self.bounds.size.height);
}
}
@end

View File

@@ -25,18 +25,18 @@
# define FourCC2Str(fourcc) (const char[]){*(((char*)&fourcc)+3), *(((char*)&fourcc)+2), *(((char*)&fourcc)+1), *(((char*)&fourcc)+0),0}
#endif
CVMetalTextureCacheRef getTextureCache(GrRecordingContext* context) {
inline CVMetalTextureCacheRef getTextureCache() {
static CVMetalTextureCacheRef textureCache = nil;
if (textureCache == nil) {
// Create a new Texture Cache
auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
nil,
MTLCreateSystemDefaultDevice(),
nil,
&textureCache);
if (result != kCVReturnSuccess || textureCache == nil) {
throw std::runtime_error("Failed to create Metal Texture Cache!");
}
auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
nil,
MTLCreateSystemDefaultDevice(),
nil,
&textureCache);
if (result != kCVReturnSuccess || textureCache == nil) {
throw std::runtime_error("Failed to create Metal Texture Cache!");
}
}
return textureCache;
}
@@ -45,46 +45,34 @@ sk_sp<SkImage> SkImageHelpers::convertCMSampleBufferToSkImage(GrRecordingContext
auto pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
double width = CVPixelBufferGetWidth(pixelBuffer);
double height = CVPixelBufferGetHeight(pixelBuffer);
// Make sure the format is RGB (BGRA_8888)
auto format = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (format != kCVPixelFormatType_32BGRA) {
auto fourCharCode = @(FourCC2Str(format));
auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + fourCharCode.UTF8String + std::string(") - cannot convert to SkImage!");
auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + FourCC2Str(format) + std::string(") - cannot convert to SkImage!");
throw std::runtime_error(error);
}
auto textureCache = getTextureCache(context);
auto textureCache = getTextureCache();
// Convert CMSampleBuffer* -> CVMetalTexture*
CVMetalTextureRef cvTexture;
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache,
pixelBuffer,
nil,
MTLPixelFormatBGRA8Unorm,
width,
height,
0, // plane index
&cvTexture);
GrMtlTextureInfo textureInfo;
textureCache,
pixelBuffer,
nil,
MTLPixelFormatBGRA8Unorm,
width,
height,
0, // plane index
&cvTexture);
auto mtlTexture = CVMetalTextureGetTexture(cvTexture);
textureInfo.fTexture.retain((__bridge void*)mtlTexture);
// Wrap it in a GrBackendTexture
GrBackendTexture texture(width, height, GrMipmapped::kNo, textureInfo);
auto image = convertMTLTextureToSkImage(context, mtlTexture);
// Create an SkImage from the existing texture
auto image = SkImages::AdoptTextureFrom(context,
texture,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
kOpaque_SkAlphaType,
SkColorSpace::MakeSRGB());
// Release the Texture wrapper (it will still be strong)
CFRelease(cvTexture);
return image;
}
@@ -92,7 +80,11 @@ sk_sp<SkImage> SkImageHelpers::convertMTLTextureToSkImage(GrRecordingContext* co
// Convert the rendered MTLTexture to an SkImage
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendTexture backendTexture(texture.width, texture.height, GrMipmapped::kNo, textureInfo);
GrBackendTexture backendTexture((int)texture.width,
(int)texture.height,
GrMipmapped::kNo,
textureInfo);
// TODO: Adopt or Borrow?
auto image = SkImages::AdoptTextureFrom(context,
backendTexture,
kTopLeft_GrSurfaceOrigin,
@@ -109,7 +101,7 @@ SkRect SkImageHelpers::createCenterCropRect(SkRect sourceRect, SkRect destinatio
} else {
src = SkSize::Make((sourceRect.height() * destinationRect.width()) / destinationRect.height(), sourceRect.height());
}
return inscribe(src, sourceRect);
}
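The conversion above is zero-copy: the CVMetalTextureCache wraps the camera's CVPixelBuffer in an MTLTexture that shares the same memory, and Skia then adopts that texture. A minimal Swift sketch of the same idea, assuming a cache created as in getTextureCache() above (the helper name and error handling are illustrative, not part of this commit):

import CoreVideo
import Metal

// Illustrative helper: wraps a BGRA pixel buffer in a Metal texture
// without copying pixel data, using a shared texture cache.
func makeTexture(from pixelBuffer: CVPixelBuffer,
                 cache: CVMetalTextureCache) -> MTLTexture? {
  var cvTexture: CVMetalTexture?
  let result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         cache,
                                                         pixelBuffer,
                                                         nil,
                                                         .bgra8Unorm,
                                                         CVPixelBufferGetWidth(pixelBuffer),
                                                         CVPixelBufferGetHeight(pixelBuffer),
                                                         0, // plane index
                                                         &cvTexture)
  guard result == kCVReturnSuccess, let cvTexture = cvTexture else {
    return nil
  }
  // The returned MTLTexture shares memory with the pixel buffer.
  return CVMetalTextureGetTexture(cvTexture)
}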

View File

@@ -0,0 +1,27 @@
//
// SkiaFrameProcessor.h
// VisionCamera
//
// Created by Marc Rousavy on 14.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import "FrameProcessor.h"
#import "SkiaRenderer.h"
#ifdef __cplusplus
#import "WKTJsiWorklet.h"
#endif
@interface SkiaFrameProcessor: FrameProcessor
#ifdef __cplusplus
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer;
#endif
@end

View File

@@ -0,0 +1,56 @@
//
// SkiaFrameProcessor.mm
// VisionCamera
//
// Created by Marc Rousavy on 14.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SkiaFrameProcessor.h"
#import "SkiaRenderer.h"
#import <memory>
#import <jsi/jsi.h>
#import "DrawableFrameHostObject.h"
#import <react-native-skia/JsiSkCanvas.h>
#import <react-native-skia/RNSkiOSPlatformContext.h>
using namespace facebook;
@implementation SkiaFrameProcessor {
SkiaRenderer* _skiaRenderer;
std::shared_ptr<RNSkia::JsiSkCanvas> _skiaCanvas;
}
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer {
if (self = [super initWithWorklet:context
worklet:worklet]) {
_skiaRenderer = skiaRenderer;
auto platformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(context->getJsRuntime(),
RCTBridge.currentBridge);
_skiaCanvas = std::make_shared<RNSkia::JsiSkCanvas>(platformContext);
}
return self;
}
- (void)call:(Frame*)frame {
[_skiaRenderer renderCameraFrameToOffscreenCanvas:frame.buffer
withDrawCallback:^(SkiaCanvas _Nonnull canvas) {
// Create the Frame Host Object wrapping the internal Frame and Skia Canvas
self->_skiaCanvas->setCanvas(static_cast<SkCanvas*>(canvas));
auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, self->_skiaCanvas);
// Call JS Frame Processor
[self callWithFrameHostObject:frameHostObject];
// Remove Skia Canvas from Host Object because it is no longer valid
frameHostObject->invalidateCanvas();
}];
}
@end

View File

@@ -1,57 +0,0 @@
#pragma once
#ifndef __cplusplus
#error This header has to be compiled with C++!
#endif
#import <MetalKit/MetalKit.h>
#import <QuartzCore/CAMetalLayer.h>
#import <AVFoundation/AVFoundation.h>
#include <include/gpu/GrDirectContext.h>
#include <include/core/SkCanvas.h>
#include <functional>
#include <mutex>
#include <memory>
#include <atomic>
#import "VisionDisplayLink.h"
#import "SkiaMetalRenderContext.h"
class SkiaMetalCanvasProvider: public std::enable_shared_from_this<SkiaMetalCanvasProvider> {
public:
SkiaMetalCanvasProvider();
~SkiaMetalCanvasProvider();
// Render a Camera Frame to the off-screen canvas
void renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback);
// Start updating the DisplayLink (runLoop @ screen refresh rate) and draw Frames to the Layer
void start();
// Update the size of the View (Layer)
void setSize(int width, int height);
CALayer* getLayer();
private:
bool _isValid = false;
float _width = -1;
float _height = -1;
// For rendering Camera Frame -> off-screen MTLTexture
OffscreenRenderContext _offscreenContext;
// For rendering off-screen MTLTexture -> on-screen CAMetalLayer
LayerRenderContext _layerContext;
// For synchronization between the two Threads/Contexts
std::mutex _textureMutex;
std::atomic<bool> _hasNewFrame = false;
private:
void render();
id<MTLTexture> getTexture(int width, int height);
float getPixelDensity();
};

View File

@@ -0,0 +1,51 @@
//
// SkiaPreviewDisplayLink.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import Foundation
class SkiaPreviewDisplayLink {
private var displayLink: CADisplayLink?
private let callback: (_ timestamp: Double) -> Void
init(callback: @escaping (_ timestamp: Double) -> Void) {
self.callback = callback
}
deinit {
stop()
}
@objc
func update(_ displayLink: CADisplayLink) {
callback(displayLink.timestamp)
}
func start() {
if displayLink == nil {
let displayLink = CADisplayLink(target: self, selector: #selector(update))
let queue = DispatchQueue(label: "mrousavy/VisionCamera.preview",
qos: .userInteractive,
attributes: [],
autoreleaseFrequency: .inherit,
target: nil)
queue.async {
displayLink.add(to: .current, forMode: .common)
self.displayLink = displayLink
ReactLogger.log(level: .info, message: "Starting Skia Preview Display Link...")
RunLoop.current.run()
ReactLogger.log(level: .info, message: "Skia Preview Display Link stopped.")
}
}
}
func stop() {
displayLink?.invalidate()
displayLink = nil
}
}
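A minimal usage sketch (the caller is hypothetical; SkiaPreviewView below wires it up the same way): the callback fires on the dedicated "mrousavy/VisionCamera.preview" queue's run loop at the display's refresh rate, so anything it touches must be safe to access from that thread.

let displayLink = SkiaPreviewDisplayLink { timestamp in
  // Runs at the display refresh rate - e.g. 60 times per second.
  print("render tick at \(timestamp)")
}
displayLink.start()
// ... later, e.g. when the view leaves the window:
displayLink.stop()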

View File

@@ -0,0 +1,82 @@
//
// SkiaPreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import Foundation
// MARK: - SkiaPreviewLayer
@available(iOS 13.0, *)
class SkiaPreviewLayer: CAMetalLayer {
private var pixelRatio: CGFloat {
return UIScreen.main.scale
}
init(device: MTLDevice) {
super.init()
framebufferOnly = true
self.device = device
isOpaque = false
pixelFormat = .bgra8Unorm
contentsScale = pixelRatio
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setSize(width: CGFloat, height: CGFloat) {
frame = CGRect(x: 0, y: 0, width: width, height: height)
drawableSize = CGSize(width: width * pixelRatio,
height: height * pixelRatio)
}
}
// MARK: - SkiaPreviewView
class SkiaPreviewView: PreviewView {
private let skiaRenderer: SkiaRenderer
private let previewLayer: SkiaPreviewLayer
private lazy var displayLink = SkiaPreviewDisplayLink(callback: { [weak self] _ in
// Called every time the screen needs to be rendered - e.g. 60 FPS
if let self = self {
self.skiaRenderer.renderLatestFrame(to: self.previewLayer)
}
})
init(frame: CGRect, skiaRenderer: SkiaRenderer) {
self.skiaRenderer = skiaRenderer
previewLayer = SkiaPreviewLayer(device: skiaRenderer.metalDevice)
super.init(frame: frame)
}
deinit {
self.displayLink.stop()
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func willMove(toSuperview newSuperview: UIView?) {
if newSuperview != nil {
layer.addSublayer(previewLayer)
displayLink.start()
} else {
previewLayer.removeFromSuperlayer()
displayLink.stop()
}
}
override func layoutSubviews() {
previewLayer.setSize(width: bounds.size.width,
height: bounds.size.height)
}
}

View File

@@ -1,18 +1,15 @@
//
// SkiaMetalRenderContext.h
// SkiaRenderContext.h
// VisionCamera
//
// Created by Marc Rousavy on 02.12.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef SkiaMetalRenderContext_h
#define SkiaMetalRenderContext_h
#pragma once
#import <MetalKit/MetalKit.h>
#import <QuartzCore/CAMetalLayer.h>
#import <AVFoundation/AVFoundation.h>
#include <include/gpu/GrDirectContext.h>
#import <include/gpu/GrDirectContext.h>
struct RenderContext {
id<MTLDevice> device;
@@ -26,16 +23,3 @@ struct RenderContext {
(__bridge void*)commandQueue);
}
};
// For rendering to an off-screen in-memory Metal Texture (MTLTexture)
struct OffscreenRenderContext: public RenderContext {
id<MTLTexture> texture;
};
// For rendering to a Metal Layer (CAMetalLayer)
struct LayerRenderContext: public RenderContext {
CAMetalLayer* layer;
VisionDisplayLink* displayLink;
};
#endif /* SkiaMetalRenderContext_h */

View File

@@ -0,0 +1,45 @@
//
// SkiaRenderer.h
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
typedef void* SkiaCanvas;
typedef void(^draw_callback_t)(SkiaCanvas _Nonnull);
/**
A Camera Frame Renderer powered by Skia.
It provides two Contexts, one offscreen and one onscreen.
- Offscreen Context: Allows you to render a Frame into a Skia Canvas and draw onto it using Skia commands.
- Onscreen Context: Allows you to render a Frame from the offscreen context onto a Layer so it can be displayed in the Preview.
The two contexts may run at different Frame Rates.
*/
@interface SkiaRenderer : NSObject
/**
Renders the given Camera Frame to the offscreen Skia Canvas.
The given callback will be executed with a reference to the Skia Canvas
for the user to perform draw operations on (in this case, through a JS Frame Processor)
*/
- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
/**
Renders the latest Frame to the onscreen Layer.
This should be called every time you want the UI to update - e.g. for 60 FPS, every 16.66ms.
*/
- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer;
/**
The Metal Device used for Rendering to the Layer
*/
@property (nonatomic, readonly) id<MTLDevice> _Nonnull metalDevice;
@end
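To make the two-context design concrete, here is a hedged Swift sketch of the onscreen half, mirroring what SkiaPreviewView does above (the offscreen half is driven from the camera queue by SkiaFrameProcessor, shown earlier in this diff):

// Illustrative wiring, not part of this commit:
let renderer = SkiaRenderer()
let layer = SkiaPreviewLayer(device: renderer.metalDevice)
let displayLink = SkiaPreviewDisplayLink { _ in
  // Onscreen pass: draws whatever Frame the offscreen context rendered last.
  renderer.renderLatestFrame(to: layer)
}
displayLink.start()

Because the preview only redraws when a new Frame has arrived (the _hasNewFrame flag below), the two passes can safely run at different frame rates - e.g. a 30 FPS camera feeding a 60 FPS display.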

View File

@@ -1,72 +1,137 @@
#import "SkiaMetalCanvasProvider.h"
//
// SkiaRenderer.mm
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SkiaRenderer.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import "SkiaRenderContext.h"
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkFont.h>
#import <include/core/SkColorSpace.h>
#import <include/gpu/ganesh/SkImageGanesh.h>
#import <include/gpu/GrDirectContext.h>
#import "SkImageHelpers.h"
#include <memory>
#import <system_error>
#import <memory>
#import <mutex>
SkiaMetalCanvasProvider::SkiaMetalCanvasProvider(): std::enable_shared_from_this<SkiaMetalCanvasProvider>() {
// Configure Metal Layer
_layerContext.layer = [CAMetalLayer layer];
_layerContext.layer.framebufferOnly = NO;
_layerContext.layer.device = _layerContext.device;
_layerContext.layer.opaque = false;
_layerContext.layer.contentsScale = getPixelDensity();
_layerContext.layer.pixelFormat = MTLPixelFormatBGRA8Unorm;
// Set up DisplayLink
_layerContext.displayLink = [[VisionDisplayLink alloc] init];
_isValid = true;
@implementation SkiaRenderer {
// The context we draw each Frame on
std::unique_ptr<RenderContext> _offscreenContext;
// The context the preview runs on
std::unique_ptr<RenderContext> _layerContext;
// The texture holding the drawn-to Frame
id<MTLTexture> _texture;
// For synchronization between the two Threads/Contexts
std::mutex _textureMutex;
std::atomic<bool> _hasNewFrame;
}
SkiaMetalCanvasProvider::~SkiaMetalCanvasProvider() {
_isValid = false;
NSLog(@"VisionCamera: Stopping SkiaMetalCanvasProvider DisplayLink...");
[_layerContext.displayLink stop];
- (instancetype)init {
if (self = [super init]) {
_offscreenContext = std::make_unique<RenderContext>();
_layerContext = std::make_unique<RenderContext>();
_texture = nil;
_hasNewFrame = false;
}
return self;
}
void SkiaMetalCanvasProvider::start() {
NSLog(@"VisionCamera: Starting SkiaMetalCanvasProvider DisplayLink...");
[_layerContext.displayLink start:[weakThis = weak_from_this()](double time) {
auto thiz = weakThis.lock();
if (thiz) {
thiz->render();
}
}];
- (id<MTLDevice>)metalDevice {
return _layerContext->device;
}
id<MTLTexture> SkiaMetalCanvasProvider::getTexture(int width, int height) {
if (_offscreenContext.texture == nil
|| _offscreenContext.texture.width != width
|| _offscreenContext.texture.height != height) {
- (id<MTLTexture>)getTexture:(NSUInteger)width height:(NSUInteger)height {
if (_texture == nil
|| _texture.width != width
|| _texture.height != height) {
// Create new texture with the given width and height
MTLTextureDescriptor* textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
width:width
height:height
mipmapped:NO];
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
_offscreenContext.texture = [_offscreenContext.device newTextureWithDescriptor:textureDescriptor];
_texture = [_offscreenContext->device newTextureWithDescriptor:textureDescriptor];
}
return _offscreenContext.texture;
return _texture;
}
/**
Callback from the DisplayLink - renders the current in-memory off-screen texture to the on-screen CAMetalLayer
*/
void SkiaMetalCanvasProvider::render() {
if (_width == -1 && _height == -1) {
return;
}
- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
// Wrap in auto release pool since we want the system to clean up after rendering
@autoreleasepool {
// Get the Frame's PixelBuffer
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
throw std::runtime_error("SkiaRenderer: Pixel Buffer is corrupt/empty.");
}
// Lock the Mutex so the preview pass (renderLatestFrameToLayer:) cannot read the Texture while we overwrite it
std::unique_lock lock(_textureMutex);
// Get the Metal Texture we use for in-memory drawing
auto texture = [self getTexture:CVPixelBufferGetWidth(pixelBuffer)
height:CVPixelBufferGetHeight(pixelBuffer)];
// Get & Lock the writeable Texture from the Metal Drawable
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendRenderTarget backendRenderTarget((int)texture.width,
(int)texture.height,
1,
textureInfo);
auto context = _offscreenContext->skiaContext.get();
// Create a Skia Surface from the writable Texture
auto surface = SkSurface::MakeFromBackendRenderTarget(context,
backendRenderTarget,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
SkColorSpace::MakeSRGB(),
nullptr);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
// Converts the CMSampleBuffer to an SkImage - RGB.
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
auto canvas = surface->getCanvas();
// Clear everything so we keep it at a clean state
canvas->clear(SkColors::kBlack);
// Draw the Image into the Frame (aspectRatio: cover)
// The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
// but that'll just overwrite the existing one - no need to worry.
canvas->drawImage(image, 0, 0);
// Call the draw callback - probably a JS Frame Processor.
callback(static_cast<void*>(canvas));
// Flush all appended operations on the canvas and commit it to the SkSurface
surface->flushAndSubmit();
// Set dirty & free locks
_hasNewFrame = true;
lock.unlock();
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer {
if (!_hasNewFrame) {
// No new Frame has arrived in the meantime.
// We don't need to re-draw the texture to the screen if nothing has changed, abort.
@@ -74,12 +139,12 @@ void SkiaMetalCanvasProvider::render() {
}
@autoreleasepool {
auto context = _layerContext.skiaContext.get();
auto context = _layerContext->skiaContext.get();
// Create a Skia Surface from the CAMetalLayer (use to draw to the View)
GrMTLHandle drawableHandle;
auto surface = SkSurface::MakeFromCAMetalLayer(context,
(__bridge GrMTLHandle)_layerContext.layer,
(__bridge GrMTLHandle)layer,
kTopLeft_GrSurfaceOrigin,
1,
kBGRA_8888_SkColorType,
@@ -91,15 +156,14 @@ void SkiaMetalCanvasProvider::render() {
}
auto canvas = surface->getCanvas();
// Lock the Mutex so we can operate on the Texture atomically without
// renderFrameToCanvas() overwriting in between from a different thread
std::unique_lock lock(_textureMutex);
// Get the texture
auto texture = _offscreenContext.texture;
auto texture = _texture;
if (texture == nil) return;
// Calculate Center Crop (aspectRatio: cover) transform
auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
@@ -130,104 +194,14 @@ void SkiaMetalCanvasProvider::render() {
// Pass the drawable into the Metal Command Buffer and submit it to the GPU
id<CAMetalDrawable> drawable = (__bridge id<CAMetalDrawable>)drawableHandle;
id<MTLCommandBuffer> commandBuffer([_layerContext.commandQueue commandBuffer]);
id<MTLCommandBuffer> commandBuffer([_layerContext->commandQueue commandBuffer]);
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
// Set flag back to false
_hasNewFrame = false;
lock.unlock();
}
}
float SkiaMetalCanvasProvider::getPixelDensity() {
return UIScreen.mainScreen.scale;
}
/**
Render to a canvas. This uses the current in-memory off-screen texture and draws to it.
The buffer is expected to be in RGB (`BGRA_8888`) format.
While rendering, `drawCallback` will be invoked with a Skia Canvas instance which can be used for Frame Processing (JS).
*/
void SkiaMetalCanvasProvider::renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback) {
if (_width == -1 && _height == -1) {
return;
}
// Wrap in auto release pool since we want the system to clean up after rendering
// and not wait until later - we've seen some example of memory usage growing very
// fast in the simulator without this.
@autoreleasepool {
// Get the Frame's PixelBuffer
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
throw std::runtime_error("drawFrame: Pixel Buffer is corrupt/empty.");
}
// Lock Mutex to block the runLoop from overwriting the _currentDrawable
std::unique_lock lock(_textureMutex);
// Get the Metal Texture we use for in-memory drawing
auto texture = getTexture(CVPixelBufferGetWidth(pixelBuffer),
CVPixelBufferGetHeight(pixelBuffer));
// Get & Lock the writeable Texture from the Metal Drawable
GrMtlTextureInfo fbInfo;
fbInfo.fTexture.retain((__bridge void*)texture);
GrBackendRenderTarget backendRT(texture.width,
texture.height,
1,
fbInfo);
auto context = _offscreenContext.skiaContext.get();
// Create a Skia Surface from the writable Texture
auto surface = SkSurface::MakeFromBackendRenderTarget(context,
backendRT,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
nullptr,
nullptr);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
// Lock the Frame's PixelBuffer for the duration of the Frame Processor so the user can safely do operations on it
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
// Converts the CMSampleBuffer to an SkImage - RGB.
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
auto canvas = surface->getCanvas();
// Clear everything so we keep it at a clean state
canvas->clear(SkColors::kBlack);
// Draw the Image into the Frame (aspectRatio: cover)
// The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
// but that'll just overwrite the existing one - no need to worry.
canvas->drawImage(image, 0, 0);
// Call the JS Frame Processor.
drawCallback(canvas);
// Flush all appended operations on the canvas and commit it to the SkSurface
surface->flushAndSubmit();
_hasNewFrame = true;
lock.unlock();
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
void SkiaMetalCanvasProvider::setSize(int width, int height) {
_width = width;
_height = height;
_layerContext.layer.frame = CGRectMake(0, 0, width, height);
_layerContext.layer.drawableSize = CGSizeMake(width * getPixelDensity(),
height* getPixelDensity());
}
CALayer* SkiaMetalCanvasProvider::getLayer() { return _layerContext.layer; }
@end

View File

@@ -1,38 +0,0 @@
//
// VisionDisplayLink.h
// VisionCamera
//
// Created by Marc Rousavy on 28.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef DisplayLink_h
#define DisplayLink_h
#import <CoreFoundation/CoreFoundation.h>
#import <UIKit/UIKit.h>
typedef void (^block_t)(double);
@interface VisionDisplayLink : NSObject {
CADisplayLink *_displayLink;
double _currentFps;
double _previousFrameTimestamp;
}
@property(nonatomic, copy) block_t updateBlock;
// Start the DisplayLink's runLoop
- (void)start:(block_t)block;
// Stop the DisplayLink's runLoop
- (void)stop;
// Get the current FPS value
- (double)currentFps;
// The FPS value this DisplayLink is targeting
- (double)targetFps;
@end
#endif /* VisionDisplayLink_h */

View File

@@ -1,63 +0,0 @@
//
// VisionDisplayLink.m
// VisionCamera
//
// Created by Marc Rousavy on 28.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "VisionDisplayLink.h"
#import <Foundation/Foundation.h>
@implementation VisionDisplayLink
- (void)start:(block_t)block {
self.updateBlock = block;
// check whether the loop is already running
if (_displayLink == nil) {
// specify update method
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(update:)];
// Start a new Queue/Thread that will run the runLoop
dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, -1);
dispatch_queue_t queue = dispatch_queue_create("mrousavy/VisionCamera.preview", qos);
dispatch_async(queue, ^{
// Add the display link to the current run loop (thread on which we're currently running on)
NSRunLoop* loop = [NSRunLoop currentRunLoop];
[self->_displayLink addToRunLoop:loop forMode:NSRunLoopCommonModes];
// Run the runLoop (blocking)
[loop run];
NSLog(@"VisionCamera: DisplayLink runLoop ended.");
});
}
}
- (void)stop {
// check whether the loop is already stopped
if (_displayLink != nil) {
// if the display link is present, it gets invalidated (loop stops)
[_displayLink invalidate];
_displayLink = nil;
}
}
- (void)update:(CADisplayLink *)sender {
double time = sender.timestamp;
double diff = time - _previousFrameTimestamp;
_currentFps = 1.0 / diff;
_previousFrameTimestamp = time;
_updateBlock(time);
}
- (double)targetFps {
return 1.0 / _displayLink.duration;
}
- (double)currentFps {
return _currentFps;
}
@end

View File

@@ -7,14 +7,11 @@
objects = {
/* Begin PBXBuildFile section */
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.mm in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */; };
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; };
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */; };
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8248867292644EF00729383 /* PreviewSkiaView.mm */; };
B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */ = {isa = PBXBuildFile; fileRef = B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */; };
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B82F3A0A2A6896E3002BB804 /* PreviewView.swift */; };
B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* NativePreviewView.swift */; };
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = B841262E292E41A1001AB448 /* SkImageHelpers.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
@@ -23,7 +20,6 @@
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
B8805067266798B600EAD7F2 /* JSConsoleHelper.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */; };
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */; };
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
@@ -44,7 +40,6 @@
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517325E0102000DB86D6 /* EnumParserError.swift */; };
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */; };
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */; };
B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */; };
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */; };
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */; };
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */; };
@@ -58,7 +53,6 @@
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */ = {isa = PBXBuildFile; fileRef = B88A020C2934FC22009E035A /* VisionDisplayLink.m */; };
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
@@ -67,6 +61,8 @@
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */; };
B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
@@ -83,23 +79,16 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaMetalRenderContext.h; sourceTree = "<group>"; };
B80A319E293A5C10003EE681 /* SkiaRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderContext.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginRegistry.mm; sourceTree = "<group>"; };
B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorCallback.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorUtils.mm; sourceTree = "<group>"; };
B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorUtils.h; sourceTree = "<group>"; };
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B8127E382A68871C00B06972 /* SkiaPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewView.swift; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
B8248866292644E300729383 /* PreviewSkiaView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PreviewSkiaView.h; sourceTree = "<group>"; };
B8248867292644EF00729383 /* PreviewSkiaView.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PreviewSkiaView.mm; sourceTree = "<group>"; };
B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "RCTBridge+runOnJS.h"; sourceTree = "<group>"; };
B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "RCTBridge+runOnJS.mm"; sourceTree = "<group>"; };
B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */ = {isa = PBXFileReference; explicitFileType = sourcecode.cpp.h; fileEncoding = 4; path = SkiaMetalCanvasProvider.h; sourceTree = "<group>"; };
B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaMetalCanvasProvider.mm; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
B8412630292E41AD001AB448 /* SkImageHelpers.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkImageHelpers.h; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
@@ -107,12 +96,11 @@
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewDisplayLink.swift; sourceTree = "<group>"; };
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSConsoleHelper.h; sourceTree = "<group>"; };
B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSConsoleHelper.mm; sourceTree = "<group>"; };
B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
@@ -134,7 +122,6 @@
B887517325E0102000DB86D6 /* EnumParserError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EnumParserError.swift; sourceTree = "<group>"; };
B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoStabilizationMode+descriptor.swift"; sourceTree = "<group>"; };
B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVVideoCodecType+descriptor.swift"; sourceTree = "<group>"; };
B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession.Preset+descriptor.swift"; sourceTree = "<group>"; };
B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.TorchMode+descriptor.swift"; sourceTree = "<group>"; };
B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput.QualityPrioritization+descriptor.swift"; sourceTree = "<group>"; };
B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.DeviceType+descriptor.swift"; sourceTree = "<group>"; };
@@ -149,17 +136,25 @@
B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
B88A020C2934FC22009E035A /* VisionDisplayLink.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisionDisplayLink.m; sourceTree = "<group>"; };
B88A020E2934FC29009E035A /* VisionDisplayLink.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionDisplayLink.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
B89A28742A68795E0092207F /* SkiaRenderer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaRenderer.mm; sourceTree = "<group>"; };
B89A28752A68796A0092207F /* SkiaRenderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderer.h; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaFrameProcessor.h; sourceTree = "<group>"; };
B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaFrameProcessor.mm; sourceTree = "<group>"; };
B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = DrawableFrameHostObject.mm; sourceTree = "<group>"; };
B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DrawableFrameHostObject.h; sourceTree = "<group>"; };
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessor.mm; sourceTree = "<group>"; };
B8F7DDD1266F715D00120533 /* Frame.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Frame.m; sourceTree = "<group>"; };
/* End PBXFileReference section */
@@ -192,15 +187,18 @@
B887518425E0102000DB86D6 /* CameraView.swift */,
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */,
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */,
B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */,
B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */,
B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
B83D5EE629377117000AFD2F /* PreviewView.swift */,
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */,
B83D5EE629377117000AFD2F /* NativePreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8FCA20C292669B800F1AC82 /* Skia Render Layer */,
B887516125E0102000DB86D6 /* Extensions */,
@@ -239,12 +237,8 @@
B887516F25E0102000DB86D6 /* ReactLogger.swift */,
B887517025E0102000DB86D6 /* Promise.swift */,
B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */,
B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */,
B81D41EF263C86F900B041FD /* JSIUtils.h */,
B8994E6B263F03E100069589 /* JSIUtils.mm */,
B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */,
B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */,
);
path = "React Utils";
sourceTree = "<group>";
@@ -256,7 +250,6 @@
B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */,
B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */,
B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */,
B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */,
B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */,
B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */,
B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */,
@@ -274,9 +267,8 @@
B8DCF2D725EA940700EA5C72 /* Frame Processor */ = {
isa = PBXGroup;
children = (
B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */,
B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */,
B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */,
B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */,
B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */,
B8103E5725FF56F0007A1684 /* Frame.h */,
B8F7DDD1266F715D00120533 /* Frame.m */,
B84760A22608EE38004C3180 /* FrameHostObject.h */,
@@ -284,7 +276,7 @@
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */,
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */,
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */,
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */,
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */,
);
@@ -294,15 +286,17 @@
B8FCA20C292669B800F1AC82 /* Skia Render Layer */ = {
isa = PBXGroup;
children = (
B8248866292644E300729383 /* PreviewSkiaView.h */,
B8248867292644EF00729383 /* PreviewSkiaView.mm */,
B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */,
B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */,
B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */,
B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */,
B8412630292E41AD001AB448 /* SkImageHelpers.h */,
B841262E292E41A1001AB448 /* SkImageHelpers.mm */,
B88A020E2934FC29009E035A /* VisionDisplayLink.h */,
B88A020C2934FC22009E035A /* VisionDisplayLink.m */,
B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */,
B80A319E293A5C10003EE681 /* SkiaRenderContext.h */,
B89A28752A68796A0092207F /* SkiaRenderer.h */,
B89A28742A68795E0092207F /* SkiaRenderer.mm */,
B8127E382A68871C00B06972 /* SkiaPreviewView.swift */,
B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */,
B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */,
B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */,
);
path = "Skia Render Layer";
sourceTree = "<group>";
@@ -410,13 +404,13 @@
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */,
B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
@@ -425,44 +419,40 @@
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */,
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */,
B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */,
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */,
B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */,
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */,
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */,
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */,
B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */,
B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */,
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.mm in Sources */,
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */,
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
B8805067266798B600EAD7F2 /* JSConsoleHelper.mm in Sources */,
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */,
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
);