chore: Remove Skia 🎨 (#1740)

* Revert "feat: Skia for Android (#1731)"

This reverts commit a7c137da07.

* Remove some skia

* Remove all the Skia stuff.

* Update useFrameProcessor.ts

* Update lockfiles

* fix: Use native Preview again

* Use `OpenGLTexture&` again

* Remove `PreviewOutput` (we use `SurfaceView` in parallel)

* fix: Log photo widths

* fix: Fix cpplint
Authored by Marc Rousavy on 2023-09-01 12:20:17 +02:00, committed by GitHub
parent 22829083cd
commit 0a28454579
75 changed files with 404 additions and 2415 deletions

View File

@@ -245,7 +245,6 @@ enum CaptureError {
enum SystemError: String {
case noManager = "no-camera-manager"
case skiaUnavailable = "skia-unavailable"
case frameProcessorsUnavailable = "frame-processors-unavailable"
var code: String {
@@ -256,8 +255,6 @@ enum SystemError: String {
switch self {
case .noManager:
return "No Camera Manager was found."
case .skiaUnavailable:
return "Skia Integration is unavailable - is @shopify/react-native-skia installed?"
case .frameProcessorsUnavailable:
return "Frame Processors are unavailable - is react-native-worklets-core installed?"
}

View File

@@ -10,41 +10,10 @@ import AVFoundation
import Foundation
extension CameraView {
#if VISION_CAMERA_ENABLE_SKIA
@objc
func getSkiaRenderer() -> SkiaRenderer {
if skiaRenderer == nil {
skiaRenderer = SkiaRenderer()
}
return skiaRenderer!
}
#endif
public func setupPreviewView() {
switch previewType {
case "none":
previewView?.removeFromSuperview()
previewView = nil
case "native":
// Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
if previewView is NativePreviewView { return }
previewView?.removeFromSuperview()
previewView = NativePreviewView(frame: frame, session: captureSession)
addSubview(previewView!)
case "skia":
// Skia Preview View allows user to draw onto a Frame in a Frame Processor
#if VISION_CAMERA_ENABLE_SKIA
if previewView is SkiaPreviewView { return }
previewView?.removeFromSuperview()
previewView = SkiaPreviewView(frame: frame, skiaRenderer: getSkiaRenderer())
addSubview(previewView!)
#else
invokeOnError(.system(.skiaUnavailable))
return
#endif
default:
invokeOnError(.parameter(.invalid(unionName: "previewType", receivedValue: previewType as String)))
}
previewView?.removeFromSuperview()
previewView = NativePreviewView(frame: frame, session: captureSession)
addSubview(previewView!)
}
internal func setupFpsGraph() {
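With the Skia preview removed, setupPreviewView now unconditionally creates the native AVFoundation preview. The NativePreviewView implementation itself is not part of this diff; as a rough sketch (class name and details assumed here), such a view typically backs the UIView with an AVCaptureVideoPreviewLayer so the session renders straight into it on the GPU:

import AVFoundation
import UIKit

// Hypothetical sketch - not the actual NativePreviewView from this repository.
class NativePreviewViewSketch: UIView {
  // Back the view with AVCaptureVideoPreviewLayer so the preview stays in its
  // native YUV format and is composited entirely on the GPU.
  override class var layerClass: AnyClass {
    return AVCaptureVideoPreviewLayer.self
  }

  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    // swiftlint:disable:next force_cast
    return layer as! AVCaptureVideoPreviewLayer
  }

  init(frame: CGRect, session: AVCaptureSession) {
    super.init(frame: frame)
    videoPreviewLayer.session = session
    videoPreviewLayer.videoGravity = .resizeAspectFill
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) has not been implemented")
  }
}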

View File

@@ -26,8 +26,7 @@ private let propsThatRequireReconfiguration = ["cameraId",
"photo",
"video",
"enableFrameProcessor",
"pixelFormat",
"previewType"]
"pixelFormat"]
private let propsThatRequireDeviceReconfiguration = ["fps",
"hdr",
"lowLightBoost"]
@@ -59,7 +58,6 @@ public final class CameraView: UIView {
@objc var zoom: NSNumber = 1.0 // in "factor"
@objc var enableFpsGraph = false
@objc var videoStabilizationMode: NSString?
@objc var previewType: NSString = "none"
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@@ -93,9 +91,6 @@ public final class CameraView: UIView {
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
@objc public var frameProcessor: FrameProcessor?
#endif
#if VISION_CAMERA_ENABLE_SKIA
internal var skiaRenderer: SkiaRenderer?
#endif
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
@@ -188,11 +183,6 @@ public final class CameraView: UIView {
let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")
if changedProps.contains("previewType") {
DispatchQueue.main.async {
self.setupPreviewView()
}
}
if changedProps.contains("enableFpsGraph") {
DispatchQueue.main.async {
self.setupFpsGraph()

View File

@@ -41,7 +41,6 @@ RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(pixelFormat, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(previewType, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableFpsGraph, BOOL);

View File

@@ -18,10 +18,10 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isDrawable")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("pixelFormat")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
@@ -105,9 +105,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
if (name == "isDrawable") {
return jsi::Value(false);
}
if (name == "isValid") {
auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
return jsi::Value(isValid);
@@ -175,6 +172,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
return jsi::Value((double) bytesPerRow);
}
if (name == "planesCount") {
auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
return jsi::Value((double) planesCount);
}
// fallback to base implementation
return HostObject::get(runtime, propName);

View File

@@ -24,11 +24,6 @@
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#if VISION_CAMERA_ENABLE_SKIA
#import "SkiaRenderer.h"
#import "../Skia Render Layer/SkiaFrameProcessor.h"
#endif
// Swift forward-declarations
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues: NSObject
@@ -38,9 +33,6 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView: UIView
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
#if VISION_CAMERA_ENABLE_SKIA
- (SkiaRenderer* _Nonnull)getSkiaRenderer;
#endif
@end
using namespace facebook;
@@ -80,7 +72,6 @@ std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& r
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("isSkiaEnabled")));
return result;
}
@@ -96,15 +87,6 @@ void VisionCameraProxy::setFrameProcessor(jsi::Runtime& runtime, int viewTag, co
view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:worklet
context:_workletContext];
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:worklet
context:_workletContext
skiaRenderer:skiaRenderer];
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
@@ -135,13 +117,6 @@ jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime, std
jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "isSkiaEnabled") {
#ifdef VISION_CAMERA_ENABLE_SKIA
return jsi::Value(true);
#else
return jsi::Value(false);
#endif
}
if (name == "setFrameProcessor") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"),

View File

@@ -1,35 +0,0 @@
//
// DrawableFrameHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <jsi/jsi.h>
#import "../Frame Processor/FrameHostObject.h"
#import "../Frame Processor/Frame.h"
#import <CoreMedia/CMSampleBuffer.h>
#import "SkCanvas.h"
#import "JsiSkCanvas.h"
using namespace facebook;
class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
public:
explicit DrawableFrameHostObject(Frame* frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
FrameHostObject(frame), _canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
void invalidateCanvas();
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};

View File

@@ -1,83 +0,0 @@
//
// DrawableFrameHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "DrawableFrameHostObject.h"
#import "SkCanvas.h"
#import "SkImageHelpers.h"
std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
auto result = FrameHostObject::getPropertyNames(rt);
// Skia - Render Frame
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (_canvas != nullptr) {
auto canvasPropNames = _canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (_canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
// convert CMSampleBuffer to SkImage
auto context = _canvas->getCanvas()->recordingContext();
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
// draw SkImage
if (count > 0) {
// ..with paint/shader
auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
auto paint = paintHostObject->getObject();
_canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
} else {
// ..without paint/shader
_canvas->getCanvas()->drawImage(image, 0, 0);
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "isDrawable") {
return jsi::Value(_canvas != nullptr);
}
if (_canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
auto result = _canvas->get(runtime, propName);
if (!result.isUndefined()) {
return result;
}
}
// fallback to base implementation
return FrameHostObject::get(runtime, propName);
}
void DrawableFrameHostObject::invalidateCanvas() {
_canvas = nullptr;
}

View File

@@ -1,42 +0,0 @@
//
// SkImageHelpers.h
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <MetalKit/MetalKit.h>
#import <include/gpu/GrRecordingContext.h>
#import "SkImage.h"
#import "SkSize.h"
#import "SkRect.h"
class SkImageHelpers {
public:
SkImageHelpers() = delete;
public:
/**
Convert a CMSampleBuffer to an SkImage. Format has to be RGB.
*/
static sk_sp<SkImage> convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer);
/**
Convert a MTLTexture to an SkImage. Format has to be RGB.
*/
static sk_sp<SkImage> convertMTLTextureToSkImage(GrRecordingContext* context, id<MTLTexture> mtlTexture);
/**
Creates a Center Crop Transformation Rect so that the source rect fills (aspectRatio: cover) the destination rect.
The return value should be passed as the sourceRect to a canvas->draw...Rect(..) function; the destinationRect should stay the same.
*/
static SkRect createCenterCropRect(SkRect source, SkRect destination);
private:
static SkRect inscribe(SkSize size, SkRect rect);
};
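The "cover" math documented above can be illustrated with a small, hypothetical CoreGraphics helper (not part of the codebase): take the largest sub-rect of the source that matches the destination's aspect ratio, then center it inside the source.

import CoreGraphics

// Hypothetical Swift sketch of the center-crop ("cover") rect computation.
func centerCropRect(source: CGRect, destination: CGRect) -> CGRect {
  let sourceAspect = source.width / source.height
  let destinationAspect = destination.width / destination.height

  // Largest size that fits inside `source` while matching the destination's aspect ratio.
  let cropSize: CGSize
  if destinationAspect > sourceAspect {
    // Destination is relatively wider: keep the full width, crop the height.
    cropSize = CGSize(width: source.width, height: source.width / destinationAspect)
  } else {
    // Destination is relatively taller: keep the full height, crop the width.
    cropSize = CGSize(width: source.height * destinationAspect, height: source.height)
  }

  // Center the crop inside the source rect.
  return CGRect(x: source.midX - cropSize.width / 2,
                y: source.midY - cropSize.height / 2,
                width: cropSize.width,
                height: cropSize.height)
}

// Example: a 4000x3000 camera frame covering a 1080x1920 portrait surface
// yields a centered 1687.5x3000 crop of the source frame.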

View File

@@ -1,116 +0,0 @@
//
// CMSampleBuffer+toSkImage.m
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "SkImageHelpers.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkImage.h>
#import <include/gpu/ganesh/SkImageGanesh.h>
#import <include/gpu/mtl/GrMtlTypes.h>
#import <include/gpu/GrBackendSurface.h>
#include <TargetConditionals.h>
#if TARGET_RT_BIG_ENDIAN
# define FourCC2Str(fourcc) (const char[]){*((char*)&fourcc), *(((char*)&fourcc)+1), *(((char*)&fourcc)+2), *(((char*)&fourcc)+3),0}
#else
# define FourCC2Str(fourcc) (const char[]){*(((char*)&fourcc)+3), *(((char*)&fourcc)+2), *(((char*)&fourcc)+1), *(((char*)&fourcc)+0),0}
#endif
inline CVMetalTextureCacheRef getTextureCache() {
static CVMetalTextureCacheRef textureCache = nil;
if (textureCache == nil) {
// Create a new Texture Cache
auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
nil,
MTLCreateSystemDefaultDevice(),
nil,
&textureCache);
if (result != kCVReturnSuccess || textureCache == nil) {
throw std::runtime_error("Failed to create Metal Texture Cache!");
}
}
return textureCache;
}
sk_sp<SkImage> SkImageHelpers::convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer) {
auto pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
double width = CVPixelBufferGetWidth(pixelBuffer);
double height = CVPixelBufferGetHeight(pixelBuffer);
// Make sure the format is RGB (BGRA_8888)
auto format = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (format != kCVPixelFormatType_32BGRA) {
auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + FourCC2Str(format) + std::string(") - cannot convert to SkImage!");
throw std::runtime_error(error);
}
auto textureCache = getTextureCache();
// Convert CMSampleBuffer* -> CVMetalTexture*
CVMetalTextureRef cvTexture;
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache,
pixelBuffer,
nil,
MTLPixelFormatBGRA8Unorm,
width,
height,
0, // plane index
&cvTexture);
auto mtlTexture = CVMetalTextureGetTexture(cvTexture);
auto image = convertMTLTextureToSkImage(context, mtlTexture);
// Release the Texture wrapper (it will still be strong)
CFRelease(cvTexture);
return image;
}
sk_sp<SkImage> SkImageHelpers::convertMTLTextureToSkImage(GrRecordingContext* context, id<MTLTexture> texture) {
// Convert the rendered MTLTexture to an SkImage
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendTexture backendTexture((int)texture.width,
(int)texture.height,
GrMipmapped::kNo,
textureInfo);
// TODO: Adopt or Borrow?
auto image = SkImages::AdoptTextureFrom(context,
backendTexture,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
kOpaque_SkAlphaType,
SkColorSpace::MakeSRGB());
return image;
}
SkRect SkImageHelpers::createCenterCropRect(SkRect sourceRect, SkRect destinationRect) {
SkSize src;
if (destinationRect.width() / destinationRect.height() > sourceRect.width() / sourceRect.height()) {
src = SkSize::Make(sourceRect.width(), (sourceRect.width() * destinationRect.height()) / destinationRect.width());
} else {
src = SkSize::Make((sourceRect.height() * destinationRect.width()) / destinationRect.height(), sourceRect.height());
}
return inscribe(src, sourceRect);
}
SkRect SkImageHelpers::inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta,
size.width(),
size.height());
}

View File

@@ -1,27 +0,0 @@
//
// SkiaFrameProcessor.h
// VisionCamera
//
// Created by Marc Rousavy on 14.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import "FrameProcessor.h"
#import "SkiaRenderer.h"
#ifdef __cplusplus
#import "WKTJsiWorklet.h"
#endif
@interface SkiaFrameProcessor: FrameProcessor
#ifdef __cplusplus
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer;
#endif
@end

View File

@@ -1,56 +0,0 @@
//
// SkiaFrameProcessor.mm
// VisionCamera
//
// Created by Marc Rousavy on 14.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SkiaFrameProcessor.h"
#import "SkiaRenderer.h"
#import <memory>
#import <jsi/jsi.h>
#import "DrawableFrameHostObject.h"
#import <react-native-skia/JsiSkCanvas.h>
#import <react-native-skia/RNSkiOSPlatformContext.h>
using namespace facebook;
@implementation SkiaFrameProcessor {
SkiaRenderer* _skiaRenderer;
std::shared_ptr<RNSkia::JsiSkCanvas> _skiaCanvas;
}
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer {
if (self = [super initWithWorklet:worklet
context:context]) {
_skiaRenderer = skiaRenderer;
auto platformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(context->getJsRuntime(),
RCTBridge.currentBridge);
_skiaCanvas = std::make_shared<RNSkia::JsiSkCanvas>(platformContext);
}
return self;
}
- (void)call:(Frame*)frame {
[_skiaRenderer renderCameraFrameToOffscreenSurface:frame.buffer
withDrawCallback:^(SkiaCanvas _Nonnull canvas) {
// Create the Frame Host Object wrapping the internal Frame and Skia Canvas
self->_skiaCanvas->setCanvas(static_cast<SkCanvas*>(canvas));
auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, self->_skiaCanvas);
// Call JS Frame Processor
[self callWithFrameHostObject:frameHostObject];
// Remove Skia Canvas from Host Object because it is no longer valid
frameHostObject->invalidateCanvas();
}];
}
@end

View File

@@ -1,51 +0,0 @@
//
// SkiaPreviewDisplayLink.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import Foundation
class SkiaPreviewDisplayLink {
private var displayLink: CADisplayLink?
private let callback: (_ timestamp: Double) -> Void
init(callback: @escaping (_ timestamp: Double) -> Void) {
self.callback = callback
}
deinit {
stop()
}
@objc
func update(_ displayLink: CADisplayLink) {
callback(displayLink.timestamp)
}
func start() {
if displayLink == nil {
let displayLink = CADisplayLink(target: self, selector: #selector(update))
let queue = DispatchQueue(label: "mrousavy/VisionCamera.preview",
qos: .userInteractive,
attributes: [],
autoreleaseFrequency: .inherit,
target: nil)
queue.async {
displayLink.add(to: .current, forMode: .common)
self.displayLink = displayLink
ReactLogger.log(level: .info, message: "Starting Skia Preview Display Link...")
RunLoop.current.run()
ReactLogger.log(level: .info, message: "Skia Preview Display Link stopped.")
}
}
}
func stop() {
displayLink?.invalidate()
displayLink = nil
}
}
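A hypothetical call site (not from this diff) shows the intended lifecycle: the callback fires on the dedicated preview queue once per display refresh until stop() is called.

// Illustration only - hypothetical usage of SkiaPreviewDisplayLink.
let displayLink = SkiaPreviewDisplayLink { timestamp in
  // Runs on the "mrousavy/VisionCamera.preview" queue, once per screen refresh.
  print("render tick at \(timestamp)")
}
displayLink.start() // creates the CADisplayLink and spins up its background run loop
// ... later, e.g. when the preview view leaves the window:
displayLink.stop()  // invalidates the CADisplayLink (also happens automatically in deinit)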

View File

@@ -1,81 +0,0 @@
//
// SkiaPreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import Foundation
// MARK: - SkiaPreviewLayer
class SkiaPreviewLayer: CAMetalLayer {
private var pixelRatio: CGFloat {
return UIScreen.main.scale
}
init(device: MTLDevice) {
super.init()
framebufferOnly = true
self.device = device
isOpaque = false
pixelFormat = .bgra8Unorm
contentsScale = pixelRatio
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setSize(width: CGFloat, height: CGFloat) {
frame = CGRect(x: 0, y: 0, width: width, height: height)
drawableSize = CGSize(width: width * pixelRatio,
height: height * pixelRatio)
}
}
// MARK: - SkiaPreviewView
class SkiaPreviewView: PreviewView {
private let skiaRenderer: SkiaRenderer
private let previewLayer: SkiaPreviewLayer
private lazy var displayLink = SkiaPreviewDisplayLink(callback: { [weak self] _ in
// Called every time the screen should be re-rendered - e.g. at 60 FPS
if let self = self {
self.skiaRenderer.renderLatestFrame(to: self.previewLayer)
}
})
init(frame: CGRect, skiaRenderer: SkiaRenderer) {
self.skiaRenderer = skiaRenderer
previewLayer = SkiaPreviewLayer(device: skiaRenderer.metalDevice)
super.init(frame: frame)
}
deinit {
self.displayLink.stop()
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func willMove(toSuperview newSuperview: UIView?) {
if newSuperview != nil {
layer.addSublayer(previewLayer)
displayLink.start()
} else {
previewLayer.removeFromSuperlayer()
displayLink.stop()
}
}
override func layoutSubviews() {
previewLayer.setSize(width: bounds.size.width,
height: bounds.size.height)
}
}

View File

@@ -1,26 +0,0 @@
//
// SkiaRenderContext.h
// VisionCamera
//
// Created by Marc Rousavy on 02.12.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#pragma once
#import <MetalKit/MetalKit.h>
#import <include/gpu/GrDirectContext.h>
#import <include/gpu/mtl/GrMtlTypes.h>
struct RenderContext {
id<MTLDevice> device;
id<MTLCommandQueue> commandQueue;
sk_sp<GrDirectContext> skiaContext;
RenderContext() {
device = MTLCreateSystemDefaultDevice();
commandQueue = id<MTLCommandQueue>(CFRetain((GrMTLHandle)[device newCommandQueue]));
skiaContext = GrDirectContext::MakeMetal((__bridge void*)device,
(__bridge void*)commandQueue);
}
};

View File

@@ -1,45 +0,0 @@
//
// SkiaRenderer.h
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
typedef void* SkiaCanvas;
typedef void(^draw_callback_t)(SkiaCanvas _Nonnull);
/**
A Camera Frame Renderer powered by Skia.
It provides two Contexts, one offscreen and one onscreen.
- Offscreen Context: Allows you to render a Frame into a Skia Canvas and draw onto it using Skia commands
- Onscreen Context: Allows you to render a Frame from the offscreen context onto a Layer allowing it to be displayed for Preview.
The two contexts may run at different Frame Rates.
*/
@interface SkiaRenderer : NSObject
/**
Renders the given Camera Frame to the offscreen Skia Canvas.
The given callback will be executed with a reference to the Skia Canvas
for the user to perform draw operations on (in this case, through a JS Frame Processor)
*/
- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
/**
Renders the latest Frame to the onscreen Layer.
This should be called every time you want the UI to update, e.g. for 60 FPS: every 16.66ms.
*/
- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer;
/**
The Metal Device used for Rendering to the Layer
*/
@property (nonatomic, readonly) id<MTLDevice> _Nonnull metalDevice;
@end
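As a rough sketch of how these two contexts were driven (wiring assumed here, simplified from SkiaFrameProcessor.mm and SkiaPreviewView.swift): the camera pipeline pushes each frame into the offscreen context, while a display link pulls the latest rendered result onto the layer.

import AVFoundation
import UIKit

// Hypothetical glue code - the real wiring lives in SkiaFrameProcessor.mm / SkiaPreviewView.swift.
final class SkiaPipelineSketch {
  let renderer = SkiaRenderer()
  let previewLayer = CAMetalLayer()

  // Offscreen context: called once per camera frame (e.g. 30/60 FPS).
  // The callback is where the JS Frame Processor draws onto the Skia canvas.
  func onCameraFrame(_ sampleBuffer: CMSampleBuffer) {
    renderer.renderCameraFrame(toOffscreenSurface: sampleBuffer) { canvas in
      // draw onto `canvas` here (done by the JS worklet in VisionCamera)
    }
  }

  // Onscreen context: called once per display refresh (e.g. 60/120 FPS);
  // SkiaRenderer skips the draw internally if no new camera frame arrived since the last call.
  func onDisplayRefresh() {
    renderer.renderLatestFrame(to: previewLayer)
  }
}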

View File

@@ -1,212 +0,0 @@
//
// SkiaRenderer.mm
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SkiaRenderer.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import "SkiaRenderContext.h"
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkColorSpace.h>
#import <include/gpu/mtl/GrMtlTypes.h>
#import <include/gpu/GrBackendSurface.h>
#import <include/gpu/ganesh/SkSurfaceGanesh.h>
#import <include/gpu/ganesh/mtl/SkSurfaceMetal.h>
#import "SkImageHelpers.h"
#import <system_error>
#import <memory>
#import <mutex>
@implementation SkiaRenderer {
// The context we draw each Frame on
std::unique_ptr<RenderContext> _offscreenContext;
// The context the preview runs on
std::unique_ptr<RenderContext> _layerContext;
// The texture holding the drawn-to Frame
id<MTLTexture> _texture;
// For synchronization between the two Threads/Contexts
std::mutex _textureMutex;
std::atomic<bool> _hasNewFrame;
}
- (instancetype)init {
if (self = [super init]) {
_offscreenContext = std::make_unique<RenderContext>();
_layerContext = std::make_unique<RenderContext>();
_texture = nil;
_hasNewFrame = false;
}
return self;
}
- (id<MTLDevice>)metalDevice {
return _layerContext->device;
}
- (id<MTLTexture>)getTexture:(NSUInteger)width height:(NSUInteger)height {
if (_texture == nil
|| _texture.width != width
|| _texture.height != height) {
// Create new texture with the given width and height
MTLTextureDescriptor* textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
width:width
height:height
mipmapped:NO];
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
_texture = [_offscreenContext->device newTextureWithDescriptor:textureDescriptor];
}
return _texture;
}
- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
// Wrap in auto release pool since we want the system to clean up after rendering
@autoreleasepool {
// Get the Frame's PixelBuffer
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
throw std::runtime_error("SkiaRenderer: Pixel Buffer is corrupt/empty.");
}
// Lock Mutex to block the runLoop from overwriting the _currentDrawable
std::unique_lock lock(_textureMutex);
// Get the Metal Texture we use for in-memory drawing
auto texture = [self getTexture:CVPixelBufferGetWidth(pixelBuffer)
height:CVPixelBufferGetHeight(pixelBuffer)];
// Get & Lock the writeable Texture from the Metal Drawable
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendRenderTarget backendRenderTarget((int)texture.width,
(int)texture.height,
1,
textureInfo);
auto context = _offscreenContext->skiaContext.get();
// Create a Skia Surface from the writable Texture
auto surface = SkSurfaces::WrapBackendRenderTarget(context,
backendRenderTarget,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
SkColorSpace::MakeSRGB(),
nullptr);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
// Converts the CMSampleBuffer to an SkImage - RGB.
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
auto canvas = surface->getCanvas();
// Clear everything so we keep it at a clean state
canvas->clear(SkColors::kBlack);
// Draw the Image into the Frame (aspectRatio: cover)
// The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
// but that'll just overwrite the existing one - no need to worry.
canvas->drawImage(image, 0, 0);
// Call the draw callback - probably a JS Frame Processor.
callback(static_cast<void*>(canvas));
// Flush all appended operations on the canvas and commit it to the SkSurface
surface->flushAndSubmit();
// Set dirty & free locks
_hasNewFrame = true;
lock.unlock();
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer {
if (!_hasNewFrame) {
// No new Frame has arrived in the meantime.
// We don't need to re-draw the texture to the screen if nothing has changed, abort.
return;
}
@autoreleasepool {
auto context = _layerContext->skiaContext.get();
// Create a Skia Surface from the CAMetalLayer (use to draw to the View)
GrMTLHandle drawableHandle;
auto surface = SkSurfaces::WrapCAMetalLayer(context,
(__bridge GrMTLHandle)layer,
kTopLeft_GrSurfaceOrigin,
1,
kBGRA_8888_SkColorType,
nullptr,
nullptr,
&drawableHandle);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
auto canvas = surface->getCanvas();
// Lock the Mutex so we can operate on the Texture atomically without
// renderFrameToCanvas() overwriting in between from a different thread
std::unique_lock lock(_textureMutex);
auto texture = _texture;
if (texture == nil) return;
// Calculate Center Crop (aspectRatio: cover) transform
auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
sourceRect = SkImageHelpers::createCenterCropRect(sourceRect, destinationRect);
auto offsetX = -sourceRect.left();
auto offsetY = -sourceRect.top();
// The Canvas is equal to the View size, whereas the Frame has a different size (e.g. 4k)
// We scale the Canvas to the exact dimensions of the Frame so that the user can use the Frame as a coordinate system
canvas->save();
auto scaleW = static_cast<double>(surface->width()) / texture.width;
auto scaleH = static_cast<double>(surface->height()) / texture.height;
auto scale = MAX(scaleW, scaleH);
canvas->scale(scale, scale);
canvas->translate(offsetX, offsetY);
// Convert the rendered MTLTexture to an SkImage
auto image = SkImageHelpers::convertMTLTextureToSkImage(context, texture);
// Draw the Texture (Frame) to the Canvas
canvas->drawImage(image, 0, 0);
// Restore the scale & transform
canvas->restore();
surface->flushAndSubmit();
// Pass the drawable into the Metal Command Buffer and submit it to the GPU
id<CAMetalDrawable> drawable = (__bridge id<CAMetalDrawable>)drawableHandle;
id<MTLCommandBuffer> commandBuffer([_layerContext->commandQueue commandBuffer]);
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
// Set flag back to false
_hasNewFrame = false;
lock.unlock();
}
}
@end

View File

@@ -12,7 +12,6 @@
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B82F3A0A2A6896E3002BB804 /* PreviewView.swift */; };
B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* NativePreviewView.swift */; };
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = B841262E292E41A1001AB448 /* SkImageHelpers.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; };
@@ -79,27 +78,22 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B80A319E293A5C10003EE681 /* SkiaRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderContext.h; sourceTree = "<group>"; };
B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; };
B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B8127E382A68871C00B06972 /* SkiaPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewView.swift; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
B8412630292E41AD001AB448 /* SkImageHelpers.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkImageHelpers.h; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewDisplayLink.swift; sourceTree = "<group>"; };
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
@@ -140,17 +134,11 @@
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSINSObjectConversion.mm; sourceTree = "<group>"; };
B89A28742A68795E0092207F /* SkiaRenderer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaRenderer.mm; sourceTree = "<group>"; };
B89A28752A68796A0092207F /* SkiaRenderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderer.h; sourceTree = "<group>"; };
B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaFrameProcessor.h; sourceTree = "<group>"; };
B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaFrameProcessor.mm; sourceTree = "<group>"; };
B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = DrawableFrameHostObject.mm; sourceTree = "<group>"; };
B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DrawableFrameHostObject.h; sourceTree = "<group>"; };
B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
@@ -202,7 +190,6 @@
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */,
B83D5EE629377117000AFD2F /* NativePreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8FCA20C292669B800F1AC82 /* Skia Render Layer */,
B887516125E0102000DB86D6 /* Extensions */,
B887517225E0102000DB86D6 /* Parsers */,
B887516D25E0102000DB86D6 /* React Utils */,
@@ -287,24 +274,6 @@
path = "Frame Processor";
sourceTree = "<group>";
};
B8FCA20C292669B800F1AC82 /* Skia Render Layer */ = {
isa = PBXGroup;
children = (
B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */,
B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */,
B8412630292E41AD001AB448 /* SkImageHelpers.h */,
B841262E292E41A1001AB448 /* SkImageHelpers.mm */,
B80A319E293A5C10003EE681 /* SkiaRenderContext.h */,
B89A28752A68796A0092207F /* SkiaRenderer.h */,
B89A28742A68795E0092207F /* SkiaRenderer.mm */,
B8127E382A68871C00B06972 /* SkiaPreviewView.swift */,
B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */,
B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */,
B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */,
);
path = "Skia Render Layer";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@@ -417,7 +386,6 @@
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */,
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,