feat: Draw onto Frame as if it was a Skia Canvas (#1479)

* Create Shaders.ts

* Add `previewType` and `enableFpsGraph`

* Add RN Skia native dependency

* Add Skia Preview View on iOS

* Pass 1

* Update FrameHostObject.mm

* Wrap Canvas

* Lockfiles

* fix: Fix stuff

* chore: Upgrade RNWorklets

* Add `previewType` to set the Preview

* feat: Add Example

* Update project.pbxproj

* `enableFpsGraph`

* Cache the `std::shared_ptr<FrameHostObject>`

* Update CameraView+RecordVideo.swift

* Update SkiaMetalCanvasProvider.mm

* Android: Integrate Skia Dependency

* fix: Use new Prefix

* Add example for rendering shader

* chore: Upgrade CameraX

* Remove KTX

* Enable `viewBinding`

* Revert "Enable `viewBinding`"

This reverts commit f2a603f53b33ea4311a296422ffd1a910ce03f9e.

* Revert "chore: Upgrade CameraX"

This reverts commit 8dc832cf8754490d31a6192e6c1a1f11cdcd94fe.

* Remove unneeded `ProcessCameraProvider.getInstance()` call

* fix: Add REA hotfix patch

* fix: Fix FrameHostObject dead in runAsync

* fix: Make `runAsync` run truly async by dropping new Frames while executing

* chore: Upgrade RN Worklets to latest

* chore: Upgrade RN Skia

* Revert "Remove KTX"

This reverts commit 253f586633f7af2da992d2279fc206dc62597129.

* Make Skia optional in CMake

* Fix import

* Update CMakeLists.txt

* Update build.gradle

* Update CameraView.kt

* Update CameraView.kt

* Update CameraView.kt

* Update Shaders.ts

* Center Blur

* chore: Upgrade RN Worklets

* feat: Add `toByteArray()`, `orientation`, `isMirrored` and `timestamp` to `Frame` (#1487)

* feat: Implement `orientation` and `isMirrored` on Frame

* feat: Add `toArrayBuffer()` func

* perf: Do faster buffer copy

* feat: Implement `toArrayBuffer()` on Android

* feat: Add `orientation` and `isMirrored` to Android

* feat: Add `timestamp` to Frame

* Update Frame.ts

* Update JImageProxy.h

* Update FrameHostObject.cpp

* Update FrameHostObject.cpp

* Update CameraPage.tsx

* fix: Format Swift
Author:    Marc Rousavy
Date:      2023-02-21 15:00:48 +01:00
Committed: GitHub
Parent:    1f7a2e07f2
Commit:    12f850c8e1

49 changed files with 2166 additions and 85 deletions
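
As context for the diff below, a Frame Processor using the new drawing API might look roughly like the following sketch. This is hypothetical code, not part of this commit; it assumes `useFrameProcessor` from react-native-vision-camera and the `Skia` JS API from @shopify/react-native-skia (`RuntimeEffect`, `RuntimeShaderBuilder`, `ImageFilter.MakeRuntimeShader`, `Paint`), mirroring the shader example added in this PR.

import { useFrameProcessor } from 'react-native-vision-camera';
import { Skia } from '@shopify/react-native-skia';

// SkSL shader that inverts the Frame's colors; the `image` child shader receives the Frame's pixels.
const INVERTED_COLORS_SHADER = `
uniform shader image;
half4 main(vec2 pos) {
  half4 color = image.eval(pos);
  return half4(1.0 - color.rgb, 1.0);
}`;

export function useInvertedColorsFrameProcessor() {
  return useFrameProcessor((frame) => {
    'worklet';
    // For brevity the Skia objects are built per-frame here; a real app would hoist them.
    const runtimeEffect = Skia.RuntimeEffect.Make(INVERTED_COLORS_SHADER);
    if (runtimeEffect == null) throw new Error('Failed to compile SkSL shader!');
    const shaderBuilder = Skia.RuntimeShaderBuilder(runtimeEffect);
    const paint = Skia.Paint();
    paint.setImageFilter(Skia.ImageFilter.MakeRuntimeShader(shaderBuilder, null, null));
    // render(paint) re-draws the Camera Frame onto the Skia Canvas through the given paint.
    frame.render(paint);
  }, []);
}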


@@ -12,6 +12,7 @@
#import <React/RCTViewManager.h>
#import <React/RCTUIManager.h>
#import <React/RCTFPSGraph.h>
#import "FrameProcessorCallback.h"
#import "FrameProcessorRuntimeManager.h"


@@ -134,6 +134,15 @@ extension CameraView {
}
videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
videoOutput!.alwaysDiscardsLateVideoFrames = false
if previewType == "skia" {
// If the PreviewView is a Skia view, we need to use the RGB format since Skia works in the RGB colorspace instead of YUV.
// This introduces a performance overhead, but it's unavoidable: Skia would internally convert
// YUV frames to RGB anyway, since all Shaders and draw operations operate in the RGB space.
videoOutput!.videoSettings = [
String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_32BGRA, // default: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
]
}
captureSession.addOutput(videoOutput!)
}


@@ -9,6 +9,61 @@
import Foundation
extension CameraView {
private func rotateFrameSize(frameSize: CGSize, orientation: UIInterfaceOrientation) -> CGSize {
switch orientation {
case .portrait, .portraitUpsideDown, .unknown:
// swap width and height since the input orientation is rotated
return CGSize(width: frameSize.height, height: frameSize.width)
case .landscapeLeft, .landscapeRight:
// is same as camera sensor orientation
return frameSize
@unknown default:
return frameSize
}
}
/// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system
func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
guard let previewView = previewView else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
guard let viewScale = window?.screen.scale else {
invokeOnError(.unknown(message: "View has no parent Window!"))
return .zero
}
let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
orientation: outputOrientation)
let viewSize = CGSize(width: previewView.bounds.width * viewScale,
height: previewView.bounds.height * viewScale)
let scale = min(frameSize.width / viewSize.width, frameSize.height / viewSize.height)
let scaledViewSize = CGSize(width: viewSize.width * scale, height: viewSize.height * scale)
let overlapX = scaledViewSize.width - frameSize.width
let overlapY = scaledViewSize.height - frameSize.height
let scaledPoint = CGPoint(x: point.x * scale, y: point.y * scale)
return CGPoint(x: scaledPoint.x - (overlapX / 2), y: scaledPoint.y - (overlapY / 2))
}
/// Converts a Point in the UI View Layer to a Point in the Camera Device Sensor coordinate system (x: [0..1], y: [0..1])
func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
orientation: outputOrientation)
let pointInFrame = convertLayerPointToFramePoint(layerPoint: pointInLayer)
return CGPoint(x: pointInFrame.x / frameSize.width, y: pointInFrame.y / frameSize.height)
}
func focus(point: CGPoint, promise: Promise) {
withPromise(promise) {
guard let device = self.videoDeviceInput?.device else {
@@ -18,7 +73,8 @@ extension CameraView {
throw CameraError.device(DeviceError.focusNotSupported)
}
let normalizedPoint = self.videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: point)
// in {0..1} system
let normalizedPoint = captureDevicePointConverted(fromLayerPoint: point)
do {
try device.lockForConfiguration()


@@ -16,7 +16,7 @@ extension CameraView {
}
// Orientation of the output connections (photo, video, frame processor)
private var outputOrientation: UIInterfaceOrientation {
var outputOrientation: UIInterfaceOrientation {
if let userOrientation = orientation as String?,
let parsedOrientation = try? UIInterfaceOrientation(withString: userOrientation) {
// user is overriding output orientation
@@ -27,7 +27,7 @@ extension CameraView {
}
}
internal func updateOrientation() {
func updateOrientation() {
// Updates the Orientation for all rotatable outputs
let isMirrored = videoDeviceInput?.device.position == .front


@@ -190,8 +190,26 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
// Draw Frame to Preview View Canvas (and call Frame Processor)
if captureOutput is AVCaptureVideoDataOutput {
if let previewView = previewView as? PreviewSkiaView {
// Render to Skia PreviewView
previewView.drawFrame(sampleBuffer) { canvas in
// Call JS Frame Processor before passing Frame to GPU - allows user to draw
guard let frameProcessor = self.frameProcessorCallback else { return }
let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
frameProcessor(frame, canvas)
}
} else {
// Call JS Frame Processor. User cannot draw, since we don't have a Skia Canvas.
guard let frameProcessor = frameProcessorCallback else { return }
let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
frameProcessor(frame, nil)
}
}
// Record Video Frame/Audio Sample to File
if isRecording {
// Write Video / Audio frame to file
guard let recordingSession = recordingSession else {
invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
return
@@ -210,14 +228,21 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
}
if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
// Call the JavaScript Frame Processor func (worklet)
let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
frameProcessor(frame)
}
#if DEBUG
if captureOutput is AVCaptureVideoDataOutput {
// Update FPS Graph per Frame
if let fpsGraph = fpsGraph {
DispatchQueue.main.async {
fpsGraph.onTick(CACurrentMediaTime())
}
}
}
#endif
}
private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput, fileType: AVFileType, videoCodec: AVVideoCodecType?) -> [String: Any]? {
private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput,
fileType: AVFileType,
videoCodec: AVVideoCodecType?) -> [String: Any]? {
if videoCodec != nil {
return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec!, assetWriterOutputFileType: fileType)
} else {
@@ -233,7 +258,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
return .up
}
switch UIDevice.current.orientation {
switch outputOrientation {
case .portrait:
return cameraPosition == .front ? .leftMirrored : .right
@@ -246,8 +271,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
case .landscapeRight:
return cameraPosition == .front ? .upMirrored : .down
case .unknown, .faceUp, .faceDown:
fallthrough
case .unknown:
return .up
@unknown default:
return .up
}


@@ -26,7 +26,8 @@ private let propsThatRequireReconfiguration = ["cameraId",
"preset",
"photo",
"video",
"enableFrameProcessor"]
"enableFrameProcessor",
"previewType"]
private let propsThatRequireDeviceReconfiguration = ["fps",
"hdr",
"lowLightBoost",
@@ -60,7 +61,9 @@ public final class CameraView: UIView {
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 1.0 // in "factor"
@objc var enableFpsGraph = false
@objc var videoStabilizationMode: NSString?
@objc var previewType: NSString?
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@@ -92,7 +95,6 @@ public final class CameraView: UIView {
internal var isRecording = false
internal var recordingSession: RecordingSession?
@objc public var frameProcessorCallback: FrameProcessorCallback?
internal var lastFrameProcessorCall = DispatchTime.now().uptimeNanoseconds
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+Zoom
@@ -103,27 +105,19 @@ public final class CameraView: UIView {
internal let videoQueue = CameraQueues.videoQueue
internal let audioQueue = CameraQueues.audioQueue
internal var previewView: UIView?
#if DEBUG
internal var fpsGraph: RCTFPSGraph?
#endif
/// Returns whether the AVCaptureSession is currently running (reflected by isActive)
var isRunning: Bool {
return captureSession.isRunning
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
// pragma MARK: Setup
override public init(frame: CGRect) {
super.init(frame: frame)
videoPreviewLayer.session = captureSession
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.frame = layer.bounds
NotificationCenter.default.addObserver(self,
selector: #selector(sessionRuntimeError),
@@ -141,6 +135,8 @@ public final class CameraView: UIView {
selector: #selector(onOrientationChanged),
name: UIDevice.orientationDidChangeNotification,
object: nil)
setupPreviewView()
}
@available(*, unavailable)
@@ -165,15 +161,55 @@ public final class CameraView: UIView {
override public func willMove(toSuperview newSuperview: UIView?) {
super.willMove(toSuperview: newSuperview)
if !isMounted {
isMounted = true
guard let onViewReady = onViewReady else {
return
if newSuperview != nil {
if !isMounted {
isMounted = true
guard let onViewReady = onViewReady else {
return
}
onViewReady(nil)
}
onViewReady(nil)
}
}
override public func layoutSubviews() {
if let previewView = previewView {
previewView.frame = frame
previewView.bounds = bounds
}
}
func setupPreviewView() {
if previewType == "skia" {
// The Skia Preview View allows the user to draw onto a Frame in a Frame Processor
if previewView is PreviewSkiaView { return }
previewView?.removeFromSuperview()
previewView = PreviewSkiaView(frame: frame)
} else {
// The normal iOS PreviewView is lighter and more performant (YUV format, GPU only)
if previewView is PreviewView { return }
previewView?.removeFromSuperview()
previewView = PreviewView(frame: frame, session: captureSession)
}
addSubview(previewView!)
}
func setupFpsGraph() {
#if DEBUG
if enableFpsGraph {
if fpsGraph != nil { return }
fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
fpsGraph!.layer.zPosition = 9999.0
addSubview(fpsGraph!)
} else {
fpsGraph?.removeFromSuperview()
fpsGraph = nil
}
#endif
}
// pragma MARK: Props updating
override public final func didSetProps(_ changedProps: [String]!) {
ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
@@ -188,7 +224,18 @@ public final class CameraView: UIView {
let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
let shouldUpdateOrientation = changedProps.contains("orientation")
let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")
if changedProps.contains("previewType") {
DispatchQueue.main.async {
self.setupPreviewView()
}
}
if changedProps.contains("enableFpsGraph") {
DispatchQueue.main.async {
self.setupFpsGraph()
}
}
if shouldReconfigure ||
shouldReconfigureAudioSession ||
@@ -199,6 +246,7 @@ public final class CameraView: UIView {
shouldReconfigureDevice ||
shouldUpdateVideoStabilization ||
shouldUpdateOrientation {
// Video Configuration
cameraQueue.async {
if shouldReconfigure {
self.configureCaptureSession()


@@ -42,8 +42,10 @@ RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(preset, NSString);
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(previewType, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableFpsGraph, BOOL);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSString);
// Camera View Events
RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);
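
As a usage sketch for the two new props exported above (hypothetical code, not part of this commit; it assumes the existing `<Camera>` component and `useCameraDevices` hook from react-native-vision-camera):

import * as React from 'react';
import { StyleSheet } from 'react-native';
import { Camera, useCameraDevices } from 'react-native-vision-camera';

export function SkiaCameraPage(): React.ReactElement | null {
  const device = useCameraDevices().back;
  if (device == null) return null;
  return (
    <Camera
      style={StyleSheet.absoluteFill}
      device={device}
      isActive={true}
      // use the new Skia preview instead of the default native preview layer
      previewType="skia"
      // overlay the DEBUG-only FPS graph
      enableFpsGraph={true}
    />
  );
}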


@@ -10,13 +10,20 @@
#import <jsi/jsi.h>
#import <CoreMedia/CMSampleBuffer.h>
#import "Frame.h"
#import "SkCanvas.h"
#import "JsiSkCanvas.h"
using namespace facebook;
class JSI_EXPORT FrameHostObject: public jsi::HostObject {
public:
explicit FrameHostObject(Frame* frame): frame(frame) { }
explicit FrameHostObject(Frame* frame): frame(frame) {}
explicit FrameHostObject(Frame* frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
frame(frame), canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
@@ -24,4 +31,5 @@ public:
public:
Frame* frame;
std::shared_ptr<RNSkia::JsiSkCanvas> canvas;
};


@@ -9,8 +9,11 @@
#import "FrameHostObject.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
#import "JsiHostObject.h"
#import "JsiSharedValue.h"
#import "WKTJsiHostObject.h"
#import "SkCanvas.h"
#import "../Skia Render Layer/SkImageHelpers.h"
#import "../../cpp/JSITypedArray.h"
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
std::vector<jsi::PropNameID> result;
@@ -18,15 +21,37 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
// Debugging
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
// Ref Management
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
// Skia
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (canvas != nullptr) {
auto canvasPropNames = canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
@@ -55,7 +80,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
incrementRefCount);
}
if (name == "decrementRefCount") {
auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame Buffer.
@@ -67,6 +92,59 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
decrementRefCount);
}
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
// convert CMSampleBuffer to SkImage
auto context = canvas->getCanvas()->recordingContext();
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
// draw SkImage
if (count > 0) {
// ..with paint/shader
auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
auto paint = paintHostObject->getObject();
canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
} else {
// ..without paint/shader
canvas->getCanvas()->drawImage(image, 0, 0);
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "toArrayBuffer") {
auto toArrayBuffer = JSI_HOST_FUNCTION_LAMBDA {
auto pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
auto bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
auto height = CVPixelBufferGetHeight(pixelBuffer);
auto buffer = (uint8_t*) CVPixelBufferGetBaseAddress(pixelBuffer);
auto arraySize = bytesPerRow * height;
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime, arraySize);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
auto arrayBufferCache = runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache).get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);
if (arrayBuffer.size(runtime) != arraySize) {
arrayBuffer = vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, arraySize);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
arrayBuffer.updateUnsafe(runtime, buffer, arraySize);
return arrayBuffer;
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
if (name == "isValid") {
auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
@@ -82,6 +160,41 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
auto height = CVPixelBufferGetHeight(imageBuffer);
return jsi::Value((double) height);
}
if (name == "orientation") {
switch (frame.orientation) {
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
return jsi::String::createFromUtf8(runtime, "portrait");
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
return jsi::String::createFromUtf8(runtime, "portraitUpsideDown");
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
return jsi::String::createFromUtf8(runtime, "landscapeLeft");
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
return jsi::String::createFromUtf8(runtime, "landscapeRight");
}
}
if (name == "isMirrored") {
switch (frame.orientation) {
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
return jsi::Value(false);
case UIImageOrientationDownMirrored:
case UIImageOrientationUpMirrored:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
return jsi::Value(true);
}
}
if (name == "timestamp") {
auto timestamp = CMSampleBufferGetPresentationTimeStamp(frame.buffer);
auto seconds = static_cast<double>(CMTimeGetSeconds(timestamp));
return jsi::Value(seconds * 1000.0);
}
if (name == "bytesPerRow") {
auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
@@ -93,6 +206,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Value((double) planesCount);
}
if (canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
return canvas->get(runtime, propName);
}
// fallback to base implementation
return HostObject::get(runtime, propName);
}
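
A hedged usage sketch for the new Frame accessors implemented above (hypothetical code, not part of this commit; it assumes `useFrameProcessor` from react-native-vision-camera):

import { useFrameProcessor } from 'react-native-vision-camera';

export function useFrameInfoLogger() {
  return useFrameProcessor((frame) => {
    'worklet';
    console.log(
      `${frame.width}x${frame.height} ${frame.orientation} frame ` +
      `(mirrored: ${frame.isMirrored}) at ${frame.timestamp}ms`
    );
    // toArrayBuffer() copies the pixels into a Uint8ClampedArray that is cached on the JS
    // global and re-used for every Frame - copy it out if you need it beyond this invocation.
    const pixels = frame.toArrayBuffer();
    console.log(`first byte: ${pixels[0]}, size: ${pixels.length} bytes`);
  }, []);
}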


@@ -11,4 +11,4 @@
#import <Foundation/Foundation.h>
#import "Frame.h"
typedef void (^FrameProcessorCallback) (Frame* frame);
typedef void (^FrameProcessorCallback) (Frame* frame, void* skCanvas);


@@ -19,10 +19,10 @@
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#import "JsiWorkletContext.h"
#import "JsiWorkletApi.h"
#import "JsiWorklet.h"
#import "JsiHostObject.h"
#import "WKTJsiWorkletContext.h"
#import "WKTJsiWorkletApi.h"
#import "WKTJsiWorklet.h"
#import "WKTJsiHostObject.h"
#import "FrameProcessorUtils.h"
#import "FrameProcessorCallback.h"


@@ -17,7 +17,7 @@
#endif
#import <jsi/jsi.h>
#import "JsiWorklet.h"
#import "WKTJsiWorklet.h"
#import <memory>
using namespace facebook;


@@ -19,22 +19,41 @@
#import "JSConsoleHelper.h"
#import <ReactCommon/RCTTurboModule.h>
#import "JsiWorklet.h"
#import "WKTJsiWorklet.h"
#import "RNSkPlatformContext.h"
#import "RNSkiOSPlatformContext.h"
#import "JsiSkCanvas.h"
FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet) {
// Wrap Worklet call in invoker
auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
// Create cached Skia Canvas object
auto callInvoker = RCTBridge.currentBridge.jsCallInvoker;
auto skiaPlatformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(&runtime, callInvoker);
auto canvasHostObject = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
// Converts a Worklet to a callable Objective-C block function
return ^(Frame* frame) {
return ^(Frame* frame, void* skiaCanvas) {
try {
// Box the Frame to a JS Host Object
// Create cached Frame object
auto frameHostObject = std::make_shared<FrameHostObject>(frame);
// Update cached Canvas object
if (skiaCanvas != nullptr) {
canvasHostObject->setCanvas((SkCanvas*)skiaCanvas);
frameHostObject->canvas = canvasHostObject;
} else {
frameHostObject->canvas = nullptr;
}
auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
jsi::Value jsValue(std::move(argument));
// Call the Worklet with the Frame JS Host Object as an argument
workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
// After the synchronous Frame Processor has finished executing, remove the Canvas from that Frame instance - it can no longer draw.
frameHostObject->canvas = nullptr;
} catch (jsi::JSError& jsError) {
// A JS Error occurred - print it to the console.
auto stack = std::regex_replace(jsError.getStack(), std::regex("\n"), "\n ");

ios/PreviewView.swift (new file, 34 lines)

@@ -0,0 +1,34 @@
//
// PreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
class PreviewView: UIView {
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
init(frame: CGRect, session: AVCaptureSession) {
super.init(frame: frame)
videoPreviewLayer.session = session
videoPreviewLayer.videoGravity = .resizeAspectFill
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented!")
}
}


@@ -0,0 +1,26 @@
//
// PreviewSkiaView.h
// VisionCamera
//
// Created by Marc Rousavy on 17.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef PreviewSkiaView_h
#define PreviewSkiaView_h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "FrameProcessorCallback.h"
typedef void (^DrawCallback) (void* _Nonnull skCanvas);
@interface PreviewSkiaView: UIView
// Call this to pass a new Frame to be drawn onto the Skia Canvas
- (void) drawFrame:(_Nonnull CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback;
@end
#endif /* PreviewSkiaView_h */


@@ -0,0 +1,59 @@
//
// PreviewSkiaView.mm
// VisionCamera
//
// Created by Marc Rousavy on 17.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "PreviewSkiaView.h"
#import <Foundation/Foundation.h>
#import "SkiaMetalCanvasProvider.h"
#include <exception>
#include <string>
#if SHOW_FPS
#import <React/RCTFPSGraph.h>
#endif
@implementation PreviewSkiaView {
std::shared_ptr<SkiaMetalCanvasProvider> _canvasProvider;
}
- (void)drawFrame:(CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback {
if (_canvasProvider == nullptr) {
throw std::runtime_error("Cannot draw new Frame to Canvas when SkiaMetalCanvasProvider is null!");
}
_canvasProvider->renderFrameToCanvas(buffer, ^(SkCanvas* canvas) {
callback((void*)canvas);
});
}
- (void) willMoveToSuperview:(UIView *)newWindow {
if (newWindow == NULL) {
// Remove implementation view when the parent view is not set
if (_canvasProvider != nullptr) {
[_canvasProvider->getLayer() removeFromSuperlayer];
_canvasProvider = nullptr;
}
} else {
// Create implementation view when the parent view is set
if (_canvasProvider == nullptr) {
_canvasProvider = std::make_shared<SkiaMetalCanvasProvider>();
[self.layer addSublayer: _canvasProvider->getLayer()];
_canvasProvider->start();
}
}
}
- (void) layoutSubviews {
if (_canvasProvider != nullptr) {
_canvasProvider->setSize(self.bounds.size.width, self.bounds.size.height);
}
}
@end


@@ -0,0 +1,39 @@
//
// SkImageHelpers.h
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef SkImageHelpers_h
#define SkImageHelpers_h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <include/gpu/GrRecordingContext.h>
#import "SkImage.h"
#import "SkSize.h"
#import "SkRect.h"
class SkImageHelpers {
public:
SkImageHelpers() = delete;
public:
/**
Convert a CMSampleBuffer to an SkImage. Format has to be RGB.
*/
static sk_sp<SkImage> convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer);
/**
Creates a Center Crop Transformation Rect so that the source rect fills (aspectRatio: cover) the destination rect.
The return value should be passed as a sourceRect to a canvas->draw...Rect(..) function; the destinationRect should stay the same.
*/
static SkRect createCenterCropRect(SkRect source, SkRect destination);
private:
static SkRect inscribe(SkSize size, SkRect rect);
};
#endif /* SkImageHelpers_h */


@@ -0,0 +1,108 @@
//
// CMSampleBuffer+toSkImage.m
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "SkImageHelpers.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkData.h>
#import <include/gpu/GrRecordingContext.h>
#include <TargetConditionals.h>
#if TARGET_RT_BIG_ENDIAN
# define FourCC2Str(fourcc) (const char[]){*((char*)&fourcc), *(((char*)&fourcc)+1), *(((char*)&fourcc)+2), *(((char*)&fourcc)+3),0}
#else
# define FourCC2Str(fourcc) (const char[]){*(((char*)&fourcc)+3), *(((char*)&fourcc)+2), *(((char*)&fourcc)+1), *(((char*)&fourcc)+0),0}
#endif
CVMetalTextureCacheRef getTextureCache(GrRecordingContext* context) {
static CVMetalTextureCacheRef textureCache = nil;
if (textureCache == nil) {
// Create a new Texture Cache
auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
nil,
MTLCreateSystemDefaultDevice(),
nil,
&textureCache);
if (result != kCVReturnSuccess || textureCache == nil) {
throw std::runtime_error("Failed to create Metal Texture Cache!");
}
}
return textureCache;
}
sk_sp<SkImage> SkImageHelpers::convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer) {
auto pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
double width = CVPixelBufferGetWidth(pixelBuffer);
double height = CVPixelBufferGetHeight(pixelBuffer);
// Make sure the format is RGB (BGRA_8888)
auto format = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (format != kCVPixelFormatType_32BGRA) {
auto fourCharCode = @(FourCC2Str(format));
auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + fourCharCode.UTF8String + std::string(") - cannot convert to SkImage!");
throw std::runtime_error(error);
}
auto textureCache = getTextureCache(context);
// Convert CMSampleBuffer* -> CVMetalTexture*
CVMetalTextureRef cvTexture;
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache,
pixelBuffer,
nil,
MTLPixelFormatBGRA8Unorm,
width,
height,
0, // plane index
&cvTexture);
GrMtlTextureInfo textureInfo;
auto mtlTexture = CVMetalTextureGetTexture(cvTexture);
textureInfo.fTexture.retain((__bridge void*)mtlTexture);
// Wrap it in a GrBackendTexture
GrBackendTexture texture(width, height, GrMipmapped::kNo, textureInfo);
// Create an SkImage from the existing texture
auto image = SkImage::MakeFromTexture(context,
texture,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
kOpaque_SkAlphaType,
SkColorSpace::MakeSRGB());
// Release the Texture wrapper (it will still be strong)
CFRelease(cvTexture);
return image;
}
SkRect SkImageHelpers::createCenterCropRect(SkRect sourceRect, SkRect destinationRect) {
SkSize src;
if (destinationRect.width() / destinationRect.height() > sourceRect.width() / sourceRect.height()) {
src = SkSize::Make(sourceRect.width(), (sourceRect.width() * destinationRect.height()) / destinationRect.width());
} else {
src = SkSize::Make((sourceRect.height() * destinationRect.width()) / destinationRect.height(), sourceRect.height());
}
return inscribe(src, sourceRect);
}
SkRect SkImageHelpers::inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta,
size.width(),
size.height());
}
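
To make the center-crop ("aspectRatio: cover") math above concrete, here is an illustrative TypeScript restatement of createCenterCropRect/inscribe. It is a sketch for explanation only, not part of this commit.

type Rect = { x: number; y: number; width: number; height: number };

// Same math as createCenterCropRect/inscribe above: pick the largest sub-rect of `source`
// that has `destination`'s aspect ratio, centered inside `source`.
function createCenterCropRect(source: Rect, destination: Rect): Rect {
  const sourceAspect = source.width / source.height;
  const destinationAspect = destination.width / destination.height;
  const size =
    destinationAspect > sourceAspect
      ? { width: source.width, height: (source.width * destination.height) / destination.width }
      : { width: (source.height * destination.width) / destination.height, height: source.height };
  return {
    x: source.x + (source.width - size.width) / 2,
    y: source.y + (source.height - size.height) / 2,
    width: size.width,
    height: size.height,
  };
}

// Example: covering a 1080x1920 portrait view with a 1920x1080 landscape frame keeps the full
// 1080px height and crops the width to 1080 * 1080 / 1920 = 607.5px, centered at x = 656.25.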


@@ -0,0 +1,56 @@
#pragma once
#ifndef __cplusplus
#error This header has to be compiled with C++!
#endif
#import <MetalKit/MetalKit.h>
#import <QuartzCore/CAMetalLayer.h>
#import <AVFoundation/AVFoundation.h>
#include <functional>
#include <include/gpu/GrDirectContext.h>
#include <mutex>
#include <memory>
#include <atomic>
#import "VisionDisplayLink.h"
#import "SkiaMetalRenderContext.h"
class SkiaMetalCanvasProvider: public std::enable_shared_from_this<SkiaMetalCanvasProvider> {
public:
SkiaMetalCanvasProvider();
~SkiaMetalCanvasProvider();
// Render a Camera Frame to the off-screen canvas
void renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback);
// Start updating the DisplayLink (runLoop @ screen refresh rate) and draw Frames to the Layer
void start();
// Update the size of the View (Layer)
void setSize(int width, int height);
CALayer* getLayer();
private:
bool _isValid = false;
float _width = -1;
float _height = -1;
// For rendering Camera Frame -> off-screen MTLTexture
OffscreenRenderContext _offscreenContext;
// For rendering off-screen MTLTexture -> on-screen CAMetalLayer
LayerRenderContext _layerContext;
// For synchronization between the two Threads/Contexts
std::mutex _textureMutex;
std::atomic<bool> _hasNewFrame = false;
private:
void render();
id<MTLTexture> getTexture(int width, int height);
float getPixelDensity();
};


@@ -0,0 +1,240 @@
#import "SkiaMetalCanvasProvider.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkFont.h>
#import <include/gpu/GrDirectContext.h>
#import "SkImageHelpers.h"
#include <memory>
SkiaMetalCanvasProvider::SkiaMetalCanvasProvider(): std::enable_shared_from_this<SkiaMetalCanvasProvider>() {
// Configure Metal Layer
_layerContext.layer = [CAMetalLayer layer];
_layerContext.layer.framebufferOnly = NO;
_layerContext.layer.device = _layerContext.device;
_layerContext.layer.opaque = false;
_layerContext.layer.contentsScale = getPixelDensity();
_layerContext.layer.pixelFormat = MTLPixelFormatBGRA8Unorm;
// Set up DisplayLink
_layerContext.displayLink = [[VisionDisplayLink alloc] init];
_isValid = true;
}
SkiaMetalCanvasProvider::~SkiaMetalCanvasProvider() {
_isValid = false;
NSLog(@"VisionCamera: Stopping SkiaMetalCanvasProvider DisplayLink...");
[_layerContext.displayLink stop];
}
void SkiaMetalCanvasProvider::start() {
NSLog(@"VisionCamera: Starting SkiaMetalCanvasProvider DisplayLink...");
[_layerContext.displayLink start:[weakThis = weak_from_this()](double time) {
auto thiz = weakThis.lock();
if (thiz) {
thiz->render();
}
}];
}
id<MTLTexture> SkiaMetalCanvasProvider::getTexture(int width, int height) {
if (_offscreenContext.texture == nil
|| _offscreenContext.texture.width != width
|| _offscreenContext.texture.height != height) {
// Create new texture with the given width and height
MTLTextureDescriptor* textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
width:width
height:height
mipmapped:NO];
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
_offscreenContext.texture = [_offscreenContext.device newTextureWithDescriptor:textureDescriptor];
}
return _offscreenContext.texture;
}
/**
Callback from the DisplayLink - renders the current in-memory off-screen texture to the on-screen CAMetalLayer
*/
void SkiaMetalCanvasProvider::render() {
if (_width == -1 && _height == -1) {
return;
}
if (!_hasNewFrame) {
// No new Frame has arrived in the meantime.
// We don't need to re-draw the texture to the screen if nothing has changed, abort.
return;
}
@autoreleasepool {
auto context = _layerContext.skiaContext.get();
// Create a Skia Surface from the CAMetalLayer (use to draw to the View)
GrMTLHandle drawableHandle;
auto surface = SkSurface::MakeFromCAMetalLayer(context,
(__bridge GrMTLHandle)_layerContext.layer,
kTopLeft_GrSurfaceOrigin,
1,
kBGRA_8888_SkColorType,
nullptr,
nullptr,
&drawableHandle);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
auto canvas = surface->getCanvas();
// Lock the Mutex so we can operate on the Texture atomically without
// renderFrameToCanvas() overwriting in between from a different thread
std::unique_lock lock(_textureMutex);
// Get the texture
auto texture = _offscreenContext.texture;
if (texture == nil) return;
// Calculate Center Crop (aspectRatio: cover) transform
auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
sourceRect = SkImageHelpers::createCenterCropRect(sourceRect, destinationRect);
auto offsetX = -sourceRect.left();
auto offsetY = -sourceRect.top();
// The Canvas is equal to the View size, whereas the Frame has a different size (e.g. 4k)
// We scale the Canvas to the exact dimensions of the Frame so that the user can use the Frame as a coordinate system
canvas->save();
auto scaleW = static_cast<double>(surface->width()) / texture.width;
auto scaleH = static_cast<double>(surface->height()) / texture.height;
auto scale = MAX(scaleW, scaleH);
canvas->scale(scale, scale);
canvas->translate(offsetX, offsetY);
// Convert the rendered MTLTexture to an SkImage
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendTexture backendTexture(texture.width, texture.height, GrMipmapped::kNo, textureInfo);
auto image = SkImage::MakeFromTexture(context,
backendTexture,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
kOpaque_SkAlphaType,
SkColorSpace::MakeSRGB());
// Draw the Texture (Frame) to the Canvas
canvas->drawImage(image, 0, 0);
// Restore the scale & transform
canvas->restore();
surface->flushAndSubmit();
// Pass the drawable into the Metal Command Buffer and submit it to the GPU
id<CAMetalDrawable> drawable = (__bridge id<CAMetalDrawable>)drawableHandle;
id<MTLCommandBuffer> commandBuffer([_layerContext.commandQueue commandBuffer]);
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
_hasNewFrame = false;
lock.unlock();
}
}
float SkiaMetalCanvasProvider::getPixelDensity() {
return UIScreen.mainScreen.scale;
}
/**
Render to a canvas. This uses the current in-memory off-screen texture and draws to it.
The buffer is expected to be in RGB (`BGRA_8888`) format.
While rendering, `drawCallback` will be invoked with a Skia Canvas instance which can be used for Frame Processing (JS).
*/
void SkiaMetalCanvasProvider::renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback) {
if (_width == -1 && _height == -1) {
return;
}
// Wrap in auto release pool since we want the system to clean up after rendering
// and not wait until later - we've seen some examples of memory usage growing very
// fast in the simulator without this.
@autoreleasepool {
// Get the Frame's PixelBuffer
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
throw std::runtime_error("drawFrame: Pixel Buffer is corrupt/empty.");
}
// Lock Mutex to block the runLoop from overwriting the _currentDrawable
std::unique_lock lock(_textureMutex);
// Get the Metal Texture we use for in-memory drawing
auto texture = getTexture(CVPixelBufferGetWidth(pixelBuffer),
CVPixelBufferGetHeight(pixelBuffer));
// Get & Lock the writeable Texture from the Metal Drawable
GrMtlTextureInfo fbInfo;
fbInfo.fTexture.retain((__bridge void*)texture);
GrBackendRenderTarget backendRT(texture.width,
texture.height,
1,
fbInfo);
auto context = _offscreenContext.skiaContext.get();
// Create a Skia Surface from the writable Texture
auto surface = SkSurface::MakeFromBackendRenderTarget(context,
backendRT,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
nullptr,
nullptr);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
// Lock the Frame's PixelBuffer for the duration of the Frame Processor so the user can safely do operations on it
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
// Converts the CMSampleBuffer to an SkImage - RGB.
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
auto canvas = surface->getCanvas();
// Clear everything so we keep it in a clean state
canvas->clear(SkColors::kBlack);
// Draw the Image into the Frame (aspectRatio: cover)
// The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
// but that'll just overwrite the existing one - no need to worry.
canvas->drawImage(image, 0, 0);
// Call the JS Frame Processor.
drawCallback(canvas);
// Flush all appended operations on the canvas and commit it to the SkSurface
surface->flushAndSubmit();
_hasNewFrame = true;
lock.unlock();
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
void SkiaMetalCanvasProvider::setSize(int width, int height) {
_width = width;
_height = height;
_layerContext.layer.frame = CGRectMake(0, 0, width, height);
_layerContext.layer.drawableSize = CGSizeMake(width * getPixelDensity(),
height * getPixelDensity());
}
CALayer* SkiaMetalCanvasProvider::getLayer() { return _layerContext.layer; }


@@ -0,0 +1,41 @@
//
// SkiaMetalRenderContext.h
// VisionCamera
//
// Created by Marc Rousavy on 02.12.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef SkiaMetalRenderContext_h
#define SkiaMetalRenderContext_h
#import <MetalKit/MetalKit.h>
#import <QuartzCore/CAMetalLayer.h>
#import <AVFoundation/AVFoundation.h>
#include <include/gpu/GrDirectContext.h>
struct RenderContext {
id<MTLDevice> device;
id<MTLCommandQueue> commandQueue;
sk_sp<GrDirectContext> skiaContext;
RenderContext() {
device = MTLCreateSystemDefaultDevice();
commandQueue = id<MTLCommandQueue>(CFRetain((GrMTLHandle)[device newCommandQueue]));
skiaContext = GrDirectContext::MakeMetal((__bridge void*)device,
(__bridge void*)commandQueue);
}
};
// For rendering to an off-screen in-memory Metal Texture (MTLTexture)
struct OffscreenRenderContext: public RenderContext {
id<MTLTexture> texture;
};
// For rendering to a Metal Layer (CAMetalLayer)
struct LayerRenderContext: public RenderContext {
CAMetalLayer* layer;
VisionDisplayLink* displayLink;
};
#endif /* SkiaMetalRenderContext_h */


@@ -0,0 +1,38 @@
//
// VisionDisplayLink.h
// VisionCamera
//
// Created by Marc Rousavy on 28.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef DisplayLink_h
#define DisplayLink_h
#import <CoreFoundation/CoreFoundation.h>
#import <UIKit/UIKit.h>
typedef void (^block_t)(double);
@interface VisionDisplayLink : NSObject {
CADisplayLink *_displayLink;
double _currentFps;
double _previousFrameTimestamp;
}
@property(nonatomic, copy) block_t updateBlock;
// Start the DisplayLink's runLoop
- (void)start:(block_t)block;
// Stop the DisplayLink's runLoop
- (void)stop;
// Get the current FPS value
- (double)currentFps;
// The FPS value this DisplayLink is targeting
- (double)targetFps;
@end
#endif /* VisionDisplayLink_h */


@@ -0,0 +1,63 @@
//
// VisionDisplayLink.m
// VisionCamera
//
// Created by Marc Rousavy on 28.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "VisionDisplayLink.h"
#import <Foundation/Foundation.h>
@implementation VisionDisplayLink
- (void)start:(block_t)block {
self.updateBlock = block;
// check whether the loop is already running
if (_displayLink == nil) {
// specify update method
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(update:)];
// Start a new Queue/Thread that will run the runLoop
dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, -1);
dispatch_queue_t queue = dispatch_queue_create("mrousavy/VisionCamera.preview", qos);
dispatch_async(queue, ^{
// Add the display link to the current run loop (the thread we're currently running on)
NSRunLoop* loop = [NSRunLoop currentRunLoop];
[self->_displayLink addToRunLoop:loop forMode:NSRunLoopCommonModes];
// Run the runLoop (blocking)
[loop run];
NSLog(@"VisionCamera: DisplayLink runLoop ended.");
});
}
}
- (void)stop {
// check whether the loop is already stopped
if (_displayLink != nil) {
// if the display link is present, it gets invalidated (loop stops)
[_displayLink invalidate];
_displayLink = nil;
}
}
- (void)update:(CADisplayLink *)sender {
double time = sender.timestamp;
double diff = time - _previousFrameTimestamp;
_currentFps = 1.0 / diff;
_previousFrameTimestamp = time;
_updateBlock(time);
}
- (double)targetFps {
return 1.0 / _displayLink.duration;
}
- (double)currentFps {
return _currentFps;
}
@end


@@ -11,7 +11,11 @@
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */; };
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8248867292644EF00729383 /* PreviewSkiaView.mm */; };
B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */ = {isa = PBXBuildFile; fileRef = B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */; };
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = B841262E292E41A1001AB448 /* SkImageHelpers.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */; };
@@ -54,6 +58,7 @@
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */ = {isa = PBXBuildFile; fileRef = B88A020C2934FC22009E035A /* VisionDisplayLink.m */; };
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
@@ -78,8 +83,7 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B80416F026AB16E8000DEB6A /* VisionCameraScheduler.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraScheduler.mm; sourceTree = "<group>"; };
B80416F126AB16F3000DEB6A /* VisionCameraScheduler.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraScheduler.h; sourceTree = "<group>"; };
B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaMetalRenderContext.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginRegistry.mm; sourceTree = "<group>"; };
B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorCallback.h; sourceTree = "<group>"; };
@@ -89,8 +93,15 @@
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
B8248866292644E300729383 /* PreviewSkiaView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PreviewSkiaView.h; sourceTree = "<group>"; };
B8248867292644EF00729383 /* PreviewSkiaView.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PreviewSkiaView.mm; sourceTree = "<group>"; };
B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "RCTBridge+runOnJS.h"; sourceTree = "<group>"; };
B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "RCTBridge+runOnJS.mm"; sourceTree = "<group>"; };
B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */ = {isa = PBXFileReference; explicitFileType = sourcecode.cpp.h; fileEncoding = 4; path = SkiaMetalCanvasProvider.h; sourceTree = "<group>"; };
B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaMetalCanvasProvider.mm; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
B8412630292E41AD001AB448 /* SkImageHelpers.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkImageHelpers.h; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
@@ -137,6 +148,8 @@
B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
B88A020C2934FC22009E035A /* VisionDisplayLink.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisionDisplayLink.m; sourceTree = "<group>"; };
B88A020E2934FC29009E035A /* VisionDisplayLink.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionDisplayLink.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
@@ -186,7 +199,9 @@
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
B83D5EE629377117000AFD2F /* PreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8FCA20C292669B800F1AC82 /* Skia Render Layer */,
B887516125E0102000DB86D6 /* Extensions */,
B887517225E0102000DB86D6 /* Parsers */,
B887516D25E0102000DB86D6 /* React Utils */,
@@ -270,12 +285,26 @@
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */,
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
B80416F026AB16E8000DEB6A /* VisionCameraScheduler.mm */,
B80416F126AB16F3000DEB6A /* VisionCameraScheduler.h */,
);
path = "Frame Processor";
sourceTree = "<group>";
};
B8FCA20C292669B800F1AC82 /* Skia Render Layer */ = {
isa = PBXGroup;
children = (
B8248866292644E300729383 /* PreviewSkiaView.h */,
B8248867292644EF00729383 /* PreviewSkiaView.mm */,
B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */,
B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */,
B8412630292E41AD001AB448 /* SkImageHelpers.h */,
B841262E292E41A1001AB448 /* SkImageHelpers.mm */,
B88A020E2934FC29009E035A /* VisionDisplayLink.h */,
B88A020C2934FC22009E035A /* VisionDisplayLink.m */,
B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */,
);
path = "Skia Render Layer";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@@ -379,18 +408,22 @@
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */,
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
@@ -411,6 +444,7 @@
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,