feat: Expose unified VisionCameraProxy object, make FrameProcessorPlugins object-oriented (#1660)

* feat: Replace `FrameProcessorRuntimeManager` with `VisionCameraProxy` (iOS)

* Make `FrameProcessorPlugin` a constructable HostObject

* fix: Fix `name` override

* Simplify `useFrameProcessor`

* fix: Fix lint errors

* Remove FrameProcessorPlugin::name

* JSIUtils -> JSINSObjectConversion
Marc Rousavy
2023-07-21 17:52:30 +02:00
committed by GitHub
parent 375e894038
commit 44ed42d5d6
41 changed files with 762 additions and 607 deletions

View File

@@ -17,6 +17,6 @@
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import "FrameProcessor.h"
#import "FrameProcessorRuntimeManager.h"
#import "Frame.h"
#import "VisionCameraProxy.h"
#endif

View File

@@ -13,10 +13,6 @@ import Foundation
final class CameraViewManager: RCTViewManager {
// pragma MARK: Properties
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
private var runtimeManager: FrameProcessorRuntimeManager?
#endif
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
}
@@ -34,10 +30,9 @@ final class CameraViewManager: RCTViewManager {
@objc
final func installFrameProcessorBindings() -> NSNumber {
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
// Runs on JS Thread
runtimeManager = FrameProcessorRuntimeManager()
runtimeManager!.installFrameProcessorBindings()
return true as NSNumber
// Called on JS Thread (blocking sync method)
let result = VisionCameraInstaller.install(to: bridge)
return NSNumber(value: result)
#else
return false as NSNumber
#endif

View File

@@ -22,8 +22,8 @@
@interface FrameProcessor : NSObject
#ifdef __cplusplus
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet;
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context;
- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject;
#endif

View File

@@ -21,11 +21,11 @@ using namespace facebook;
std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
}
- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet {
- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context {
if (self = [super init]) {
_workletContext = context;
_workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
_workletContext = context;
}
return self;
}

View File

@@ -15,18 +15,24 @@
///
/// Subclass this class in a Swift or Objective-C class and override the `callback:withArguments:` method, and
/// implement your Frame Processing there.
/// Then, in your App's startup (AppDelegate.m), call `FrameProcessorPluginBase.registerPlugin(YourNewPlugin())`
///
/// Use `[FrameProcessorPluginRegistry addFrameProcessorPlugin:]` to register the Plugin to the VisionCamera Runtime.
@interface FrameProcessorPlugin : NSObject
/// Get the name of the Frame Processor Plugin.
/// This will be exposed to JS under the `FrameProcessorPlugins` Proxy object.
- (NSString * _Nonnull)name;
/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
/// Make sure your code is optimized, as this is a hot path.
- (id _Nullable) callback:(Frame* _Nonnull)frame withArguments:(NSArray<id>* _Nullable)arguments;
/// Register the given plugin in the Plugin Registry. This should be called on App Startup.
+ (void) registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
- (id _Nullable) callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments;
@end
// Base implementation (empty)
@implementation FrameProcessorPlugin
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin does not override the `callback(frame:withArguments:)` method!"];
return nil;
}
@end
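
For reference, a minimal plugin sketch against the new base class above; the class name, option key, and detection logic are hypothetical and not part of this commit:

#import "FrameProcessorPlugin.h"
#import "Frame.h"

// Hypothetical example plugin overriding the new NSDictionary-based callback.
@interface ExampleDetectorPlugin : FrameProcessorPlugin
@end

@implementation ExampleDetectorPlugin
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments {
  // Hot path: keep per-frame work as light as possible.
  NSNumber* threshold = arguments[@"threshold"]; // hypothetical option passed from JS
  // ... inspect frame.buffer and run the actual detection here ...
  return @{ @"detected": @NO, @"threshold": threshold ?: @0 };
}
@end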

View File

@@ -1,31 +0,0 @@
//
// FrameProcessorPlugin.m
// VisionCamera
//
// Created by Marc Rousavy on 24.02.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "FrameProcessorPlugin.h"
#import "FrameProcessorPluginRegistry.h"
@implementation FrameProcessorPlugin
- (NSString *)name {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin \"%@\" does not override the `name` getter!", [self name]];
return nil;
}
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSArray<id>* _Nullable)arguments {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin \"%@\" does not override the `callback(frame:withArguments:)` method!", [self name]];
return nil;
}
+ (void)registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin {
[FrameProcessorPluginRegistry addFrameProcessorPlugin:plugin];
}
@end

View File

@@ -0,0 +1,32 @@
//
// FrameProcessorPluginHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 21.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <jsi/jsi.h>
#import "FrameProcessorPlugin.h"
#import <memory>
#import <ReactCommon/CallInvoker.h>
using namespace facebook;
class FrameProcessorPluginHostObject: public jsi::HostObject {
public:
explicit FrameProcessorPluginHostObject(FrameProcessorPlugin* plugin,
std::shared_ptr<react::CallInvoker> callInvoker):
_plugin(plugin), _callInvoker(callInvoker) { }
~FrameProcessorPluginHostObject() { }
public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
private:
FrameProcessorPlugin* _plugin;
std::shared_ptr<react::CallInvoker> _callInvoker;
};

View File

@@ -0,0 +1,52 @@
//
// FrameProcessorPluginHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 21.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "FrameProcessorPluginHostObject.h"
#import <Foundation/Foundation.h>
#import <vector>
#import "FrameHostObject.h"
#import "JSINSObjectConversion.h"
std::vector<jsi::PropNameID> FrameProcessorPluginHostObject::getPropertyNames(jsi::Runtime& runtime) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("call")));
return result;
}
jsi::Value FrameProcessorPluginHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "call") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "call"),
2,
[=](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
// Frame is first argument
auto frameHostObject = arguments[0].asObject(runtime).asHostObject<FrameHostObject>(runtime);
Frame* frame = frameHostObject->frame;
// Options are second argument (possibly undefined)
NSDictionary* options = nil;
if (count > 1) {
auto optionsObject = arguments[1].asObject(runtime);
options = JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, optionsObject, _callInvoker);
}
// Call actual Frame Processor Plugin
id result = [_plugin callback:frame withArguments:options];
// Convert result value to jsi::Value (possibly undefined)
return JSINSObjectConversion::convertObjCObjectToJSIValue(runtime, result);
});
}
return jsi::Value::undefined();
}

View File

@@ -14,7 +14,12 @@
@interface FrameProcessorPluginRegistry : NSObject
+ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins;
+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
typedef FrameProcessorPlugin* _Nonnull (^PluginInitializerFunction)(NSDictionary* _Nullable options);
+ (void)addFrameProcessorPlugin:(NSString* _Nonnull)name
withInitializer:(PluginInitializerFunction _Nonnull)pluginInitializer;
+ (FrameProcessorPlugin* _Nullable)getPlugin:(NSString* _Nonnull)name
withOptions:(NSDictionary* _Nullable)options;
@end
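
A minimal registration sketch against the new block-based registry API declared above, typically run once at app startup; ExampleDetectorPlugin and the "example_plugin" name are hypothetical:

#import "FrameProcessorPluginRegistry.h"

// Register an initializer block under a name; JS later instantiates the plugin via
// VisionCameraProxy.getFrameProcessorPlugin("example_plugin", options).
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"example_plugin"
                                      withInitializer:^FrameProcessorPlugin* (NSDictionary* _Nullable options) {
  return [[ExampleDetectorPlugin alloc] init];
}];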

View File

@@ -1,5 +1,5 @@
//
// FrameProcessorPluginRegistry.mm
// FrameProcessorPluginRegistry.m
// VisionCamera
//
// Created by Marc Rousavy on 24.03.21.
@@ -11,19 +11,28 @@
@implementation FrameProcessorPluginRegistry
+ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins {
static NSMutableDictionary<NSString*, FrameProcessorPlugin*>* plugins = nil;
+ (NSMutableDictionary<NSString*, PluginInitializerFunction>*)frameProcessorPlugins {
static NSMutableDictionary<NSString*, PluginInitializerFunction>* plugins = nil;
if (plugins == nil) {
plugins = [[NSMutableDictionary alloc] init];
}
return plugins;
}
+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin*)plugin {
BOOL alreadyExists = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:plugin.name] != nil;
NSAssert(!alreadyExists, @"Tried to add a Frame Processor Plugin with a name that already exists! Either choose unique names, or remove the unused plugin. Name: %@", plugin.name);
+ (void) addFrameProcessorPlugin:(NSString *)name withInitializer:(PluginInitializerFunction)pluginInitializer {
BOOL alreadyExists = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:name] != nil;
NSAssert(!alreadyExists, @"Tried to add a Frame Processor Plugin with a name that already exists! Either choose unique names, or remove the unused plugin. Name: %@", name);
[[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:plugin forKey:plugin.name];
[[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:pluginInitializer forKey:name];
}
+ (FrameProcessorPlugin*)getPlugin:(NSString* _Nonnull)name withOptions:(NSDictionary* _Nullable)options {
PluginInitializerFunction initializer = [[FrameProcessorPluginRegistry frameProcessorPlugins] objectForKey:name];
if (initializer == nil) {
return nil;
}
return initializer(options);
}
@end

View File

@@ -1,18 +0,0 @@
//
// FrameProcessorRuntimeManager.h
// VisionCamera
//
// Created by Marc Rousavy on 23.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
@interface FrameProcessorRuntimeManager : NSObject
- (void) installFrameProcessorBindings;
@end

View File

@@ -1,203 +0,0 @@
//
// FrameProcessorRuntimeManager.m
// VisionCamera
//
// Created by Marc Rousavy on 23.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "FrameProcessorRuntimeManager.h"
#import "FrameProcessorPluginRegistry.h"
#import "FrameProcessorPlugin.h"
#import "FrameProcessor.h"
#import "FrameHostObject.h"
#import <memory>
#import <React/RCTBridge.h>
#import <ReactCommon/RCTTurboModule.h>
#import <React/RCTBridge+Private.h>
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#import "WKTJsiWorkletContext.h"
#import "WKTJsiWorklet.h"
#import "../React Utils/JSIUtils.h"
#import "../../cpp/JSITypedArray.h"
#if VISION_CAMERA_ENABLE_SKIA
#import "../Skia Render Layer/SkiaFrameProcessor.h"
#endif
// Forward declarations for the Swift classes
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues : NSObject
@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
@end
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView : UIView
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
- (SkiaRenderer* _Nonnull)getSkiaRenderer;
@end
@implementation FrameProcessorRuntimeManager {
// Separate Camera Worklet Context
std::shared_ptr<RNWorklet::JsiWorkletContext> workletContext;
}
- (void) setupWorkletContext:(jsi::Runtime&)runtime {
NSLog(@"FrameProcessorBindings: Creating Worklet Context...");
auto callInvoker = RCTBridge.currentBridge.jsCallInvoker;
auto runOnJS = [callInvoker](std::function<void()>&& f) {
// Run on React JS Runtime
callInvoker->invokeAsync(std::move(f));
};
auto runOnWorklet = [](std::function<void()>&& f) {
// Run on Frame Processor Worklet Runtime
dispatch_async(CameraQueues.videoQueue, [f = std::move(f)](){
f();
});
};
workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
&runtime,
runOnJS,
runOnWorklet);
NSLog(@"FrameProcessorBindings: Worklet Context Created!");
NSLog(@"FrameProcessorBindings: Installing Frame Processor plugins...");
jsi::Object frameProcessorPlugins(runtime);
// Iterate through all registered plugins (+init)
for (NSString* pluginKey in [FrameProcessorPluginRegistry frameProcessorPlugins]) {
auto pluginName = [pluginKey UTF8String];
NSLog(@"FrameProcessorBindings: Installing Frame Processor plugin \"%s\"...", pluginName);
// Get the Plugin
FrameProcessorPlugin* plugin = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:pluginKey];
// Create the JSI host function
auto function = [plugin, callInvoker](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
// Get the first parameter, which is always the native Frame Host Object.
auto frameHostObject = arguments[0].asObject(runtime).asHostObject(runtime);
auto frame = static_cast<FrameHostObject*>(frameHostObject.get());
// Convert any additional parameters to the Frame Processor to ObjC objects
auto args = convertJSICStyleArrayToNSArray(runtime,
arguments + 1, // start at index 1 since first arg = Frame
count - 1, // use smaller count
callInvoker);
// Call the FP Plugin, which might return something.
id result = [plugin callback:frame->frame withArguments:args];
// Convert the return value (or null) to a JS Value and return it to JS
return convertObjCObjectToJSIValue(runtime, result);
};
// Assign it to the Proxy.
// A FP Plugin called "example_plugin" can be now called from JS using "FrameProcessorPlugins.example_plugin(frame)"
frameProcessorPlugins.setProperty(runtime,
pluginName,
jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forAscii(runtime, pluginName),
1, // frame
function));
}
// global.FrameProcessorPlugins Proxy
runtime.global().setProperty(runtime, "FrameProcessorPlugins", frameProcessorPlugins);
NSLog(@"FrameProcessorBindings: Frame Processor plugins installed!");
}
- (void) installFrameProcessorBindings {
NSLog(@"FrameProcessorBindings: Installing Frame Processor Bindings for Bridge...");
RCTCxxBridge *cxxBridge = (RCTCxxBridge *)[RCTBridge currentBridge];
if (!cxxBridge.runtime) {
return;
}
jsi::Runtime& jsiRuntime = *(jsi::Runtime*)cxxBridge.runtime;
// HostObject that attaches the cache to the lifecycle of the Runtime. On Runtime destroy, we destroy the cache.
auto propNameCacheObject = std::make_shared<vision::InvalidateCacheOnDestroy>(jsiRuntime);
jsiRuntime.global().setProperty(jsiRuntime,
"__visionCameraArrayBufferCache",
jsi::Object::createFromHostObject(jsiRuntime, propNameCacheObject));
// Install the Worklet Runtime in the main React JS Runtime
[self setupWorkletContext:jsiRuntime];
NSLog(@"FrameProcessorBindings: Installing global functions...");
// setFrameProcessor(viewTag: number, frameProcessor: (frame: Frame) => void)
auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
NSLog(@"FrameProcessorBindings: Setting new frame processor...");
auto viewTag = arguments[0].asNumber();
auto object = arguments[1].asObject(runtime);
auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, object.getProperty(runtime, "frameProcessor"));
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
if (frameProcessorType == "frame-processor") {
view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:self->workletContext
worklet:worklet];
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:self->workletContext
worklet:worklet
skiaRenderer:skiaRenderer];
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
});
return jsi::Value::undefined();
};
jsiRuntime.global().setProperty(jsiRuntime, "setFrameProcessor", jsi::Function::createFromHostFunction(jsiRuntime,
jsi::PropNameID::forAscii(jsiRuntime, "setFrameProcessor"),
2, // viewTag, frameProcessor
setFrameProcessor));
// unsetFrameProcessor(viewTag: number)
auto unsetFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
NSLog(@"FrameProcessorBindings: Removing frame processor...");
auto viewTag = arguments[0].asNumber();
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
if (!currentBridge) return;
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
view.frameProcessor = nil;
});
return jsi::Value::undefined();
};
jsiRuntime.global().setProperty(jsiRuntime, "unsetFrameProcessor", jsi::Function::createFromHostFunction(jsiRuntime,
jsi::PropNameID::forAscii(jsiRuntime, "unsetFrameProcessor"),
1, // viewTag
unsetFrameProcessor));
NSLog(@"FrameProcessorBindings: Finished installing bindings.");
}
@end

View File

@@ -1,5 +1,5 @@
//
// JSIUtils.h
// JSINSObjectConversion.h
// VisionCamera
//
// Created by Marc Rousavy on 30.04.21.
@@ -12,6 +12,8 @@
#import <ReactCommon/CallInvoker.h>
#import <React/RCTBridgeModule.h>
namespace JSINSObjectConversion {
using namespace facebook;
using namespace facebook::react;
@@ -53,3 +55,5 @@ id convertJSIValueToObjCObject(jsi::Runtime& runtime, const jsi::Value& value, s
// (any...) => any -> (void)(id, id)
RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime& runtime, const jsi::Function& value, std::shared_ptr<CallInvoker> jsInvoker);
} // namespace JSINSObjectConversion

View File

@@ -1,5 +1,5 @@
//
// JSIUtils.mm
// JSINSObjectConversion.mm
// VisionCamera
//
// Forked and Adjusted by Marc Rousavy on 02.05.21.
@@ -14,7 +14,7 @@
// LICENSE file in the root directory of this source tree.
//
#import "JSIUtils.h"
#import "JSINSObjectConversion.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
#import <ReactCommon/CallInvoker.h>

View File

@@ -0,0 +1,44 @@
//
// VisionCameraProxy.h
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
#ifdef __cplusplus
#import <jsi/jsi.h>
#import "WKTJsiWorkletContext.h"
#import <ReactCommon/CallInvoker.h>
using namespace facebook;
class VisionCameraProxy: public jsi::HostObject {
public:
explicit VisionCameraProxy(jsi::Runtime& runtime,
std::shared_ptr<react::CallInvoker> callInvoker);
~VisionCameraProxy();
public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
private:
void setFrameProcessor(jsi::Runtime& runtime, int viewTag, const jsi::Object& frameProcessor);
void removeFrameProcessor(jsi::Runtime& runtime, int viewTag);
jsi::Value getFrameProcessorPlugin(jsi::Runtime& runtime, std::string name, const jsi::Object& options);
private:
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
std::shared_ptr<react::CallInvoker> _callInvoker;
};
#endif
@interface VisionCameraInstaller : NSObject
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge;
@end

View File

@@ -0,0 +1,211 @@
//
// VisionCameraProxy.mm
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "VisionCameraProxy.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
#import "FrameProcessorPluginRegistry.h"
#import "FrameProcessorPluginHostObject.h"
#import "FrameProcessor.h"
#import "FrameHostObject.h"
#import "JSINSObjectConversion.h"
#import "../../cpp/JSITypedArray.h"
#import "WKTJsiWorklet.h"
#import <React/RCTUtils.h>
#import <React/RCTBridge.h>
#import <React/RCTBridge+Private.h>
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#if VISION_CAMERA_ENABLE_SKIA
#import "SkiaRenderer.h"
#import "../Skia Render Layer/SkiaFrameProcessor.h"
#endif
// Swift forward-declarations
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues: NSObject
@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
@end
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView: UIView
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
#if VISION_CAMERA_ENABLE_SKIA
- (SkiaRenderer* _Nonnull)getSkiaRenderer;
#endif
@end
using namespace facebook;
VisionCameraProxy::VisionCameraProxy(jsi::Runtime& runtime,
std::shared_ptr<react::CallInvoker> callInvoker) {
_callInvoker = callInvoker;
NSLog(@"VisionCameraProxy: Creating Worklet Context...");
auto runOnJS = [callInvoker](std::function<void()>&& f) {
// Run on React JS Runtime
callInvoker->invokeAsync(std::move(f));
};
auto runOnWorklet = [](std::function<void()>&& f) {
// Run on Frame Processor Worklet Runtime
dispatch_async(CameraQueues.videoQueue, [f = std::move(f)](){
f();
});
};
_workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
&runtime,
runOnJS,
runOnWorklet);
NSLog(@"VisionCameraProxy: Worklet Context Created!");
}
VisionCameraProxy::~VisionCameraProxy() {
NSLog(@"VisionCameraProxy: Destroying context...");
// Destroy ArrayBuffer cache for both the JS and the Worklet Runtime.
vision::invalidateArrayBufferCache(*_workletContext->getJsRuntime());
vision::invalidateArrayBufferCache(_workletContext->getWorkletRuntime());
}
std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& runtime) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("isSkiaEnabled")));
return result;
}
void VisionCameraProxy::setFrameProcessor(jsi::Runtime& runtime, int viewTag, const jsi::Object& object) {
auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, object.getProperty(runtime, "frameProcessor"));
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
if (frameProcessorType == "frame-processor") {
view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:worklet
context:_workletContext];
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:worklet
context:_workletContext
skiaRenderer:skiaRenderer];
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
});
}
void VisionCameraProxy::removeFrameProcessor(jsi::Runtime& runtime, int viewTag) {
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
view.frameProcessor = nil;
});
}
jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime, std::string name, const jsi::Object& options) {
NSString* key = [NSString stringWithUTF8String:name.c_str()];
NSDictionary* optionsObjc = JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, options, _callInvoker);
FrameProcessorPlugin* plugin = [FrameProcessorPluginRegistry getPlugin:key withOptions:optionsObjc];
if (plugin == nil) {
return jsi::Value::undefined();
}
auto pluginHostObject = std::make_shared<FrameProcessorPluginHostObject>(plugin, _callInvoker);
return jsi::Object::createFromHostObject(runtime, pluginHostObject);
}
jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "isSkiaEnabled") {
#ifdef VISION_CAMERA_ENABLE_SKIA
return jsi::Value(true);
#else
return jsi::Value(false);
#endif
}
if (name == "setFrameProcessor") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"),
1,
[this](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
auto viewTag = arguments[0].asNumber();
auto object = arguments[1].asObject(runtime);
this->setFrameProcessor(runtime, static_cast<int>(viewTag), object);
return jsi::Value::undefined();
});
}
if (name == "removeFrameProcessor") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "removeFrameProcessor"),
1,
[this](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
auto viewTag = arguments[0].asNumber();
this->removeFrameProcessor(runtime, static_cast<int>(viewTag));
return jsi::Value::undefined();
});
}
if (name == "getFrameProcessorPlugin") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "getFrameProcessorPlugin"),
1,
[this](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
if (count < 1 || !arguments[0].isString()) {
throw jsi::JSError(runtime, "First argument needs to be a string (pluginName)!");
}
auto pluginName = arguments[0].asString(runtime).utf8(runtime);
auto options = count > 1 ? arguments[1].asObject(runtime) : jsi::Object(runtime);
return this->getFrameProcessorPlugin(runtime, pluginName, options);
});
}
return jsi::Value::undefined();
}
@implementation VisionCameraInstaller
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge {
RCTCxxBridge* cxxBridge = (RCTCxxBridge*)[RCTBridge currentBridge];
if (!cxxBridge.runtime) {
return NO;
}
jsi::Runtime& runtime = *(jsi::Runtime*)cxxBridge.runtime;
// global.VisionCameraProxy
auto visionCameraProxy = std::make_shared<VisionCameraProxy>(runtime, bridge.jsCallInvoker);
runtime.global().setProperty(runtime,
"VisionCameraProxy",
jsi::Object::createFromHostObject(runtime, visionCameraProxy));
return YES;
}
@end
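
The Swift view manager above already triggers this via VisionCameraInstaller.install(to:); for completeness, the equivalent Objective-C call is sketched below, assuming the current bridge is available:

#import "VisionCameraProxy.h"
#import <React/RCTBridge.h>

// Installs global.VisionCameraProxy into the React JS runtime; returns NO if the runtime is not ready yet.
BOOL installed = [VisionCameraInstaller installToBridge:[RCTBridge currentBridge]];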

View File

@@ -19,8 +19,8 @@
@interface SkiaFrameProcessor: FrameProcessor
#ifdef __cplusplus
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer;
#endif

View File

@@ -25,11 +25,11 @@ using namespace facebook;
std::shared_ptr<RNSkia::JsiSkCanvas> _skiaCanvas;
}
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer {
if (self = [super initWithWorklet:context
worklet:worklet]) {
if (self = [super initWithWorklet:worklet
context:context]) {
_skiaRenderer = skiaRenderer;
auto platformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(context->getJsRuntime(),
RCTBridge.currentBridge);

View File

@@ -54,8 +54,7 @@
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */; };
B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
@@ -80,13 +79,15 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B80A319E293A5C10003EE681 /* SkiaRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderContext.h; sourceTree = "<group>"; };
B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; };
B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B8127E382A68871C00B06972 /* SkiaPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewView.swift; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
@@ -100,7 +101,6 @@
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
@@ -137,11 +137,9 @@
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSINSObjectConversion.mm; sourceTree = "<group>"; };
B89A28742A68795E0092207F /* SkiaRenderer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaRenderer.mm; sourceTree = "<group>"; };
B89A28752A68796A0092207F /* SkiaRenderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderer.h; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaFrameProcessor.h; sourceTree = "<group>"; };
B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaFrameProcessor.mm; sourceTree = "<group>"; };
@@ -151,6 +149,8 @@
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = DrawableFrameHostObject.mm; sourceTree = "<group>"; };
B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DrawableFrameHostObject.h; sourceTree = "<group>"; };
B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
@@ -237,8 +237,6 @@
B887516F25E0102000DB86D6 /* ReactLogger.swift */,
B887517025E0102000DB86D6 /* Promise.swift */,
B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
B81D41EF263C86F900B041FD /* JSIUtils.h */,
B8994E6B263F03E100069589 /* JSIUtils.mm */,
);
path = "React Utils";
sourceTree = "<group>";
@@ -273,12 +271,15 @@
B8F7DDD1266F715D00120533 /* Frame.m */,
B84760A22608EE38004C3180 /* FrameHostObject.h */,
B84760A52608EE7C004C3180 /* FrameHostObject.mm */,
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */,
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */,
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */,
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */,
B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */,
B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */,
B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */,
B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */,
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */,
B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */,
);
path = "Frame Processor";
sourceTree = "<group>";
@@ -436,7 +437,6 @@
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */,
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
@@ -450,7 +450,7 @@
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */,
B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */,
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */,