2021-06-27 12:37:54 +02:00
|
|
|
//
|
|
|
|
// Created by Marc Rousavy on 11.06.21.
|
|
|
|
//
|
|
|
|
|
|
|
|
#include "FrameProcessorRuntimeManager.h"
|
|
|
|
#include <android/log.h>
|
|
|
|
#include <jni.h>
|
|
|
|
#include <utility>
|
2021-06-29 10:38:13 +02:00
|
|
|
#include <string>
|
2023-02-21 15:00:48 +01:00
|
|
|
#include <WKTJsiWorklet.h>
|
|
|
|
#include <WKTJsiHostObject.h>
|
2021-06-27 12:37:54 +02:00
|
|
|
|
|
|
|
#include "CameraView.h"
|
2021-09-29 12:30:50 +02:00
|
|
|
#include "FrameHostObject.h"
|
2021-06-27 12:37:54 +02:00
|
|
|
#include "JSIJNIConversion.h"
|
2021-09-29 12:30:50 +02:00
|
|
|
#include "java-bindings/JImageProxy.h"
|
|
|
|
#include "java-bindings/JFrameProcessorPlugin.h"
|
2023-02-21 15:44:43 +01:00
|
|
|
#include "JSITypedArray.h"
|
2021-06-27 12:37:54 +02:00
|
|
|
|
|
|
|
namespace vision {

// type aliases
// Handle to the C++ half of this HybridClass, returned to Java by initHybrid().
using TSelf = local_ref<HybridClass<vision::FrameProcessorRuntimeManager>::jhybriddata>;
// JNI holder wrapping React Native's CallInvoker (used to hop onto the JS thread).
using TJSCallInvokerHolder = jni::alias_ref<facebook::react::CallInvokerHolder::javaobject>;
// JNI reference to the Android-side scheduler that dispatches Frame Processor work.
using TAndroidScheduler = jni::alias_ref<VisionCameraScheduler::javaobject>;
|
|
/**
 * Creates the runtime manager and its Worklet Context.
 *
 * @param jThis         JNI reference to the Java half of this HybridClass (kept as a global ref).
 * @param jsRuntime     Pointer to the React JS runtime (not owned).
 * @param jsCallInvoker Invoker used to schedule work on the React JS thread.
 * @param scheduler     Scheduler used to dispatch work onto the Frame Processor thread.
 */
FrameProcessorRuntimeManager::FrameProcessorRuntimeManager(jni::alias_ref<FrameProcessorRuntimeManager::jhybridobject> jThis,
                                                           jsi::Runtime* jsRuntime,
                                                           std::shared_ptr<facebook::react::CallInvoker> jsCallInvoker,
                                                           std::shared_ptr<vision::VisionCameraScheduler> scheduler) :
    javaPart_(jni::make_global(jThis)),
    _jsRuntime(jsRuntime) {
  // Executor that hops onto the React JS thread via the CallInvoker.
  auto callOnJsThread = [jsCallInvoker](std::function<void()>&& job) {
    jsCallInvoker->invokeAsync(std::move(job));
  };
  // Executor that hops onto the Frame Processor (Worklet) thread via the Android scheduler.
  auto callOnWorkletThread = [scheduler](std::function<void()>&& job) {
    scheduler->dispatchAsync(std::move(job));
  };
  // The Worklet Context hosts the separate Frame Processor JS runtime and
  // knows how to dispatch onto both threads above.
  _workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
                                                                   jsRuntime,
                                                                   callOnJsThread,
                                                                   callOnWorkletThread);
}
|
|
|
|
|
2021-06-27 12:37:54 +02:00
|
|
|
// JNI binding
|
|
|
|
void vision::FrameProcessorRuntimeManager::registerNatives() {
|
|
|
|
registerHybrid({
|
|
|
|
makeNativeMethod("initHybrid",
|
|
|
|
FrameProcessorRuntimeManager::initHybrid),
|
|
|
|
makeNativeMethod("installJSIBindings",
|
|
|
|
FrameProcessorRuntimeManager::installJSIBindings),
|
|
|
|
makeNativeMethod("registerPlugin",
|
|
|
|
FrameProcessorRuntimeManager::registerPlugin),
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
// JNI init
|
|
|
|
TSelf vision::FrameProcessorRuntimeManager::initHybrid(
|
|
|
|
alias_ref<jhybridobject> jThis,
|
2021-09-29 12:30:50 +02:00
|
|
|
jlong jsRuntimePointer,
|
|
|
|
TJSCallInvokerHolder jsCallInvokerHolder,
|
|
|
|
TAndroidScheduler androidScheduler) {
|
2021-06-27 12:37:54 +02:00
|
|
|
__android_log_write(ANDROID_LOG_INFO, TAG,
|
|
|
|
"Initializing FrameProcessorRuntimeManager...");
|
|
|
|
|
|
|
|
// cast from JNI hybrid objects to C++ instances
|
2023-02-13 15:22:45 +01:00
|
|
|
auto jsRuntime = reinterpret_cast<jsi::Runtime*>(jsRuntimePointer);
|
2021-06-27 12:37:54 +02:00
|
|
|
auto jsCallInvoker = jsCallInvokerHolder->cthis()->getCallInvoker();
|
2021-07-30 10:27:45 +02:00
|
|
|
auto scheduler = std::shared_ptr<VisionCameraScheduler>(androidScheduler->cthis());
|
2021-06-27 12:37:54 +02:00
|
|
|
|
2023-02-13 15:22:45 +01:00
|
|
|
return makeCxxInstance(jThis, jsRuntime, jsCallInvoker, scheduler);
|
2021-06-27 12:37:54 +02:00
|
|
|
}
|
|
|
|
|
2021-09-29 12:30:50 +02:00
|
|
|
/**
 * Resolves the CameraView for a given React view tag by calling up into Java.
 *
 * @param viewId the React view tag to look up.
 * @return a global JNI reference to the CameraView (valid across JNI calls).
 */
global_ref<CameraView::javaobject> FrameProcessorRuntimeManager::findCameraViewById(int viewId) {
  // Resolve the Java method once; `static` caches it for every later call.
  static const auto findCameraViewByIdMethod = javaPart_->getClass()->getMethod<CameraView(jint)>("findCameraViewById");
  // Ask the Java side for the view, then pin it with a global reference.
  auto view = findCameraViewByIdMethod(javaPart_.get(), viewId);
  return make_global(view);
}
|
|
|
|
|
2021-06-28 20:45:08 +02:00
|
|
|
void FrameProcessorRuntimeManager::logErrorToJS(const std::string& message) {
|
2023-02-13 15:22:45 +01:00
|
|
|
if (!_workletContext) {
|
2021-06-29 10:38:13 +02:00
|
|
|
return;
|
|
|
|
}
|
2023-02-13 15:22:45 +01:00
|
|
|
// Call console.error() on JS Thread
|
|
|
|
_workletContext->invokeOnJsThread([message](jsi::Runtime& runtime) {
|
|
|
|
auto consoleError = runtime
|
|
|
|
.global()
|
|
|
|
.getPropertyAsObject(runtime, "console")
|
|
|
|
.getPropertyAsFunction(runtime, "error");
|
|
|
|
consoleError.call(runtime, jsi::String::createFromUtf8(runtime, message));
|
2021-06-29 10:38:13 +02:00
|
|
|
});
|
2021-06-29 10:38:35 +02:00
|
|
|
}
|
2021-06-28 20:44:50 +02:00
|
|
|
|
2021-09-29 12:30:50 +02:00
|
|
|
/**
 * Installs a JS Frame Processor function on the CameraView with the given tag.
 *
 * The jsi::Function is first converted into a shareable Worklet on the caller's
 * runtime, then a C++ callback is installed on the CameraView (from the Worklet
 * thread) which wraps each incoming camera frame in a FrameHostObject and
 * invokes the Worklet on the Worklet runtime.
 *
 * @param runtime        the React JS runtime the frameProcessor function lives in.
 * @param viewTag        React view tag identifying the target CameraView.
 * @param frameProcessor the user's worklet function.
 * @throws jsi::JSError if the Worklet Context has not been initialized yet.
 */
void FrameProcessorRuntimeManager::setFrameProcessor(jsi::Runtime& runtime,
                                                     int viewTag,
                                                     const jsi::Value& frameProcessor) {
  __android_log_write(ANDROID_LOG_INFO, TAG, "Setting new Frame Processor...");

  if (!_workletContext) {
    throw jsi::JSError(runtime,
                       "setFrameProcessor(..): VisionCamera's Worklet Context is not yet initialized!");
  }

  // find camera view
  auto cameraView = findCameraViewById(viewTag);

  // convert jsi::Function to a Worklet (can be shared across runtimes)
  auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, frameProcessor);
  auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);

  // Hop onto the Worklet thread; `[=]` copies cameraView (global ref) and the
  // shared workletInvoker into the closure so they stay alive.
  _workletContext->invokeOnWorkletThread([=](RNWorklet::JsiWorkletContext*, jsi::Runtime& rt) {
    // Set Frame Processor as callable C++ lambda - this will then call the Worklet
    // NOTE(review): the inner lambda captures `this` and `&rt` and is stored on the
    // CameraView — this assumes the manager and the Worklet runtime both outlive
    // the installed Frame Processor; confirm teardown order guarantees this.
    cameraView->cthis()->setFrameProcessor([this, workletInvoker, &rt](jni::alias_ref<JImageProxy::javaobject> frame) {
      try {
        // create HostObject which holds the Frame (JImageProxy)
        auto frameHostObject = std::make_shared<FrameHostObject>(frame);
        auto argument = jsi::Object::createFromHostObject(rt, frameHostObject);
        jsi::Value jsValue(std::move(argument));
        // Call the Worklet on the Worklet Runtime
        workletInvoker->call(rt, jsi::Value::undefined(), &jsValue, 1);
      } catch (jsi::JSError& jsError) {
        // Worklet threw a JS Error, catch it and log it to JS.
        auto message = "Frame Processor threw an error: " + jsError.getMessage();
        __android_log_write(ANDROID_LOG_ERROR, TAG, message.c_str());
        this->logErrorToJS(message);
      }
    });
  });
}
|
|
|
|
|
|
|
|
void FrameProcessorRuntimeManager::unsetFrameProcessor(int viewTag) {
|
|
|
|
__android_log_write(ANDROID_LOG_INFO, TAG, "Removing Frame Processor...");
|
|
|
|
|
|
|
|
// find camera view
|
|
|
|
auto cameraView = findCameraViewById(viewTag);
|
|
|
|
|
|
|
|
// call Java method to unset frame processor
|
|
|
|
cameraView->cthis()->unsetFrameProcessor();
|
|
|
|
}
|
|
|
|
|
2021-06-27 12:37:54 +02:00
|
|
|
// actual JSI installer
|
|
|
|
void FrameProcessorRuntimeManager::installJSIBindings() {
|
|
|
|
__android_log_write(ANDROID_LOG_INFO, TAG, "Installing JSI bindings...");
|
|
|
|
|
2023-02-13 15:22:45 +01:00
|
|
|
if (_jsRuntime == nullptr) {
|
2021-06-27 12:37:54 +02:00
|
|
|
__android_log_write(ANDROID_LOG_ERROR, TAG,
|
|
|
|
"JS-Runtime was null, Frame Processor JSI bindings could not be installed!");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2023-02-13 15:22:45 +01:00
|
|
|
auto& jsiRuntime = *_jsRuntime;
|
2021-06-27 12:37:54 +02:00
|
|
|
|
2023-02-21 15:44:43 +01:00
|
|
|
// HostObject that attaches the cache to the lifecycle of the Runtime. On Runtime destroy, we destroy the cache.
|
|
|
|
auto propNameCacheObject = std::make_shared<vision::InvalidateCacheOnDestroy>(jsiRuntime);
|
|
|
|
jsiRuntime.global().setProperty(jsiRuntime,
|
|
|
|
"__visionCameraPropNameCache",
|
|
|
|
jsi::Object::createFromHostObject(jsiRuntime, propNameCacheObject));
|
|
|
|
|
feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronous and always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling it's calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.)
You can use `runAsync` to run a heavy algorithm asynchronous, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
console.log("I'm running at 10 FPS!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread, I can block for longer!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
})
})
}, [])
```
2023-02-15 16:47:09 +01:00
|
|
|
auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
|
|
|
|
__android_log_write(ANDROID_LOG_INFO, TAG, "Setting new Frame Processor...");
|
2021-06-27 12:37:54 +02:00
|
|
|
|
2021-09-29 12:30:50 +02:00
|
|
|
double viewTag = arguments[0].asNumber();
|
|
|
|
const jsi::Value& frameProcessor = arguments[1];
|
|
|
|
this->setFrameProcessor(runtime, static_cast<int>(viewTag), frameProcessor);
|
2021-06-27 12:37:54 +02:00
|
|
|
|
|
|
|
return jsi::Value::undefined();
|
|
|
|
};
|
|
|
|
jsiRuntime.global().setProperty(jsiRuntime,
|
|
|
|
"setFrameProcessor",
|
|
|
|
jsi::Function::createFromHostFunction(
|
|
|
|
jsiRuntime,
|
|
|
|
jsi::PropNameID::forAscii(jsiRuntime,
|
|
|
|
"setFrameProcessor"),
|
|
|
|
2, // viewTag, frameProcessor
|
|
|
|
setFrameProcessor));
|
|
|
|
|
|
|
|
|
feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronous and always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling it's calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.)
You can use `runAsync` to run a heavy algorithm asynchronous, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
console.log("I'm running at 10 FPS!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread, I can block for longer!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
})
})
}, [])
```
2023-02-15 16:47:09 +01:00
|
|
|
auto unsetFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
|
2021-06-27 12:37:54 +02:00
|
|
|
__android_log_write(ANDROID_LOG_INFO, TAG, "Removing Frame Processor...");
|
|
|
|
|
|
|
|
auto viewTag = arguments[0].asNumber();
|
2021-09-29 12:30:50 +02:00
|
|
|
this->unsetFrameProcessor(static_cast<int>(viewTag));
|
2021-06-27 12:37:54 +02:00
|
|
|
|
|
|
|
return jsi::Value::undefined();
|
|
|
|
};
|
|
|
|
jsiRuntime.global().setProperty(jsiRuntime,
|
|
|
|
"unsetFrameProcessor",
|
|
|
|
jsi::Function::createFromHostFunction(
|
|
|
|
jsiRuntime,
|
|
|
|
jsi::PropNameID::forAscii(jsiRuntime,
|
|
|
|
"unsetFrameProcessor"),
|
|
|
|
1, // viewTag
|
|
|
|
unsetFrameProcessor));
|
|
|
|
|
|
|
|
__android_log_write(ANDROID_LOG_INFO, TAG, "Finished installing JSI bindings!");
|
|
|
|
}
|
|
|
|
|
2021-09-29 12:30:50 +02:00
|
|
|
/**
 * Registers a Java Frame Processor Plugin as a callable JS function on the
 * global `FrameProcessorPlugins` object.
 *
 * A plugin named "example_plugin" becomes callable from a worklet as
 * `FrameProcessorPlugins.example_plugin(frame, ...params)`.
 *
 * @param plugin JNI reference to the plugin; a global ref is taken so it
 *               stays alive as long as the JS function exists.
 */
void FrameProcessorRuntimeManager::registerPlugin(alias_ref<JFrameProcessorPlugin::javaobject> plugin) {
  if (_jsRuntime == nullptr) {
    // Same guard as installJSIBindings: without a runtime there is nothing to register into.
    __android_log_write(ANDROID_LOG_ERROR, TAG,
                        "JS-Runtime was null, Frame Processor Plugin could not be registered!");
    return;
  }
  auto& runtime = *_jsRuntime;

  // we need a strong reference on the plugin, make_global does that.
  auto pluginGlobal = make_global(plugin);
  auto pluginName = pluginGlobal->getName();

  __android_log_print(ANDROID_LOG_INFO, TAG, "Installing Frame Processor Plugin \"%s\"...", pluginName.c_str());

  // Lazily create the global FrameProcessorPlugins proxy object.
  if (!runtime.global().hasProperty(runtime, "FrameProcessorPlugins")) {
    runtime.global().setProperty(runtime, "FrameProcessorPlugins", jsi::Object(runtime));
  }
  jsi::Object frameProcessorPlugins = runtime.global().getPropertyAsObject(runtime, "FrameProcessorPlugins");

  auto function = [pluginGlobal](jsi::Runtime& runtime,
                                 const jsi::Value& thisValue,
                                 const jsi::Value* arguments,
                                 size_t count) -> jsi::Value {
    // Validate the first argument — indexing past `count` is undefined behavior,
    // and the plugin requires a Frame as its first parameter.
    if (count < 1 || !arguments[0].isObject()) {
      throw jsi::JSError(runtime, "Frame Processor Plugin: first argument must be a Frame!");
    }

    // Unbox object and get typed HostObject
    auto boxedHostObject = arguments[0].asObject(runtime).asHostObject(runtime);
    auto frameHostObject = dynamic_cast<FrameHostObject*>(boxedHostObject.get());
    if (frameHostObject == nullptr) {
      // The argument is a HostObject, but not a Frame — fail with a JS error
      // instead of dereferencing a null pointer below.
      throw jsi::JSError(runtime, "Frame Processor Plugin: first argument is not a Frame!");
    }

    // parse params - we are offset by `1` because the frame is the first parameter.
    auto params = JArrayClass<jobject>::newArray(count - 1);
    for (size_t i = 1; i < count; i++) {
      params->setElement(i - 1, JSIJNIConversion::convertJSIValueToJNIObject(runtime, arguments[i]));
    }

    // call implemented virtual method
    auto result = pluginGlobal->callback(frameHostObject->frame, params);

    // convert result from JNI to JSI value
    return JSIJNIConversion::convertJNIObjectToJSIValue(runtime, result);
  };

  // Assign it to the Proxy.
  // A FP Plugin called "example_plugin" can be now called from JS using "FrameProcessorPlugins.example_plugin(frame)"
  frameProcessorPlugins.setProperty(runtime,
                                    pluginName.c_str(),
                                    jsi::Function::createFromHostFunction(runtime,
                                                                          jsi::PropNameID::forAscii(runtime, pluginName),
                                                                          1, // frame
                                                                          function));
}
|
|
|
|
|
|
|
|
} // namespace vision
|