//
//  FrameProcessorRuntimeManager.m
//  VisionCamera
//
//  Created by Marc Rousavy on 23.03.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
|
|
|
|
#import "FrameProcessorRuntimeManager.h"
|
|
|
|
#import "FrameProcessorPluginRegistry.h"
|
2023-02-27 03:18:03 -07:00
|
|
|
#import "FrameProcessorPlugin.h"
|
2021-05-06 06:11:55 -06:00
|
|
|
#import "FrameHostObject.h"
|
|
|
|
|
|
|
|
#import <memory>
|
|
|
|
|
|
|
|
#import <React/RCTBridge.h>
|
|
|
|
#import <ReactCommon/RCTTurboModule.h>
|
|
|
|
#import <React/RCTBridge+Private.h>
|
|
|
|
#import <React/RCTUIManager.h>
|
|
|
|
#import <ReactCommon/RCTTurboModuleManager.h>
|
|
|
|
|
2023-02-21 07:00:48 -07:00
|
|
|
#import "WKTJsiWorkletContext.h"
|
|
|
|
#import "WKTJsiWorkletApi.h"
|
|
|
|
#import "WKTJsiWorklet.h"
|
|
|
|
#import "WKTJsiHostObject.h"
|
2021-05-06 06:11:55 -06:00
|
|
|
|
|
|
|
#import "FrameProcessorUtils.h"
|
|
|
|
#import "FrameProcessorCallback.h"
|
|
|
|
#import "../React Utils/JSIUtils.h"
|
2023-02-21 07:44:43 -07:00
|
|
|
#import "../../cpp/JSITypedArray.h"
|
2021-05-06 06:11:55 -06:00
|
|
|
|
|
|
|
// Forward declarations for the Swift classes

/// Stub declaration for the Swift class `CameraQueues` (VisionCamera module).
/// The objc_runtime_name attribute maps this interface onto the mangled Swift
/// runtime name so the Objective-C++ code below links against the Swift
/// implementation without importing the generated Swift header.
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues : NSObject
// Dispatch queue on which camera video frames are delivered; synchronous
// Frame Processor worklets are executed on this queue (see runOnWorklet below).
@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
@end
|
|
|
|
/// Stub declaration for the Swift class `CameraView` (VisionCamera module),
/// forward-declared via its mangled Swift runtime name (see CameraQueues above
/// for the technique).
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView : UIView
// Callback invoked for every delivered camera frame; nil when no Frame
// Processor is currently attached to this view.
@property (nonatomic, copy) FrameProcessorCallback _Nullable frameProcessorCallback;
@end
|
|
|
|
|
|
|
|
@implementation FrameProcessorRuntimeManager {
  // Worklet context used to run Frame Processors on the camera's video queue,
  // synchronously with frame delivery. Created lazily in setupWorkletContext:.
  std::shared_ptr<RNWorklet::JsiWorkletContext> workletContext;
}

- (instancetype)init {
  if (self = [super init]) {
    // Initialize self
  }
  return self;
}

/// Creates the VisionCamera Worklet Context inside the given JS Runtime and
/// installs every registered Frame Processor Plugin as a JSI host function on
/// the global `FrameProcessorPlugins` object.
///
/// @param runtime The main React JS Runtime to install into. Must be accessed
///                on the JS thread.
- (void) setupWorkletContext:(jsi::Runtime&)runtime {
  NSLog(@"FrameProcessorBindings: Creating Worklet Context...");

  auto callInvoker = RCTBridge.currentBridge.jsCallInvoker;

  auto runOnJS = [callInvoker](std::function<void()>&& f) {
    // Run on React JS Runtime
    callInvoker->invokeAsync(std::move(f));
  };
  auto runOnWorklet = [](std::function<void()>&& f) {
    // Run on Frame Processor Worklet Runtime (the camera's video queue, so
    // worklets execute synchronously with frame delivery)
    dispatch_async(CameraQueues.videoQueue, [f = std::move(f)](){
      f();
    });
  };

  workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
                                                                  &runtime,
                                                                  runOnJS,
                                                                  runOnWorklet);

  NSLog(@"FrameProcessorBindings: Worklet Context Created!");

  NSLog(@"FrameProcessorBindings: Installing Frame Processor plugins...");

  jsi::Object frameProcessorPlugins(runtime);

  // Iterate through all registered plugins (+init)
  for (NSString* pluginKey in [FrameProcessorPluginRegistry frameProcessorPlugins]) {
    auto pluginName = [pluginKey UTF8String];

    NSLog(@"FrameProcessorBindings: Installing Frame Processor plugin \"%s\"...", pluginName);
    // Get the Plugin. Keyed subscripting is used instead of KVC's valueForKey:,
    // which misinterprets keys containing special characters (e.g. a leading "@").
    FrameProcessorPlugin* plugin = [FrameProcessorPluginRegistry frameProcessorPlugins][pluginKey];

    // Create the JSI host function
    auto function = [plugin, callInvoker](jsi::Runtime& runtime,
                                          const jsi::Value& thisValue,
                                          const jsi::Value* arguments,
                                          size_t count) -> jsi::Value {
      // Get the first parameter, which is always the native Frame Host Object.
      // Guard against being called from JS without a Frame — reading
      // arguments[0] with count == 0 would be out-of-bounds.
      if (count < 1) {
        throw jsi::JSError(runtime, "Frame Processor Plugins must be called with a Frame as their first argument!");
      }
      auto frameHostObject = arguments[0].asObject(runtime).asHostObject(runtime);
      auto frame = static_cast<FrameHostObject*>(frameHostObject.get());

      // Convert any additional parameters to the Frame Processor to ObjC objects
      auto args = convertJSICStyleArrayToNSArray(runtime,
                                                 arguments + 1, // start at index 1 since first arg = Frame
                                                 count - 1, // use smaller count
                                                 callInvoker);
      // Call the FP Plugin, which might return something.
      id result = [plugin callback:frame->frame withArguments:args];

      // Convert the return value (or null) to a JS Value and return it to JS
      return convertObjCObjectToJSIValue(runtime, result);
    };

    // Assign it to the Proxy.
    // A FP Plugin called "example_plugin" can be now called from JS using "FrameProcessorPlugins.example_plugin(frame)"
    frameProcessorPlugins.setProperty(runtime,
                                      pluginName,
                                      jsi::Function::createFromHostFunction(runtime,
                                                                            jsi::PropNameID::forAscii(runtime, pluginName),
                                                                            1, // frame
                                                                            function));
  }

  // global.FrameProcessorPlugins Proxy
  runtime.global().setProperty(runtime, "FrameProcessorPlugins", frameProcessorPlugins);

  NSLog(@"FrameProcessorBindings: Frame Processor plugins installed!");
}

/// Installs the global `setFrameProcessor` / `unsetFrameProcessor` functions
/// into the current Bridge's JS Runtime, plus the prop-name cache and the
/// Worklet Context. No-op if the Bridge has no runtime yet.
- (void) installFrameProcessorBindings {
  NSLog(@"FrameProcessorBindings: Installing Frame Processor Bindings for Bridge...");
  RCTCxxBridge *cxxBridge = (RCTCxxBridge *)[RCTBridge currentBridge];
  if (!cxxBridge.runtime) {
    return;
  }

  jsi::Runtime& jsiRuntime = *(jsi::Runtime*)cxxBridge.runtime;

  // HostObject that attaches the cache to the lifecycle of the Runtime. On Runtime destroy, we destroy the cache.
  auto propNameCacheObject = std::make_shared<vision::InvalidateCacheOnDestroy>(jsiRuntime);
  jsiRuntime.global().setProperty(jsiRuntime,
                                  "__visionCameraPropNameCache",
                                  jsi::Object::createFromHostObject(jsiRuntime, propNameCacheObject));

  // Install the Worklet Runtime in the main React JS Runtime
  [self setupWorkletContext:jsiRuntime];

  NSLog(@"FrameProcessorBindings: Installing global functions...");

  // setFrameProcessor(viewTag: number, frameProcessor: (frame: Frame) => void)
  auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
    NSLog(@"FrameProcessorBindings: Setting new frame processor...");
    auto viewTag = arguments[0].asNumber();
    auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, arguments[1]);

    RCTExecuteOnMainQueue(^{
      auto currentBridge = [RCTBridge currentBridge];
      // Guard against a torn-down bridge / unmounted view (mirrors
      // unsetFrameProcessor) so we don't build a callback for a nil view.
      if (!currentBridge) return;

      auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
      if (!anonymousView) return;

      auto view = static_cast<CameraView*>(anonymousView);
      auto callback = convertWorkletToFrameProcessorCallback(self->workletContext->getWorkletRuntime(), worklet);
      view.frameProcessorCallback = callback;
    });

    return jsi::Value::undefined();
  };
  jsiRuntime.global().setProperty(jsiRuntime, "setFrameProcessor", jsi::Function::createFromHostFunction(jsiRuntime,
                                                                                                         jsi::PropNameID::forAscii(jsiRuntime, "setFrameProcessor"),
                                                                                                         2, // viewTag, frameProcessor
                                                                                                         setFrameProcessor));

  // unsetFrameProcessor(viewTag: number)
  auto unsetFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
    NSLog(@"FrameProcessorBindings: Removing frame processor...");
    auto viewTag = arguments[0].asNumber();

    RCTExecuteOnMainQueue(^{
      auto currentBridge = [RCTBridge currentBridge];
      if (!currentBridge) return;

      auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
      if (!anonymousView) return;

      auto view = static_cast<CameraView*>(anonymousView);
      // Messaging nil would be a no-op anyway, but the guards above keep the
      // behavior explicit and symmetric with setFrameProcessor.
      view.frameProcessorCallback = nil;
    });

    return jsi::Value::undefined();
  };
  jsiRuntime.global().setProperty(jsiRuntime, "unsetFrameProcessor", jsi::Function::createFromHostFunction(jsiRuntime,
                                                                                                           jsi::PropNameID::forAscii(jsiRuntime, "unsetFrameProcessor"),
                                                                                                           1, // viewTag
                                                                                                           unsetFrameProcessor));

  NSLog(@"FrameProcessorBindings: Finished installing bindings.");
}

@end
|