//
//  FrameProcessorUtils.m
//  VisionCamera
//
//  Created by Marc Rousavy on 15.03.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//
|
#import "FrameProcessorUtils.h"
|
|
|
|
#import <chrono>
|
|
|
|
#import <memory>
|
2022-01-10 08:37:47 -07:00
|
|
|
#import <regex>
|
2021-06-28 12:44:50 -06:00
|
|
|
|
2021-05-06 06:11:55 -06:00
|
|
|
#import "FrameHostObject.h"
|
2021-06-09 02:57:05 -06:00
|
|
|
#import "Frame.h"
|
2021-05-06 06:11:55 -06:00
|
|
|
|
2021-06-28 12:44:50 -06:00
|
|
|
#import <React/RCTBridge.h>
|
|
|
|
#import <React/RCTBridge+Private.h>
|
|
|
|
#import "JSConsoleHelper.h"
|
|
|
|
#import <ReactCommon/RCTTurboModule.h>
|
|
|
|
|
2023-02-21 07:00:48 -07:00
|
|
|
#import "WKTJsiWorklet.h"
|
|
|
|
|
|
|
|
#import "RNSkPlatformContext.h"
|
|
|
|
#import "RNSkiOSPlatformContext.h"
|
|
|
|
#import "JsiSkCanvas.h"
|
2021-06-01 05:07:57 -06:00
|
|
|
|
2023-02-13 07:22:45 -07:00
|
|
|
// Converts a Worklet (a parsed JS function) into a native FrameProcessorCallback
// block that can be invoked once per Camera frame.
//
// @param runtime The JS runtime the Worklet will be called into. NOTE(review):
//                the returned block captures `runtime` by reference — the caller
//                must guarantee the Runtime outlives the block.
// @param worklet The Worklet to wrap.
// @return An Objective-C block taking the native Frame and an optional SkCanvas
//         pointer (non-null only for Skia Frame Processors).
FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet) {
  // Wrap the Worklet in an invoker so it can be called into the given Runtime.
  auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);

  // Create the Skia Canvas host object once and cache it; it is re-pointed at
  // the native SkCanvas on every call instead of being re-allocated per frame.
  auto skiaPlatformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(&runtime, RCTBridge.currentBridge);
  auto canvasHostObject = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);

  // Converts the Worklet to a callable Objective-C block function.
  return ^(Frame* frame, void* skiaCanvas) {
    try {
      // Create the HostObject which exposes the Frame to JS.
      auto frameHostObject = std::make_shared<FrameHostObject>(frame);

      // Update the cached Canvas object. skiaCanvas is only non-null for Skia
      // Frame Processors that are allowed to draw on this frame.
      if (skiaCanvas != nullptr) {
        // static_cast instead of a C-style cast: same behavior, but the
        // compiler rejects unrelated-pointer conversions.
        canvasHostObject->setCanvas(static_cast<SkCanvas*>(skiaCanvas));
        frameHostObject->canvas = canvasHostObject;
      } else {
        frameHostObject->canvas = nullptr;
      }

      auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
      jsi::Value jsValue(std::move(argument));
      // Call the Worklet with the Frame JS Host Object as the single argument.
      workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);

      // After the sync Frame Processor finished executing, remove the Canvas
      // from that Frame instance — it can no longer draw.
      frameHostObject->canvas = nullptr;
    } catch (jsi::JSError& jsError) {
      // A JS error occurred — format it (indenting the stack trace lines for
      // readability) and forward it to the JS console when possible.
      auto stack = std::regex_replace(jsError.getStack(), std::regex("\n"), "\n ");
      auto message = [NSString stringWithFormat:@"Frame Processor threw an error: %s\nIn: %s", jsError.getMessage().c_str(), stack.c_str()];

      RCTBridge* bridge = [RCTBridge currentBridge];
      if (bridge != nil && bridge.jsCallInvoker != nullptr) {
        // Log asynchronously on the JS thread via the bridge's console.
        bridge.jsCallInvoker->invokeAsync([bridge, message]() {
          auto logFn = [JSConsoleHelper getLogFunctionForBridge:bridge];
          logFn(RCTLogLevelError, message);
        });
      } else {
        // No bridge available (e.g. during teardown) — fall back to NSLog.
        NSLog(@"%@", message);
      }
    }
  };
}
|