//
// FrameHostObject.m
// VisionCamera
//
// Created by Marc Rousavy on 22.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#import "FrameHostObject.h"
#import "UIImageOrientation+descriptor.h"
#import "WKTJsiHostObject.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>

#import "../../cpp/JSITypedArray.h"

std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
  std::vector<jsi::PropNameID> result;
  // Ref Management
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));

  if (frame != nil && frame.isValid) {
    // Frame Properties
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("pixelFormat")));
    // Conversion
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
    result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
  }

  return result;
}
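
// NOTE: the names above are exactly the properties the JS-side `frame` object exposes
// inside a Frame Processor. A minimal sketch of reading them from a worklet
// (illustrative only, not part of this file):
//
//   console.log(`${frame.width}x${frame.height} ${frame.pixelFormat} frame`)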

Frame* FrameHostObject::getFrame() {
  Frame* frame = this->frame;
  if (frame == nil || !frame.isValid) {
    throw std::runtime_error("Frame is already closed! "
                             "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n"
                             "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n"
                             "- If you want to do async processing, use `runAsync(...)` instead.\n"
                             "- If you want to use runOnJS, increment its ref-count: `frame.incrementRefCount()`");
  }
  return frame;
}
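
// NOTE: `incrementRefCount` / `decrementRefCount` are the escape hatch the error above
// refers to. A minimal sketch of the JS-side pattern this enables (illustrative only;
// `processLater` is a hypothetical callback that must call `frame.decrementRefCount()`
// once it is done with the Frame):
//
//   frame.incrementRefCount()      // keep the Frame alive past the Frame Processor
//   runOnJS(processLater)(frame)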
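
// Shorthand for the jsi::HostFunctionType lambda signature used by all accessors below.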
#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value

jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
  auto name = propName.utf8(runtime);

  if (name == "toString") {
    auto toString = JSI_FUNC {
      // Lock Frame so it cannot be deallocated while we access it
      std::lock_guard lock(this->_mutex);

      // Print debug description (width, height)
      Frame* frame = this->getFrame();
      NSMutableString* string = [NSMutableString stringWithFormat:@"%lu x %lu %@ Frame", frame.width, frame.height, frame.pixelFormat];
      return jsi::String::createFromUtf8(runtime, string.UTF8String);
    };
    return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
  }
  if (name == "incrementRefCount") {
    auto incrementRefCount = JSI_FUNC {
      // Lock Frame so it cannot be deallocated while we access it
      std::lock_guard lock(this->_mutex);

      // Increment our self-counted ref count by one.
      _refCount++;
      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "incrementRefCount"), 0, incrementRefCount);
  }
  if (name == "decrementRefCount") {
    auto decrementRefCount = JSI_FUNC {
      // Lock Frame so it cannot be deallocated while we access it
      std::lock_guard lock(this->_mutex);

      // Decrement our self-counted ref count by one.
      _refCount--;
      if (_refCount < 1) {
        // ARC will then delete the Frame and the underlying Frame Buffer.
        this->frame = nil;
      }

      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "decrementRefCount"), 0, decrementRefCount);
  }
if (name == "toArrayBuffer") {
|
2024-01-02 05:54:07 -07:00
|
|
|
auto toArrayBuffer = JSI_FUNC {
|
2023-12-29 06:09:56 -07:00
|
|
|
// Lock Frame so it cannot be deallocated while we access it
|
|
|
|
std::lock_guard lock(this->_mutex);
|
|
|
|
|
|
|
|
// Get CPU readable Pixel Buffer from Frame and write it to a jsi::ArrayBuffer
|
|
|
|
Frame* frame = this->getFrame();
|
2023-02-21 07:00:48 -07:00
|
|
|
auto pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
|
|
|
|
auto bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
|
|
|
|
auto height = CVPixelBufferGetHeight(pixelBuffer);
|
2023-08-24 08:37:20 -06:00
|
|
|
|
2023-02-21 07:00:48 -07:00
|
|
|
auto arraySize = bytesPerRow * height;
|
|
|
|
|
|
|
|
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
|
|
|
|
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
|
2023-09-01 11:39:25 -06:00
|
|
|
vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime, arraySize);
|
2023-02-21 07:00:48 -07:00
|
|
|
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
|
|
|
|
}
|
|
|
|
|
2023-09-01 11:39:25 -06:00
|
|
|
auto arrayBufferCache = runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
|
|
|
|
auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache).get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);
|
2023-02-21 07:00:48 -07:00
|
|
|
|
|
|
|
if (arrayBuffer.size(runtime) != arraySize) {
|
2023-09-01 11:39:25 -06:00
|
|
|
arrayBuffer = vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, arraySize);
|
2023-02-21 07:00:48 -07:00
|
|
|
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
|
|
|
|
}
|
|
|
|
|
2023-08-24 08:37:20 -06:00
|
|
|
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
|
2023-09-01 04:58:32 -06:00
|
|
|
auto buffer = (uint8_t*)CVPixelBufferGetBaseAddress(pixelBuffer);
|
2023-02-21 07:00:48 -07:00
|
|
|
arrayBuffer.updateUnsafe(runtime, buffer, arraySize);
|
2023-08-24 08:37:20 -06:00
|
|
|
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
|
2023-02-21 07:00:48 -07:00
|
|
|
|
|
|
|
return arrayBuffer;
|
|
|
|
};
|
2023-09-01 11:39:25 -06:00
|
|
|
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
|
2023-02-21 07:00:48 -07:00
|
|
|
}
|
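
  // NOTE: `toArrayBuffer()` re-uses a single cached Uint8ClampedArray (the global
  // `__frameArrayBufferCache` above), so its contents are overwritten on every call.
  // If the data must outlive the current Frame Processor invocation, the JS side should
  // copy it first; a minimal sketch (illustrative only):
  //
  //   const copy = frame.toArrayBuffer().slice(0)   // copy the bytes before the next Frame overwrites them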

  if (name == "isValid") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    // unsafely access the Frame and try to see if it's valid
    Frame* frame = this->frame;
    return jsi::Value(frame != nil && frame.isValid);
  }
  if (name == "width") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::Value((double)frame.width);
  }
  if (name == "height") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::Value((double)frame.height);
  }
  if (name == "orientation") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    NSString* orientation = [NSString stringWithParsed:frame.orientation];
    return jsi::String::createFromUtf8(runtime, orientation.UTF8String);
  }
  if (name == "isMirrored") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::Value(frame.isMirrored);
  }
  if (name == "timestamp") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::Value(frame.timestamp);
  }
  if (name == "pixelFormat") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::String::createFromUtf8(runtime, frame.pixelFormat.UTF8String);
  }
  if (name == "bytesPerRow") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::Value((double)frame.bytesPerRow);
  }
  if (name == "planesCount") {
    // Lock Frame so it cannot be deallocated while we access it
    std::lock_guard lock(this->_mutex);

    Frame* frame = this->getFrame();
    return jsi::Value((double)frame.planesCount);
  }

  // fallback to base implementation
  return HostObject::get(runtime, propName);
}
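
// NOTE (illustrative sketch, not part of the original file): a FrameHostObject is exposed
// to JS by wrapping it in a jsi::Object via the standard JSI API. Assuming a constructor
// that takes the Frame, this looks roughly like:
//
//   auto hostObject = std::make_shared<FrameHostObject>(frame);
//   auto jsFrame = jsi::Object::createFromHostObject(runtime, hostObject);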