//
// Created by Marc on 19/06/2021.
//
#include "FrameHostObject.h"

#include <fbjni/fbjni.h>
#include <jni.h>

#include <cstring>
#include <string>
#include <vector>

#include "JSITypedArray.h"
namespace vision {
|
|
|
|
|
2021-09-24 16:57:12 +02:00
|
|
|
using namespace facebook;
|
|
|
|
|
2023-08-21 12:50:14 +02:00
|
|
|
// Wraps the given Java Frame in a JSI HostObject. Takes a JNI global reference
// so the underlying JFrame stays alive for the lifetime of this HostObject.
FrameHostObject::FrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame): frame(make_global(frame)) { }
|
2021-09-24 16:57:12 +02:00
|
|
|
|
2021-09-29 12:30:50 +02:00
|
|
|
FrameHostObject::~FrameHostObject() {
  // Hermes' Garbage Collector (Hades GC) may run destructors on a Thread that
  // is not attached to JNI. Release the `frame` HybridClass under the JNI
  // class loader, because otherwise JNI cannot call the Java destroy()
  // function during deallocation.
  jni::ThreadScope::WithClassLoader([this]() { this->frame.reset(); });
}
|
|
|
|
|
2021-09-29 12:30:50 +02:00
|
|
|
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
|
2021-06-27 12:37:54 +02:00
|
|
|
std::vector<jsi::PropNameID> result;
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
|
2023-09-01 12:20:17 +02:00
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
|
2023-02-21 15:00:48 +01:00
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
|
2023-08-21 12:50:14 +02:00
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("pixelFormat")));
|
2023-02-21 15:00:48 +01:00
|
|
|
// Conversion
|
feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronous and always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling it's calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.)
You can use `runAsync` to run a heavy algorithm asynchronous, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
console.log("I'm running at 10 FPS!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread, I can block for longer!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
})
})
}, [])
```
2023-02-15 16:47:09 +01:00
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
|
2023-02-21 15:00:48 +01:00
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
|
feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronous and always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling it's calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.)
You can use `runAsync` to run a heavy algorithm asynchronous, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
console.log("I'm running at 10 FPS!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread, I can block for longer!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
})
})
}, [])
```
2023-02-15 16:47:09 +01:00
|
|
|
// Ref Management
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
|
|
|
|
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
|
2021-06-27 12:37:54 +02:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronous and always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling it's calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.)
You can use `runAsync` to run a heavy algorithm asynchronous, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
console.log("I'm running at 10 FPS!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread, I can block for longer!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
})
})
}, [])
```
2023-02-15 16:47:09 +01:00
|
|
|
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
|
|
|
|
auto name = propName.utf8(runtime);
|
2021-06-27 12:37:54 +02:00
|
|
|
|
2023-08-23 12:42:38 +02:00
|
|
|
if (name == "incrementRefCount") {
|
|
|
|
jsi::HostFunctionType incrementRefCount = [=](jsi::Runtime& runtime,
|
|
|
|
const jsi::Value& thisArg,
|
|
|
|
const jsi::Value* args,
|
|
|
|
size_t count) -> jsi::Value {
|
|
|
|
// Increment retain count by one.
|
2023-09-01 12:20:17 +02:00
|
|
|
this->frame->incrementRefCount();
|
2023-08-23 12:42:38 +02:00
|
|
|
return jsi::Value::undefined();
|
|
|
|
};
|
|
|
|
return jsi::Function::createFromHostFunction(runtime,
|
|
|
|
jsi::PropNameID::forUtf8(runtime, "incrementRefCount"),
|
|
|
|
0,
|
|
|
|
incrementRefCount);
|
|
|
|
}
|
|
|
|
if (name == "decrementRefCount") {
|
|
|
|
auto decrementRefCount = [=](jsi::Runtime& runtime,
|
|
|
|
const jsi::Value& thisArg,
|
|
|
|
const jsi::Value* args,
|
|
|
|
size_t count) -> jsi::Value {
|
|
|
|
// Decrement retain count by one. If the retain count is zero, the Frame gets closed.
|
2023-09-01 12:20:17 +02:00
|
|
|
this->frame->decrementRefCount();
|
2023-08-23 12:42:38 +02:00
|
|
|
return jsi::Value::undefined();
|
|
|
|
};
|
|
|
|
return jsi::Function::createFromHostFunction(runtime,
|
|
|
|
jsi::PropNameID::forUtf8(runtime, "decrementRefCount"),
|
|
|
|
0,
|
|
|
|
decrementRefCount);
|
|
|
|
}
|
2021-06-27 12:37:54 +02:00
|
|
|
if (name == "toString") {
|
2023-08-23 12:42:38 +02:00
|
|
|
jsi::HostFunctionType toString = [=](jsi::Runtime& runtime,
|
|
|
|
const jsi::Value& thisArg,
|
|
|
|
const jsi::Value* args,
|
|
|
|
size_t count) -> jsi::Value {
|
2021-07-06 10:08:44 +02:00
|
|
|
if (!this->frame) {
|
|
|
|
return jsi::String::createFromUtf8(runtime, "[closed frame]");
|
|
|
|
}
|
2023-09-01 12:20:17 +02:00
|
|
|
auto width = this->frame->getWidth();
|
|
|
|
auto height = this->frame->getHeight();
|
2021-06-27 12:37:54 +02:00
|
|
|
auto str = std::to_string(width) + " x " + std::to_string(height) + " Frame";
|
|
|
|
return jsi::String::createFromUtf8(runtime, str);
|
|
|
|
};
|
|
|
|
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
|
|
|
|
}
|
2023-02-21 15:00:48 +01:00
|
|
|
if (name == "toArrayBuffer") {
|
2023-08-23 12:42:38 +02:00
|
|
|
jsi::HostFunctionType toArrayBuffer = [=](jsi::Runtime& runtime,
|
|
|
|
const jsi::Value& thisArg,
|
|
|
|
const jsi::Value* args,
|
|
|
|
size_t count) -> jsi::Value {
|
2023-09-01 12:20:17 +02:00
|
|
|
auto buffer = this->frame->toByteBuffer();
|
|
|
|
if (!buffer->isDirect()) {
|
|
|
|
throw std::runtime_error("Failed to get byte content of Frame - array is not direct ByteBuffer!");
|
|
|
|
}
|
|
|
|
auto size = buffer->getDirectSize();
|
2023-02-21 15:00:48 +01:00
|
|
|
|
|
|
|
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
|
|
|
|
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
|
2023-08-23 14:23:31 +02:00
|
|
|
vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime, size);
|
2023-02-21 15:00:48 +01:00
|
|
|
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get from global JS cache
|
|
|
|
auto arrayBufferCache = runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
|
|
|
|
auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache).get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);
|
2023-08-23 14:23:31 +02:00
|
|
|
if (arrayBuffer.size(runtime) != size) {
|
|
|
|
arrayBuffer = vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, size);
|
2023-02-21 15:00:48 +01:00
|
|
|
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
|
|
|
|
}
|
|
|
|
|
|
|
|
// directly write to C++ JSI ArrayBuffer
|
|
|
|
auto destinationBuffer = arrayBuffer.data(runtime);
|
2023-09-01 12:20:17 +02:00
|
|
|
memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);
|
2023-02-21 15:00:48 +01:00
|
|
|
|
|
|
|
return arrayBuffer;
|
|
|
|
};
|
|
|
|
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
|
|
|
|
}
|
2021-06-27 12:37:54 +02:00
|
|
|
|
|
|
|
if (name == "isValid") {
|
2023-09-01 12:20:17 +02:00
|
|
|
return jsi::Value(this->frame && this->frame->getIsValid());
|
2021-06-27 12:37:54 +02:00
|
|
|
}
|
|
|
|
if (name == "width") {
|
2023-09-01 12:20:17 +02:00
|
|
|
return jsi::Value(this->frame->getWidth());
|
2021-06-27 12:37:54 +02:00
|
|
|
}
|
|
|
|
if (name == "height") {
|
2023-09-01 12:20:17 +02:00
|
|
|
return jsi::Value(this->frame->getHeight());
|
2021-06-27 12:37:54 +02:00
|
|
|
}
|
2023-02-21 15:00:48 +01:00
|
|
|
if (name == "isMirrored") {
|
2023-09-01 12:20:17 +02:00
|
|
|
return jsi::Value(this->frame->getIsMirrored());
|
2023-02-21 15:00:48 +01:00
|
|
|
}
|
|
|
|
if (name == "orientation") {
|
2023-09-01 12:20:17 +02:00
|
|
|
auto string = this->frame->getOrientation();
|
2023-02-21 15:00:48 +01:00
|
|
|
return jsi::String::createFromUtf8(runtime, string->toStdString());
|
|
|
|
}
|
2023-08-21 12:50:14 +02:00
|
|
|
if (name == "pixelFormat") {
|
2023-09-01 12:20:17 +02:00
|
|
|
auto string = this->frame->getPixelFormat();
|
2023-08-21 12:50:14 +02:00
|
|
|
return jsi::String::createFromUtf8(runtime, string->toStdString());
|
|
|
|
}
|
2023-02-21 15:00:48 +01:00
|
|
|
if (name == "timestamp") {
|
2023-09-01 12:20:17 +02:00
|
|
|
return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
|
2023-02-21 15:00:48 +01:00
|
|
|
}
|
2021-06-27 12:37:54 +02:00
|
|
|
if (name == "bytesPerRow") {
|
2023-09-01 12:20:17 +02:00
|
|
|
return jsi::Value(this->frame->getBytesPerRow());
|
|
|
|
}
|
|
|
|
if (name == "planesCount") {
|
|
|
|
return jsi::Value(this->frame->getPlanesCount());
|
2021-06-27 12:37:54 +02:00
|
|
|
}
|
|
|
|
|
feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronous and always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling it's calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.)
You can use `runAsync` to run a heavy algorithm asynchronous, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
console.log("I'm running at 10 FPS!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread, I can block for longer!")
})
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running at 60 FPS!")
runAtTargetFps(10, () => {
'worklet'
runAsync(frame, () => {
'worklet'
console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
})
})
}, [])
```
2023-02-15 16:47:09 +01:00
|
|
|
// fallback to base implementation
|
|
|
|
return HostObject::get(runtime, propName);
|
2021-06-27 12:37:54 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
} // namespace vision
|