feat: Sync Frame Processors (plus runAsync and runAtTargetFps) (#1472)

Previously, Frame Processors ran on a separate Thread.

Now, Frame Processors run fully synchronously and always at the same FPS as the Camera.

Two new functions have been introduced:

* `runAtTargetFps(fps: number, func: () => void)`: Runs the given function at the given target `fps`, effectively throttling its calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function asynchronously on a separate Frame Processing Thread. A strong reference to the Frame is held for as long as the function takes to execute.

You can use `runAtTargetFps` to throttle calls to a specific API. For example, if your Camera runs at 60 FPS but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`.
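
Conceptually, `runAtTargetFps` is just a timestamp-based throttle. A minimal sketch of that idea (not the actual implementation; `lastCall` stands in for state that persists across frame processor invocations):

```js
// Minimal sketch of a timestamp-based throttle – hypothetical, not VisionCamera's implementation.
// `lastCall` stands in for state shared across frame processor invocations.
const lastCall = { value: 0 }

function runAtTargetFpsSketch(fps, func) {
  'worklet'
  const now = Date.now()
  const intervalMs = 1000 / fps
  if (now - lastCall.value >= intervalMs) {
    // Enough time has passed since the last call – run the function and remember when.
    lastCall.value = now
    return func()
  }
  // Otherwise skip this frame entirely.
  return undefined
}
```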

You can use `runAsync` to run a heavy algorithm asynchronously, so that the Camera is not blocked while your algorithm runs. This is useful if your main synchronous processor draws something while your async processor does some image analysis on the side.
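
Under the hood, the new Frame reference counting (`incrementRefCount` / `decrementRefCount`, added to `FrameHostObject` in this commit) is what keeps the Frame alive while the async function runs. A rough sketch of the idea, assuming a hypothetical `dispatchToAsyncContext` helper that runs a worklet on the async Thread:

```js
// Rough sketch of the idea behind runAsync – hypothetical, not the actual implementation.
// `dispatchToAsyncContext` is a made-up helper that runs a worklet on the async Thread.
function runAsyncSketch(frame, func) {
  'worklet'
  // Hold a strong reference so the Frame Buffer isn't destroyed while the async work runs.
  frame.incrementRefCount()
  dispatchToAsyncContext(() => {
    'worklet'
    try {
      func()
    } finally {
      // Release the reference; once the count reaches zero, the Frame is closed.
      frame.decrementRefCount()
    }
  })
}
```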

You can also combine both functions.

Examples:

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAtTargetFps(10, () => {
    'worklet'
    console.log("I'm running at 10 FPS!")
  })
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAsync(frame, () => {
    'worklet'
    console.log("I'm running on another Thread, I can block for longer!")
  })
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAtTargetFps(10, () => {
    'worklet'
    runAsync(frame, () => {
      'worklet'
      console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
    })
  })
}, [])
```
Author: Marc Rousavy
Date: 2023-02-15 16:47:09 +01:00
Committed by: GitHub
Parent: a0590dccb5
Commit: 30b56153db
30 changed files with 660 additions and 914 deletions

View File

@@ -8,12 +8,13 @@
#include <jni.h>
#include <vector>
#include <string>
#include <JsiHostObject.h>
namespace vision {
using namespace facebook;
FrameHostObject::FrameHostObject(jni::alias_ref<JImageProxy::javaobject> image): frame(make_global(image)) { }
FrameHostObject::FrameHostObject(jni::alias_ref<JImageProxy::javaobject> image): frame(make_global(image)), _refCount(0) { }
FrameHostObject::~FrameHostObject() {
// Hermes' Garbage Collector (Hades GC) calls destructors on a separate Thread
@@ -25,21 +26,24 @@ FrameHostObject::~FrameHostObject() {
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("close")));
// Debugging
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
// Ref Management
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
return result;
}
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propNameId) {
auto name = propNameId.utf8(runtime);
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "toString") {
auto toString = [this] (jsi::Runtime& runtime, const jsi::Value&, const jsi::Value*, size_t) -> jsi::Value {
auto toString = JSI_HOST_FUNCTION_LAMBDA {
if (!this->frame) {
return jsi::String::createFromUtf8(runtime, "[closed frame]");
}
@@ -50,51 +54,53 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
}
if (name == "close") {
auto close = [this] (jsi::Runtime& runtime, const jsi::Value&, const jsi::Value*, size_t) -> jsi::Value {
if (!this->frame) {
throw jsi::JSError(runtime, "Trying to close an already closed frame! Did you call frame.close() twice?");
}
this->close();
if (name == "incrementRefCount") {
auto incrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Increment retain count by one so ARC doesn't destroy the Frame Buffer.
std::lock_guard lock(this->_refCountMutex);
this->_refCount++;
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "close"), 0, close);
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "incrementRefCount"),
0,
incrementRefCount);
}
if (name == "decrementRefCount") {
auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame Buffer.
std::lock_guard lock(this->_refCountMutex);
this->_refCount--;
if (_refCount < 1) {
this->frame->close();
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "decrementRefCount"),
0,
decrementRefCount);
}
if (name == "isValid") {
return jsi::Value(this->frame && this->frame->getIsValid());
}
if (name == "width") {
this->assertIsFrameStrong(runtime, name);
return jsi::Value(this->frame->getWidth());
}
if (name == "height") {
this->assertIsFrameStrong(runtime, name);
return jsi::Value(this->frame->getHeight());
}
if (name == "bytesPerRow") {
this->assertIsFrameStrong(runtime, name);
return jsi::Value(this->frame->getBytesPerRow());
}
if (name == "planesCount") {
this->assertIsFrameStrong(runtime, name);
return jsi::Value(this->frame->getPlanesCount());
}
return jsi::Value::undefined();
}
void FrameHostObject::assertIsFrameStrong(jsi::Runtime& runtime, const std::string& accessedPropName) const {
if (!this->frame) {
auto message = "Cannot get `" + accessedPropName + "`, frame is already closed!";
throw jsi::JSError(runtime, message.c_str());
}
}
void FrameHostObject::close() {
if (this->frame) {
this->frame->close();
}
// fallback to base implementation
return HostObject::get(runtime, propName);
}
} // namespace vision

View File

@@ -9,6 +9,7 @@
#include <fbjni/fbjni.h>
#include <vector>
#include <string>
#include <mutex>
#include "java-bindings/JImageProxy.h"
@@ -25,15 +26,14 @@ class JSI_EXPORT FrameHostObject : public jsi::HostObject {
jsi::Value get(jsi::Runtime &, const jsi::PropNameID &name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) override;
void close();
public:
jni::global_ref<JImageProxy> frame;
private:
static auto constexpr TAG = "VisionCamera";
void assertIsFrameStrong(jsi::Runtime& runtime, const std::string& accessedPropName) const; // NOLINT(runtime/references)
size_t _refCount;
std::mutex _refCountMutex;
};
} // namespace vision

View File

@@ -8,6 +8,7 @@
#include <utility>
#include <string>
#include <JsiWorklet.h>
#include <JsiHostObject.h>
#include "CameraView.h"
#include "FrameHostObject.h"
@@ -36,12 +37,10 @@ FrameProcessorRuntimeManager::FrameProcessorRuntimeManager(jni::alias_ref<FrameP
// Run on Frame Processor Worklet Runtime
scheduler->dispatchAsync(std::move(f));
};
_workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera");
_workletContext->initialize("VisionCamera",
jsRuntime,
runOnJS,
runOnWorklet);
_workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
jsRuntime,
runOnJS,
runOnWorklet);
}
// JNI binding
@@ -96,8 +95,7 @@ void FrameProcessorRuntimeManager::logErrorToJS(const std::string& message) {
void FrameProcessorRuntimeManager::setFrameProcessor(jsi::Runtime& runtime,
int viewTag,
const jsi::Value& frameProcessor) {
__android_log_write(ANDROID_LOG_INFO, TAG,
"Setting new Frame Processor...");
__android_log_write(ANDROID_LOG_INFO, TAG, "Setting new Frame Processor...");
if (!_workletContext) {
throw jsi::JSError(runtime,
@@ -106,13 +104,10 @@ void FrameProcessorRuntimeManager::setFrameProcessor(jsi::Runtime& runtime,
// find camera view
auto cameraView = findCameraViewById(viewTag);
__android_log_write(ANDROID_LOG_INFO, TAG, "Found CameraView!");
// convert jsi::Function to a Worklet (can be shared across runtimes)
__android_log_write(ANDROID_LOG_INFO, TAG, "Creating Worklet...");
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, frameProcessor);
auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
__android_log_write(ANDROID_LOG_INFO, TAG, "Successfully created worklet!");
_workletContext->invokeOnWorkletThread([=](RNWorklet::JsiWorkletContext*, jsi::Runtime& rt) {
// Set Frame Processor as callable C++ lambda - this will then call the Worklet
@@ -142,8 +137,6 @@ void FrameProcessorRuntimeManager::unsetFrameProcessor(int viewTag) {
// call Java method to unset frame processor
cameraView->cthis()->unsetFrameProcessor();
__android_log_write(ANDROID_LOG_INFO, TAG, "Frame Processor removed!");
}
// actual JSI installer
@@ -158,21 +151,8 @@ void FrameProcessorRuntimeManager::installJSIBindings() {
auto& jsiRuntime = *_jsRuntime;
auto setFrameProcessor = [this](jsi::Runtime &runtime,
const jsi::Value &thisValue,
const jsi::Value *arguments,
size_t count) -> jsi::Value {
__android_log_write(ANDROID_LOG_INFO, TAG,
"Setting new Frame Processor...");
if (!arguments[0].isNumber()) {
throw jsi::JSError(runtime,
"Camera::setFrameProcessor: First argument ('viewTag') must be a number!");
}
if (!arguments[1].isObject()) {
throw jsi::JSError(runtime,
"Camera::setFrameProcessor: Second argument ('frameProcessor') must be a function!");
}
auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
__android_log_write(ANDROID_LOG_INFO, TAG, "Setting new Frame Processor...");
double viewTag = arguments[0].asNumber();
const jsi::Value& frameProcessor = arguments[1];
@@ -190,15 +170,8 @@ void FrameProcessorRuntimeManager::installJSIBindings() {
setFrameProcessor));
auto unsetFrameProcessor = [this](jsi::Runtime &runtime,
const jsi::Value &thisValue,
const jsi::Value *arguments,
size_t count) -> jsi::Value {
auto unsetFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
__android_log_write(ANDROID_LOG_INFO, TAG, "Removing Frame Processor...");
if (!arguments[0].isNumber()) {
throw jsi::JSError(runtime,
"Camera::unsetFrameProcessor: First argument ('viewTag') must be a number!");
}
auto viewTag = arguments[0].asNumber();
this->unsetFrameProcessor(static_cast<int>(viewTag));

View File

@@ -31,17 +31,6 @@ fun CameraView.invokeOnError(error: Throwable) {
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", event)
}
fun CameraView.invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: Double, suggestedFps: Double) {
Log.e(CameraView.TAG, "invokeOnFrameProcessorPerformanceSuggestionAvailable(suggestedFps: $suggestedFps):")
val event = Arguments.createMap()
val type = if (suggestedFps > currentFps) "can-use-higher-fps" else "should-use-lower-fps"
event.putString("type", type)
event.putDouble("suggestedFrameProcessorFps", suggestedFps)
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraPerformanceSuggestionAvailable", event)
}
fun CameraView.invokeOnViewReady() {
val event = Arguments.createMap()
val reactContext = context as ReactContext

View File

@@ -25,7 +25,6 @@ import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.*
import com.facebook.react.uimanager.events.RCTEventEmitter
import com.mrousavy.camera.frameprocessor.FrameProcessorPerformanceDataCollector
import com.mrousavy.camera.frameprocessor.FrameProcessorRuntimeManager
import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
@@ -103,13 +102,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
field = value
setOnTouchListener(if (value) touchEventListener else null)
}
var frameProcessorFps = 1.0
set(value) {
field = value
actualFrameProcessorFps = if (value == -1.0) 30.0 else value
lastFrameProcessorPerformanceEvaluation = System.currentTimeMillis()
frameProcessorPerformanceDataCollector.clear()
}
// private properties
private var isMounted = false
@@ -166,16 +158,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
private var minZoom: Float = 1f
private var maxZoom: Float = 1f
private var actualFrameProcessorFps = 30.0
private val frameProcessorPerformanceDataCollector = FrameProcessorPerformanceDataCollector()
private var lastSuggestedFrameProcessorFps = 0.0
private var lastFrameProcessorPerformanceEvaluation = System.currentTimeMillis()
private val isReadyForNewEvaluation: Boolean
get() {
val lastPerformanceEvaluationElapsedTime = System.currentTimeMillis() - lastFrameProcessorPerformanceEvaluation
return lastPerformanceEvaluationElapsedTime > 1000
}
@DoNotStrip
private var mHybridData: HybridData? = null
@@ -480,21 +462,8 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
Log.i(TAG, "Adding ImageAnalysis use-case...")
imageAnalysis = imageAnalysisBuilder.build().apply {
setAnalyzer(cameraExecutor, { image ->
val now = System.currentTimeMillis()
val intervalMs = (1.0 / actualFrameProcessorFps) * 1000.0
if (now - lastFrameProcessorCall > intervalMs) {
lastFrameProcessorCall = now
val perfSample = frameProcessorPerformanceDataCollector.beginPerformanceSampleCollection()
frameProcessorCallback(image)
perfSample.endPerformanceSampleCollection()
}
image.close()
if (isReadyForNewEvaluation) {
// last evaluation was more than a second ago, evaluate again
evaluateNewPerformanceSamples()
}
// Call JS Frame Processor
frameProcessorCallback(image)
})
}
useCases.add(imageAnalysis!!)
@@ -526,22 +495,4 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
}
}
}
private fun evaluateNewPerformanceSamples() {
lastFrameProcessorPerformanceEvaluation = System.currentTimeMillis()
val maxFrameProcessorFps = 30 // TODO: Get maxFrameProcessorFps from ImageAnalyser
val averageFps = 1.0 / frameProcessorPerformanceDataCollector.averageExecutionTimeSeconds
val suggestedFrameProcessorFps = floor(min(averageFps, maxFrameProcessorFps.toDouble()))
if (frameProcessorFps == -1.0) {
// frameProcessorFps="auto"
actualFrameProcessorFps = suggestedFrameProcessorFps
} else {
// frameProcessorFps={someCustomFpsValue}
if (suggestedFrameProcessorFps != lastSuggestedFrameProcessorFps && suggestedFrameProcessorFps != frameProcessorFps) {
invokeOnFrameProcessorPerformanceSuggestionAvailable(frameProcessorFps, suggestedFrameProcessorFps)
lastSuggestedFrameProcessorFps = suggestedFrameProcessorFps
}
}
}
}

View File

@@ -27,7 +27,6 @@ class CameraViewManager(reactContext: ReactApplicationContext) : ViewGroupManage
.put("cameraViewReady", MapBuilder.of("registrationName", "onViewReady"))
.put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized"))
.put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraPerformanceSuggestionAvailable", MapBuilder.of("registrationName", "onFrameProcessorPerformanceSuggestionAvailable"))
.build()
}
@@ -108,13 +107,6 @@ class CameraViewManager(reactContext: ReactApplicationContext) : ViewGroupManage
view.fps = if (fps > 0) fps else null
}
@ReactProp(name = "frameProcessorFps", defaultDouble = 1.0)
fun setFrameProcessorFps(view: CameraView, frameProcessorFps: Double) {
if (view.frameProcessorFps != frameProcessorFps)
addChangedPropToTransaction(view, "frameProcessorFps")
view.frameProcessorFps = frameProcessorFps
}
@ReactProp(name = "hdr")
fun setHdr(view: CameraView, hdr: Boolean?) {
if (view.hdr != hdr)

View File

@@ -1,38 +0,0 @@
package com.mrousavy.camera.frameprocessor
data class PerformanceSampleCollection(val endPerformanceSampleCollection: () -> Unit)
// keep a maximum of `maxSampleSize` historical performance data samples cached.
private const val maxSampleSize = 15
class FrameProcessorPerformanceDataCollector {
private var counter = 0
private var performanceSamples: ArrayList<Double> = ArrayList()
val averageExecutionTimeSeconds: Double
get() = performanceSamples.average()
fun beginPerformanceSampleCollection(): PerformanceSampleCollection {
val begin = System.currentTimeMillis()
return PerformanceSampleCollection {
val end = System.currentTimeMillis()
val seconds = (end - begin) / 1_000.0
val index = counter % maxSampleSize
if (performanceSamples.size > index) {
performanceSamples[index] = seconds
} else {
performanceSamples.add(seconds)
}
counter++
}
}
fun clear() {
counter = 0
performanceSamples.clear()
}
}