feat: Sync Frame Processors (plus runAsync and runAtTargetFps) (#1472)

Before, Frame Processors ran on a separate Thread.

After, Frame Processors run fully synchronously and always at the same FPS as the Camera.

Two new functions have been introduced:

* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling its calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.

You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`).
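
For example, a minimal sketch of that pattern (assuming a hypothetical `detectFaces` Frame Processor Plugin is installed; swap in whichever plugin you actually use):

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  runAtTargetFps(25, () => {
    'worklet'
    // Hypothetical plugin call - only runs ~25 times per second even if the Camera runs at 60 FPS.
    const faces = detectFaces(frame)
    console.log(`Detected ${faces.length} faces`)
  })
}, [])
```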

You can use `runAsync` to run a heavy algorithm asynchronously, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something and your async processor does image analysis on the side.
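
As a rough sketch of that split (both `drawSomething` and `analyzeImage` are hypothetical plugins, named only for illustration):

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  // Light work stays synchronous, so the Camera pipeline is never blocked.
  drawSomething(frame)

  // Heavy work runs on the async context; the Frame is kept alive until this function returns.
  runAsync(frame, () => {
    'worklet'
    const result = analyzeImage(frame)
    console.log(`Analysis result: ${JSON.stringify(result)}`)
  })
}, [])
```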

You can also combine both functions.

Examples:

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAtTargetFps(10, () => {
    'worklet'
    console.log("I'm running at 10 FPS!")
  })
}, [])
```



```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAsync(frame, () => {
    'worklet'
    console.log("I'm running on another Thread, I can block for longer!")
  })
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAtTargetFps(10, () => {
    'worklet'
    runAsync(frame, () => {
      'worklet'
      console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
    })
  })
}, [])
```
Author: Marc Rousavy (committed via GitHub)
Date: 2023-02-15 16:47:09 +01:00
Commit: 30b56153db (parent: a0590dccb5)
30 changed files with 660 additions and 914 deletions


```diff
@@ -17,20 +17,13 @@ public class CameraQueues: NSObject {
                                                      autoreleaseFrequency: .inherit,
                                                      target: nil)
-  /// The serial execution queue for output processing of videos for recording.
+  /// The serial execution queue for output processing of videos for recording or synchronous frame processing.
   @objc public static let videoQueue = DispatchQueue(label: "mrousavy/VisionCamera.video",
                                                      qos: .userInteractive,
                                                      attributes: [],
                                                      autoreleaseFrequency: .inherit,
                                                      target: nil)
-  /// The serial execution queue for output processing of videos for frame processing.
-  @objc public static let frameProcessorQueue = DispatchQueue(label: "mrousavy/VisionCamera.frame-processor",
-                                                              qos: .userInteractive,
-                                                              attributes: [],
-                                                              autoreleaseFrequency: .inherit,
-                                                              target: nil)
   /// The serial execution queue for output processing of audio buffers.
   @objc public static let audioQueue = DispatchQueue(label: "mrousavy/VisionCamera.audio",
                                                      qos: .userInteractive,
```


```diff
@@ -190,8 +190,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
   }

   public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
+    // Video Recording runs in the same queue
     if isRecording {
-      // Write Video / Audio frame to file
       guard let recordingSession = recordingSession else {
        invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
        return
@@ -211,54 +211,9 @@
     }

     if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
-      // check if last frame was x nanoseconds ago, effectively throttling FPS
-      let frameTime = UInt64(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds * 1_000_000_000.0)
-      let lastFrameProcessorCallElapsedTime = frameTime - lastFrameProcessorCall
-      let secondsPerFrame = 1.0 / actualFrameProcessorFps
-      let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0
-      if lastFrameProcessorCallElapsedTime >= UInt64(nanosecondsPerFrame) {
-        if !isRunningFrameProcessor {
-          // we're not in the middle of executing the Frame Processor, so prepare for next call.
-          CameraQueues.frameProcessorQueue.async {
-            self.isRunningFrameProcessor = true
-            let perfSample = self.frameProcessorPerformanceDataCollector.beginPerformanceSampleCollection()
-            let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
-            frameProcessor(frame)
-            perfSample.endPerformanceSampleCollection()
-            self.isRunningFrameProcessor = false
-          }
-          lastFrameProcessorCall = frameTime
-        } else {
-          // we're still in the middle of executing a Frame Processor for a previous frame, so a frame was dropped.
-          ReactLogger.log(level: .warning, message: "The Frame Processor took so long to execute that a frame was dropped.")
-        }
-      }
-      if isReadyForNewEvaluation {
-        // last evaluation was more than 1sec ago, evaluate again
-        evaluateNewPerformanceSamples()
-      }
-    }
-  }
-
-  private func evaluateNewPerformanceSamples() {
-    lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
-    guard let videoDevice = videoDeviceInput?.device else { return }
-    guard frameProcessorPerformanceDataCollector.hasEnoughData else { return }
-    let maxFrameProcessorFps = Double(videoDevice.activeVideoMinFrameDuration.timescale) * Double(videoDevice.activeVideoMinFrameDuration.value)
-    let averageFps = 1.0 / frameProcessorPerformanceDataCollector.averageExecutionTimeSeconds
-    let suggestedFrameProcessorFps = max(floor(min(averageFps, maxFrameProcessorFps)), 1)
-    if frameProcessorFps.intValue == -1 {
-      // frameProcessorFps="auto"
-      actualFrameProcessorFps = suggestedFrameProcessorFps
-    } else {
-      // frameProcessorFps={someCustomFpsValue}
-      invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: frameProcessorFps.doubleValue,
-                                                           suggestedFps: suggestedFrameProcessorFps)
+      // Call the JavaScript Frame Processor func (worklet)
+      let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
+      frameProcessor(frame)
     }
   }
@@ -270,11 +225,6 @@
     }
   }

-  private var isReadyForNewEvaluation: Bool {
-    let lastPerformanceEvaluationElapsedTime = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorPerformanceEvaluation.uptimeNanoseconds
-    return lastPerformanceEvaluationElapsedTime > 1_000_000_000
-  }
-
   /**
    Gets the orientation of the CameraView's images (CMSampleBuffers).
   */
```


```diff
@@ -52,7 +52,6 @@ public final class CameraView: UIView {
   // props that require format reconfiguring
   @objc var format: NSDictionary?
   @objc var fps: NSNumber?
-  @objc var frameProcessorFps: NSNumber = -1.0 // "auto"
   @objc var hdr: NSNumber? // nullable bool
   @objc var lowLightBoost: NSNumber? // nullable bool
   @objc var colorSpace: NSString?
@@ -65,7 +64,6 @@
   // events
   @objc var onInitialized: RCTDirectEventBlock?
   @objc var onError: RCTDirectEventBlock?
-  @objc var onFrameProcessorPerformanceSuggestionAvailable: RCTDirectEventBlock?
   @objc var onViewReady: RCTDirectEventBlock?
   // zoom
   @objc var enableZoomGesture = false {
@@ -105,13 +103,6 @@
   internal let videoQueue = CameraQueues.videoQueue
   internal let audioQueue = CameraQueues.audioQueue
-  /// Specifies whether the frameProcessor() function is currently executing. used to drop late frames.
-  internal var isRunningFrameProcessor = false
-  internal let frameProcessorPerformanceDataCollector = FrameProcessorPerformanceDataCollector()
-  internal var actualFrameProcessorFps = 30.0
-  internal var lastSuggestedFrameProcessorFps = 0.0
-  internal var lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
   /// Returns whether the AVCaptureSession is currently running (reflected by isActive)
   var isRunning: Bool {
     return captureSession.isRunning
@@ -259,18 +250,6 @@
         }
       }
     }
-    // Frame Processor FPS Configuration
-    if changedProps.contains("frameProcessorFps") {
-      if frameProcessorFps.doubleValue == -1 {
-        // "auto"
-        actualFrameProcessorFps = 30.0
-      } else {
-        actualFrameProcessorFps = frameProcessorFps.doubleValue
-      }
-      lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
-      frameProcessorPerformanceDataCollector.clear()
-    }
   }

   internal final func setTorchMode(_ torchMode: String) {
@@ -343,18 +322,4 @@
     guard let onInitialized = onInitialized else { return }
     onInitialized([String: Any]())
   }
-
-  internal final func invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: Double, suggestedFps: Double) {
-    ReactLogger.log(level: .info, message: "Frame Processor Performance Suggestion available!")
-    guard let onFrameProcessorPerformanceSuggestionAvailable = onFrameProcessorPerformanceSuggestionAvailable else { return }
-    if lastSuggestedFrameProcessorFps == suggestedFps { return }
-    if suggestedFps == currentFps { return }
-    onFrameProcessorPerformanceSuggestionAvailable([
-      "type": suggestedFps > currentFps ? "can-use-higher-fps" : "should-use-lower-fps",
-      "suggestedFrameProcessorFps": suggestedFps,
-    ])
-    lastSuggestedFrameProcessorFps = suggestedFps
-  }
 }
```


```diff
@@ -35,7 +35,6 @@ RCT_EXPORT_VIEW_PROPERTY(enableFrameProcessor, BOOL);
 // device format
 RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);
-RCT_EXPORT_VIEW_PROPERTY(frameProcessorFps, NSNumber);
 RCT_EXPORT_VIEW_PROPERTY(hdr, NSNumber); // nullable bool
 RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool
 RCT_EXPORT_VIEW_PROPERTY(colorSpace, NSString);
@@ -49,7 +48,6 @@ RCT_EXPORT_VIEW_PROPERTY(orientation, NSString);
 // Camera View Events
 RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock);
-RCT_EXPORT_VIEW_PROPERTY(onFrameProcessorPerformanceSuggestionAvailable, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onViewReady, RCTDirectEventBlock);
 // Camera View Functions
```


```diff
@@ -16,16 +16,12 @@ using namespace facebook;
 class JSI_EXPORT FrameHostObject: public jsi::HostObject {
 public:
-  explicit FrameHostObject(Frame* frame): frame(frame) {}
+  explicit FrameHostObject(Frame* frame): frame(frame) { }

 public:
   jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
   std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;

-  void close();

 public:
   Frame* frame;
-
-private:
-  void assertIsFrameStrong(jsi::Runtime& runtime, const std::string& accessedPropName);
 };
```


```diff
@@ -9,16 +9,21 @@
 #import "FrameHostObject.h"
 #import <Foundation/Foundation.h>
 #import <jsi/jsi.h>
+#import "JsiHostObject.h"
+#import "JsiSharedValue.h"

 std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
   std::vector<jsi::PropNameID> result;
-  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
-  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
-  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("close")));
+  // Debugging
+  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
+  // Ref Management
+  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
+  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
+  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
   return result;
 }
@@ -26,7 +31,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
   auto name = propName.utf8(runtime);

   if (name == "toString") {
-    auto toString = [this] (jsi::Runtime& runtime, const jsi::Value&, const jsi::Value*, size_t) -> jsi::Value {
+    auto toString = JSI_HOST_FUNCTION_LAMBDA {
       if (this->frame == nil) {
         return jsi::String::createFromUtf8(runtime, "[closed frame]");
       }
@@ -39,60 +44,55 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
     };
     return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
   }
-  if (name == "close") {
-    auto close = [this] (jsi::Runtime& runtime, const jsi::Value&, const jsi::Value*, size_t) -> jsi::Value {
-      if (this->frame == nil) {
-        throw jsi::JSError(runtime, "Trying to close an already closed frame! Did you call frame.close() twice?");
-      }
-      this->close();
+  if (name == "incrementRefCount") {
+    auto incrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
+      // Increment retain count by one so ARC doesn't destroy the Frame Buffer.
+      CFRetain(frame.buffer);
       return jsi::Value::undefined();
     };
-    return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "close"), 0, close);
+    return jsi::Function::createFromHostFunction(runtime,
+                                                 jsi::PropNameID::forUtf8(runtime, "incrementRefCount"),
+                                                 0,
+                                                 incrementRefCount);
   }
+  if (name == "decrementRefCount") {
+    auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
+      // Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame Buffer.
+      CFRelease(frame.buffer);
+      return jsi::Value::undefined();
+    };
+    return jsi::Function::createFromHostFunction(runtime,
+                                                 jsi::PropNameID::forUtf8(runtime, "decrementRefCount"),
+                                                 0,
+                                                 decrementRefCount);
+  }
   if (name == "isValid") {
-    auto isValid = frame != nil && CMSampleBufferIsValid(frame.buffer);
+    auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
     return jsi::Value(isValid);
   }
   if (name == "width") {
-    this->assertIsFrameStrong(runtime, name);
     auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto width = CVPixelBufferGetWidth(imageBuffer);
     return jsi::Value((double) width);
   }
   if (name == "height") {
-    this->assertIsFrameStrong(runtime, name);
     auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto height = CVPixelBufferGetHeight(imageBuffer);
     return jsi::Value((double) height);
   }
   if (name == "bytesPerRow") {
-    this->assertIsFrameStrong(runtime, name);
     auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
     return jsi::Value((double) bytesPerRow);
   }
   if (name == "planesCount") {
-    this->assertIsFrameStrong(runtime, name);
     auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
     return jsi::Value((double) planesCount);
   }
-  return jsi::Value::undefined();
-}
-
-void FrameHostObject::assertIsFrameStrong(jsi::Runtime &runtime, const std::string &accessedPropName) {
-  if (frame == nil) {
-    auto message = "Cannot get `" + accessedPropName + "`, frame is already closed!";
-    throw jsi::JSError(runtime, message.c_str());
-  }
-}
-
-void FrameHostObject::close() {
-  if (frame != nil) {
-    CMSampleBufferInvalidate(frame.buffer);
-    // ARC will hopefully delete it lol
-    this->frame = nil;
-  }
-}
+
+  // fallback to base implementation
+  return HostObject::get(runtime, propName);
+}
```


```diff
@@ -1,67 +0,0 @@
-//
-//  FrameProcessorPerformanceDataCollector.swift
-//  VisionCamera
-//
-//  Created by Marc Rousavy on 30.08.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-import Foundation
-
-// keep a maximum of `maxSampleSize` historical performance data samples cached.
-private let maxSampleSize = 15
-
-// MARK: - PerformanceSampleCollection
-
-struct PerformanceSampleCollection {
-  var endPerformanceSampleCollection: () -> Void
-
-  init(end: @escaping () -> Void) {
-    endPerformanceSampleCollection = end
-  }
-}
-
-// MARK: - FrameProcessorPerformanceDataCollector
-
-class FrameProcessorPerformanceDataCollector {
-  private var performanceSamples: [Double] = []
-  private var counter = 0
-  private var lastEvaluation = -1
-
-  var hasEnoughData: Bool {
-    return !performanceSamples.isEmpty
-  }
-
-  var averageExecutionTimeSeconds: Double {
-    let sum = performanceSamples.reduce(0, +)
-    let average = sum / Double(performanceSamples.count)
-    lastEvaluation = counter
-    return average
-  }
-
-  func beginPerformanceSampleCollection() -> PerformanceSampleCollection {
-    let begin = DispatchTime.now()
-
-    return PerformanceSampleCollection {
-      let end = DispatchTime.now()
-      let seconds = Double(end.uptimeNanoseconds - begin.uptimeNanoseconds) / 1_000_000_000.0
-
-      let index = self.counter % maxSampleSize
-      if self.performanceSamples.count > index {
-        self.performanceSamples[index] = seconds
-      } else {
-        self.performanceSamples.append(seconds)
-      }
-
-      self.counter += 1
-    }
-  }
-
-  func clear() {
-    counter = 0
-    performanceSamples.removeAll()
-  }
-}
```


```diff
@@ -22,6 +22,7 @@
 #import "JsiWorkletContext.h"
 #import "JsiWorkletApi.h"
 #import "JsiWorklet.h"
+#import "JsiHostObject.h"

 #import "FrameProcessorUtils.h"
 #import "FrameProcessorCallback.h"
@@ -30,7 +31,7 @@
 // Forward declarations for the Swift classes
 __attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
 @interface CameraQueues : NSObject
-@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull frameProcessorQueue;
+@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
 @end
 __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
 @interface CameraView : UIView
@@ -38,6 +39,7 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
 @end

 @implementation FrameProcessorRuntimeManager {
+  // Running Frame Processors on camera's video thread (synchronously)
   std::shared_ptr<RNWorklet::JsiWorkletContext> workletContext;
 }
@@ -59,16 +61,15 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
   };
   auto runOnWorklet = [](std::function<void()>&& f) {
     // Run on Frame Processor Worklet Runtime
-    dispatch_async(CameraQueues.frameProcessorQueue, [f = std::move(f)](){
+    dispatch_async(CameraQueues.videoQueue, [f = std::move(f)](){
       f();
     });
   };
-  workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera");
-  workletContext->initialize("VisionCamera",
-                             &runtime,
-                             runOnJS,
-                             runOnWorklet);
+  workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
+                                                                  &runtime,
+                                                                  runOnJS,
+                                                                  runOnWorklet);

   NSLog(@"FrameProcessorBindings: Worklet Context Created!");
@@ -136,28 +137,17 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
   NSLog(@"FrameProcessorBindings: Installing global functions...");

   // setFrameProcessor(viewTag: number, frameProcessor: (frame: Frame) => void)
-  auto setFrameProcessor = [self](jsi::Runtime& runtime,
-                                  const jsi::Value& thisValue,
-                                  const jsi::Value* arguments,
-                                  size_t count) -> jsi::Value {
+  auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
     NSLog(@"FrameProcessorBindings: Setting new frame processor...");
     if (!arguments[0].isNumber()) throw jsi::JSError(runtime, "Camera::setFrameProcessor: First argument ('viewTag') must be a number!");
     if (!arguments[1].isObject()) throw jsi::JSError(runtime, "Camera::setFrameProcessor: Second argument ('frameProcessor') must be a function!");

     auto viewTag = arguments[0].asNumber();
-    NSLog(@"FrameProcessorBindings: Converting JSI Function to Worklet...");
     auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, arguments[1]);

-    RCTExecuteOnMainQueue([=]() {
+    RCTExecuteOnMainQueue(^{
       auto currentBridge = [RCTBridge currentBridge];
       auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
       auto view = static_cast<CameraView*>(anonymousView);
-      NSLog(@"FrameProcessorBindings: Converting worklet to Objective-C callback...");
-      view.frameProcessorCallback = convertWorkletToFrameProcessorCallback(workletContext->getWorkletRuntime(), worklet);
-      NSLog(@"FrameProcessorBindings: Frame processor set!");
+      auto callback = convertWorkletToFrameProcessorCallback(self->workletContext->getWorkletRuntime(), worklet);
+      view.frameProcessorCallback = callback;
     });

     return jsi::Value::undefined();
@@ -168,12 +158,8 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
                                                    setFrameProcessor));

   // unsetFrameProcessor(viewTag: number)
-  auto unsetFrameProcessor = [](jsi::Runtime& runtime,
-                                const jsi::Value& thisValue,
-                                const jsi::Value* arguments,
-                                size_t count) -> jsi::Value {
+  auto unsetFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
     NSLog(@"FrameProcessorBindings: Removing frame processor...");
     if (!arguments[0].isNumber()) throw jsi::JSError(runtime, "Camera::unsetFrameProcessor: First argument ('viewTag') must be a number!");
     auto viewTag = arguments[0].asNumber();

     RCTExecuteOnMainQueue(^{
@@ -185,7 +171,6 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
       auto view = static_cast<CameraView*>(anonymousView);
       view.frameProcessorCallback = nil;
-      NSLog(@"FrameProcessorBindings: Frame processor removed!");
     });

     return jsi::Value::undefined();
```


```diff
@@ -28,13 +28,15 @@ FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runt
   // Converts a Worklet to a callable Objective-C block function
   return ^(Frame* frame) {
-    auto frameHostObject = std::make_shared<FrameHostObject>(frame);
     try {
-      // Call JS Frame Processor function with boxed Frame Host Object
+      // Box the Frame to a JS Host Object
+      auto frameHostObject = std::make_shared<FrameHostObject>(frame);
       auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
       jsi::Value jsValue(std::move(argument));
+      // Call the Worklet with the Frame JS Host Object as an argument
       workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
     } catch (jsi::JSError& jsError) {
       // JS Error occured, print it to console.
       auto stack = std::regex_replace(jsError.getStack(), std::regex("\n"), "\n ");
       auto message = [NSString stringWithFormat:@"Frame Processor threw an error: %s\nIn: %s", jsError.getMessage().c_str(), stack.c_str()];
@@ -48,11 +50,5 @@ FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runt
       NSLog(@"%@", message);
     }
   }
-
-  // Manually free the buffer because:
-  // 1. we are sure we don't need it anymore, the frame processor worklet has finished executing.
-  // 2. we don't know when the JS runtime garbage collects this object, it might be holding it for a few more frames
-  //    which then blocks the camera queue from pushing new frames (memory limit)
-  frameHostObject->close();
   };
 }
```


```diff
@@ -138,7 +138,6 @@
     B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
     B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
     B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
-    B8948BDF26DCEE2B00B430E2 /* FrameProcessorPerformanceDataCollector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameProcessorPerformanceDataCollector.swift; sourceTree = "<group>"; };
     B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
     B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
     B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
@@ -273,7 +272,6 @@
       B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
       B80416F026AB16E8000DEB6A /* VisionCameraScheduler.mm */,
       B80416F126AB16F3000DEB6A /* VisionCameraScheduler.h */,
-      B8948BDF26DCEE2B00B430E2 /* FrameProcessorPerformanceDataCollector.swift */,
     );
     path = "Frame Processor";
     sourceTree = "<group>";
```