feat: Add zero-copy SharedArray type to Frame Processor Plugins (#2383)

* feat: Create `TypedArray` class for Frame Processor Plugins

* Type

* feat: Pass `VisionCameraProxy` along (BREAKING)

* feat: Finish implementation

* Log a bit

* feat: Successfully convert JSI <> JNI buffers

* Wrap buffer

* fix: Fix using wrong Runtime

* feat: Add docs

* add zero copy example

* Format C++

* Create iOS base

* feat: Finish iOS implementation

* chore: Format

* fix: Use `NSData` instead of `NSMutableData`

* Format

* fix: Fix build when Frame Processors are disabled

* chore: Rename `TypedArray` to `SharedArray`

* fix: Fix Swift typings for Array

* Remove a few default inits

* fix: Fix Android build

* fix: Use `NSInteger`

* Update SharedArray.mm

* fix: Expose bytes directly on iOS (NSData was immutable)
Marc Rousavy 2024-01-12 16:00:36 +01:00 committed by GitHub
parent 56cecaa814
commit 29fe98cc44
35 changed files with 491 additions and 65 deletions


@ -44,6 +44,7 @@ Similar to a TurboModule, the Frame Processor Plugin Registry API automatically
| `{}` | `NSDictionary*` | `Map<String, Object>` |
| `undefined` / `null` | `nil` | `null` |
| `(any, any) => void` | [`RCTResponseSenderBlock`][4] | `(Object, Object) -> void` |
| `ArrayBuffer` | [`SharedArray`][7] | [`SharedArray`][8] |
| [`Frame`][1] | [`Frame*`][2] | [`Frame`][3] |
### Return values
@ -207,3 +208,5 @@ Your Frame Processor Plugins have to be fast. Use the FPS Graph (`enableFpsGraph
[4]: https://github.com/facebook/react-native/blob/9a43eac7a32a6ba3164a048960101022a92fcd5a/React/Base/RCTBridgeModule.h#L20-L24
[5]: https://developer.apple.com/documentation/coremedia/cmsamplebuffer
[6]: https://developer.android.com/reference/androidx/camera/core/ImageProxy
[7]: https://github.com/mrousavy/react-native-vision-camera/blob/main/package/ios/Frame%20Processor/SharedArray.h
[8]: https://github.com/mrousavy/react-native-vision-camera/blob/main/package/android/src/main/java/com/mrousavy/camera/frameprocessor/SharedArray.java
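
The new `ArrayBuffer` ↔ `SharedArray` row in the conversion table above is the core of this change: a plugin can now hand raw bytes to JS without copying them. A minimal sketch of the JS side, assuming a hypothetical plugin called `detectFaces` that returns its buffer under a `faces` key:

```ts
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'

// Plugin name and result key are hypothetical, for illustration only.
const plugin = VisionCameraProxy.initFrameProcessorPlugin('detectFaces')

export function detectFaces(frame: Frame): Uint8Array {
  'worklet'
  if (plugin == null) throw new Error('Failed to load Frame Processor Plugin "detectFaces"!')
  const result = plugin.call(frame, {}) as unknown as { faces: ArrayBuffer }
  // The SharedArray returned by the native plugin arrives here as a plain ArrayBuffer;
  // wrapping it in a typed-array view reads the same memory, no copy is made.
  return new Uint8Array(result.faces)
}
```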


@ -60,9 +60,10 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.mrousavy.camera.frameprocessor.Frame;
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin;
import com.mrousavy.camera.frameprocessor.VisionCameraProxy;
public class FaceDetectorFrameProcessorPlugin extends FrameProcessorPlugin {
FaceDetectorFrameProcessorPlugin(@Nullable Map<String, Object> options) {}
FaceDetectorFrameProcessorPlugin(@NonNull VisionCameraProxy proxy, @Nullable Map<String, Object> options) {}
@Nullable
@Override
@ -89,7 +90,7 @@ import com.mrousavy.camera.frameprocessor.FrameProcessorPluginRegistry;
public class FaceDetectorFrameProcessorPluginPackage implements ReactPackage {
// highlight-start
static {
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces", options -> new FaceDetectorFrameProcessorPlugin(options));
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces", FaceDetectorFrameProcessorPlugin::new);
}
// highlight-end
@ -135,8 +136,9 @@ The Frame Processor Plugin will be exposed to JS through the `VisionCameraProxy`
```kotlin
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
import com.mrousavy.camera.frameprocessor.VisionCameraProxy
class FaceDetectorFrameProcessorPlugin(options: Map<String, Any>?): FrameProcessorPlugin(options) {
class FaceDetectorFrameProcessorPlugin(proxy: VisionCameraProxy, options: Map<String, Any>?): FrameProcessorPlugin() {
override fun callback(frame: Frame, arguments: Map<String, Any>?): Any? {
// highlight-next-line
@ -161,8 +163,8 @@ class FaceDetectorFrameProcessorPluginPackage : ReactPackage {
// highlight-start
companion object {
init {
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces") { options ->
FaceDetectorFrameProcessorPlugin(options)
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces") { proxy, options ->
FaceDetectorFrameProcessorPlugin(proxy, options)
}
}
}


@ -51,8 +51,9 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
@implementation FaceDetectorFrameProcessorPlugin
- (instancetype) initWithOptions:(NSDictionary* _Nullable)options; {
self = [super initWithOptions:options];
- (instancetype) initWithProxy:(VisionCameraProxyHolder*)proxy
withOptions:(NSDictionary* _Nullable)options {
self = [super initWithProxy:proxy withOptions:options];
return self;
}


@ -54,6 +54,7 @@ Pod::Spec.new do |s|
hasWorklets ? "ios/Frame Processor/FrameProcessor.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorPlugin.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorPluginRegistry.h" : "",
hasWorklets ? "ios/Frame Processor/SharedArray.h" : "",
hasWorklets ? "ios/Frame Processor/VisionCameraProxy.h" : "",
hasWorklets ? "cpp/**/*.{cpp}" : "",
]


@ -34,6 +34,7 @@ add_library(
src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
src/main/cpp/frameprocessor/JSIJNIConversion.cpp
src/main/cpp/frameprocessor/VisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp


@ -1,4 +1,5 @@
#include "JFrameProcessor.h"
#include "JSharedArray.h"
#include "JVisionCameraProxy.h"
#include "JVisionCameraScheduler.h"
#include "VideoPipeline.h"
@ -14,6 +15,7 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
vision::JFrameProcessor::registerNatives();
vision::JSharedArray::registerNatives();
#endif
});
}


@ -15,6 +15,8 @@
#include "FrameHostObject.h"
#include "JFrame.h"
#include "JSITypedArray.h"
#include "JSharedArray.h"
namespace vision {
@ -56,8 +58,24 @@ jni::local_ref<jobject> JSIJNIConversion::convertJSIValueToJNIObject(jsi::Runtim
arrayList->add(jniItem);
}
return arrayList;
} else if (valueAsObject.isArrayBuffer(runtime)) {
// ArrayBuffer/TypedArray
TypedArrayBase array = getTypedArray(runtime, valueAsObject);
return JSharedArray::create(runtime, std::move(array));
} else if (valueAsObject.isHostObject(runtime)) {
throw std::runtime_error("You can't pass HostObjects here.");
if (valueAsObject.isHostObject<FrameHostObject>(runtime)) {
// Frame
auto frame = valueAsObject.getHostObject<FrameHostObject>(runtime);
return jni::make_local(frame->frame);
} else {
throw std::runtime_error("The given HostObject is not supported by a Frame Processor Plugin.");
}
} else {
// Map<String, Object>
@ -75,7 +93,7 @@ jni::local_ref<jobject> JSIJNIConversion::convertJSIValueToJNIObject(jsi::Runtim
}
} else {
auto stringRepresentation = value.toString(runtime).utf8(runtime);
throw std::runtime_error("Failed to convert jsi::Value to JNI value - unsupported type!" + stringRepresentation);
throw std::runtime_error("Failed to convert jsi::Value to JNI value - unsupported type! " + stringRepresentation);
}
}
@ -154,6 +172,12 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime& runtime, c
// box into HostObject
auto hostObject = std::make_shared<FrameHostObject>(frame);
return jsi::Object::createFromHostObject(runtime, hostObject);
} else if (object->isInstanceOf(JSharedArray::javaClassStatic())) {
// SharedArray
auto sharedArray = static_ref_cast<JSharedArray::javaobject>(object);
std::shared_ptr<TypedArrayBase> array = sharedArray->cthis()->getTypedArray();
return array->getBuffer(runtime);
}
auto type = object->getClass()->toString();
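
The new `isArrayBuffer` branch in `convertJSIValueToJNIObject` above covers the opposite direction: an `ArrayBuffer` passed as a plugin argument is wrapped into a `JSharedArray` (and its `ByteBuffer`) without copying. A hedged sketch of the matching JS call site; the plugin name and argument key are invented for this example:

```ts
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'

const plugin = VisionCameraProxy.initFrameProcessorPlugin('processBuffer') // hypothetical plugin

export function processBuffer(frame: Frame): void {
  'worklet'
  // Allocated inside the worklet, so the buffer lives on the runtime the conversion code above sees.
  const scratch = new ArrayBuffer(16)
  new Uint8Array(scratch).fill(0)
  // On Android this argument reaches the plugin as a SharedArray backed by the same bytes.
  plugin?.call(frame, { scratch })
}
```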


@ -0,0 +1,69 @@
//
// Created by Marc Rousavy on 12.01.24.
//
#include "JSharedArray.h"
#include <android/log.h>
namespace vision {
using namespace facebook;
TypedArrayKind getTypedArrayKind(int unsafeEnumValue) {
return static_cast<TypedArrayKind>(unsafeEnumValue);
}
jni::local_ref<JSharedArray::javaobject> JSharedArray::create(jsi::Runtime& runtime, TypedArrayBase array) {
return newObjectCxxArgs(runtime, std::make_shared<TypedArrayBase>(std::move(array)));
}
jni::global_ref<jni::JByteBuffer> JSharedArray::wrapInByteBuffer(jsi::Runtime& runtime, std::shared_ptr<TypedArrayBase> typedArray) {
jsi::ArrayBuffer arrayBuffer = typedArray->getBuffer(runtime);
__android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping ArrayBuffer in a JNI ByteBuffer...");
auto byteBuffer = jni::JByteBuffer::wrapBytes(arrayBuffer.data(runtime), arrayBuffer.size(runtime));
__android_log_print(ANDROID_LOG_INFO, TAG, "Successfully created TypedArray (JNI Size: %i)!", byteBuffer->getDirectSize());
return jni::make_global(byteBuffer);
}
JSharedArray::JSharedArray(jsi::Runtime& runtime, std::shared_ptr<TypedArrayBase> array) {
_array = array;
_byteBuffer = wrapInByteBuffer(runtime, _array);
}
JSharedArray::JSharedArray(const jni::alias_ref<JSharedArray::jhybridobject>& javaThis,
const jni::alias_ref<JVisionCameraProxy::javaobject>& proxy, int dataType, int size) {
_javaPart = jni::make_global(javaThis);
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
jsi::Runtime& runtime = proxy->cthis()->getWorkletRuntime();
#else
jsi::Runtime& runtime = *proxy->cthis()->getJSRuntime();
#endif
TypedArrayKind kind = getTypedArrayKind(dataType);
__android_log_print(ANDROID_LOG_INFO, TAG, "Allocating ArrayBuffer with size %i and type %i...", size, dataType);
_array = std::make_shared<TypedArrayBase>(runtime, size, kind);
_byteBuffer = wrapInByteBuffer(runtime, _array);
}
void JSharedArray::registerNatives() {
registerHybrid({
makeNativeMethod("initHybrid", JSharedArray::initHybrid),
makeNativeMethod("getByteBuffer", JSharedArray::getByteBuffer),
});
}
jni::local_ref<jni::JByteBuffer> JSharedArray::getByteBuffer() {
return jni::make_local(_byteBuffer);
}
std::shared_ptr<TypedArrayBase> JSharedArray::getTypedArray() {
return _array;
}
jni::local_ref<JSharedArray::jhybriddata> JSharedArray::initHybrid(jni::alias_ref<jhybridobject> javaThis,
jni::alias_ref<JVisionCameraProxy::javaobject> proxy, jint type,
jint size) {
return makeCxxInstance(javaThis, proxy, type, size);
}
} // namespace vision


@ -0,0 +1,47 @@
//
// Created by Marc Rousavy on 12.01.24.
//
#pragma once
#include "JSITypedArray.h"
#include "JVisionCameraProxy.h"
#include <fbjni/ByteBuffer.h>
#include <fbjni/fbjni.h>
#include <jni.h>
namespace vision {
using namespace facebook;
class JSharedArray : public jni::HybridClass<JSharedArray> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/SharedArray;";
static void registerNatives();
public:
static jni::local_ref<JSharedArray::javaobject> create(jsi::Runtime& runtime, TypedArrayBase array);
public:
jni::local_ref<jni::JByteBuffer> getByteBuffer();
std::shared_ptr<TypedArrayBase> getTypedArray();
private:
jni::global_ref<jni::JByteBuffer> wrapInByteBuffer(jsi::Runtime& runtime, std::shared_ptr<TypedArrayBase> typedArray);
private:
static auto constexpr TAG = "TypedArray";
friend HybridBase;
jni::global_ref<javaobject> _javaPart;
jni::global_ref<jni::JByteBuffer> _byteBuffer;
std::shared_ptr<TypedArrayBase> _array;
private:
explicit JSharedArray(jsi::Runtime& runtime, std::shared_ptr<TypedArrayBase> array);
explicit JSharedArray(const jni::alias_ref<jhybridobject>& javaThis, const jni::alias_ref<JVisionCameraProxy::javaobject>& proxy,
int dataType, int size);
static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> javaThis,
jni::alias_ref<JVisionCameraProxy::javaobject> proxy, jint dataType, jint size);
};
} // namespace vision


@ -37,6 +37,12 @@ public:
return _runtime;
}
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
jsi::Runtime& getWorkletRuntime() {
return _workletContext->getWorkletRuntime();
}
#endif
private:
friend HybridBase;
jni::global_ref<JVisionCameraProxy::javaobject> _javaPart;


@ -23,9 +23,8 @@ public abstract class FrameProcessorPlugin {
* The initializer of this Frame Processor Plugin.
* This is called every time this Frame Processor Plugin is loaded from the JS side (`initFrameProcessorPlugin(..)`).
* Optionally override this method to implement custom initialization logic.
* @param options An options dictionary passed from the JS side, or null if none.
*/
public FrameProcessorPlugin(@Nullable Map<String, Object> options) {}
public FrameProcessorPlugin() { }
/**
* The actual Frame Processor Plugin's implementation that runs when `plugin.call(..)` is called in the JS Frame Processor.


@ -2,6 +2,7 @@ package com.mrousavy.camera.frameprocessor;
import android.util.Log;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.facebook.proguard.annotations.DoNotStrip;
import java.util.Map;
@ -24,7 +25,7 @@ public class FrameProcessorPluginRegistry {
@DoNotStrip
@Keep
public static FrameProcessorPlugin getPlugin(String name, Map<String, Object> options) {
public static FrameProcessorPlugin getPlugin(String name, VisionCameraProxy proxy, Map<String, Object> options) {
Log.i(TAG, "Looking up Frame Processor Plugin \"" + name + "\"...");
PluginInitializer initializer = Plugins.get(name);
if (initializer == null) {
@ -32,10 +33,10 @@ public class FrameProcessorPluginRegistry {
return null;
}
Log.i(TAG, "Frame Processor Plugin \"" + name + "\" found! Initializing...");
return initializer.initializePlugin(options);
return initializer.initializePlugin(proxy, options);
}
public interface PluginInitializer {
FrameProcessorPlugin initializePlugin(@Nullable Map<String, Object> options);
FrameProcessorPlugin initializePlugin(@NonNull VisionCameraProxy proxy, @Nullable Map<String, Object> options);
}
}


@ -0,0 +1,59 @@
package com.mrousavy.camera.frameprocessor;
import androidx.annotation.Keep;
import com.facebook.jni.HybridData;
import com.facebook.proguard.annotations.DoNotStrip;
import java.nio.ByteBuffer;
/**
* A JSI TypedArray/ArrayBuffer implementation used for passing buffers between JS and Native without copying data.
* ByteBuffers are used for efficient data transfer.
*
* @noinspection JavaJniMissingFunction
*/
public final class SharedArray {
@DoNotStrip
@Keep
private final HybridData mHybridData;
@DoNotStrip
@Keep
public SharedArray(HybridData hybridData) {
mHybridData = hybridData;
}
/**
* Allocate a new SharedArray. Use `getByteBuffer` to obtain a reference to the direct ByteBuffer for writing.
* @param proxy The VisionCamera Proxy from the Frame Processor Plugin's initializer.
* @param dataType The ArrayBuffer's data type. `Type.Int8Array` = `Int8Array` in JS
* @param size The size of the ArrayBuffer.
*/
public SharedArray(VisionCameraProxy proxy, Type dataType, int size) {
mHybridData = initHybrid(proxy, dataType.ordinal(), size);
}
/**
* Gets the direct ByteBuffer that can be used to directly update the JSI ArrayBuffer.
*/
public native ByteBuffer getByteBuffer();
private native HybridData initHybrid(VisionCameraProxy proxy, int dataType, int size);
/**
* The Type of the SharedArray.
*/
public enum Type {
// Values start at 0 and need to match with JSITypedArray.h::TypedArrayKind
Int8Array,
Int16Array,
Int32Array,
Uint8Array,
Uint8ClampedArray,
Uint16Array,
Uint32Array,
Float32Array,
Float64Array,
}
}


@ -72,7 +72,7 @@ class VisionCameraProxy(context: ReactApplicationContext) {
@DoNotStrip
@Keep
fun initFrameProcessorPlugin(name: String, options: Map<String, Any>): FrameProcessorPlugin =
FrameProcessorPluginRegistry.getPlugin(name, options)
FrameProcessorPluginRegistry.getPlugin(name, this, options)
// private C++ funcs
private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData


@ -5,15 +5,21 @@ import android.util.Log;
import com.mrousavy.camera.frameprocessor.Frame;
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin;
import com.mrousavy.camera.frameprocessor.SharedArray;
import com.mrousavy.camera.frameprocessor.VisionCameraProxy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ExampleFrameProcessorPlugin extends FrameProcessorPlugin {
SharedArray _sharedArray;
@Override
public Object callback(@NotNull Frame frame, @Nullable Map<String, Object> params) {
if (params == null) return null;
@ -37,11 +43,17 @@ public class ExampleFrameProcessorPlugin extends FrameProcessorPlugin {
array.add(17.38);
map.put("example_array", array);
ByteBuffer byteBuffer = _sharedArray.getByteBuffer();
byteBuffer.put(0, (byte)(Math.random() * 100));
map.put("example_array_buffer", _sharedArray);
return map;
}
ExampleFrameProcessorPlugin(@Nullable Map<String, Object> options) {
super(options);
ExampleFrameProcessorPlugin(VisionCameraProxy proxy, @Nullable Map<String, Object> options) {
super();
_sharedArray = new SharedArray(proxy, SharedArray.Type.Uint8Array, 5);
Log.d("ExamplePlugin", "ExampleFrameProcessorPlugin initialized with options: " + options);
}
}


@ -3,8 +3,9 @@ package com.mrousavy.camera.example
import android.util.Log
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
import com.mrousavy.camera.frameprocessor.VisionCameraProxy
class ExampleKotlinFrameProcessorPlugin(options: Map<String, Any>?): FrameProcessorPlugin(options) {
class ExampleKotlinFrameProcessorPlugin(proxy: VisionCameraProxy, options: Map<String, Any>?): FrameProcessorPlugin() {
init {
Log.d("ExampleKotlinPlugin", "ExampleKotlinFrameProcessorPlugin initialized with options: " + options?.toString())
}


@ -20,8 +20,8 @@ import com.mrousavy.camera.frameprocessor.FrameProcessorPluginRegistry;
public class MainApplication extends Application implements ReactApplication {
// Register the Frame Processor Plugins for our app
static {
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin", options -> new ExampleFrameProcessorPlugin(options));
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_kotlin_swift_plugin", options -> new ExampleKotlinFrameProcessorPlugin(options));
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin", ExampleFrameProcessorPlugin::new);
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_kotlin_swift_plugin", ExampleKotlinFrameProcessorPlugin::new);
}
private final ReactNativeHost mReactNativeHost =
@ -33,7 +33,6 @@ public class MainApplication extends Application implements ReactApplication {
@Override
protected List<ReactPackage> getPackages() {
@SuppressWarnings("UnnecessaryLocalVariable")
List<ReactPackage> packages = new PackageList(this).getPackages();
// Packages that cannot be autolinked yet can be added manually here, for VisionCameraExample:
packages.add(new CameraPackage());


@ -10,18 +10,25 @@
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
#import <VisionCamera/Frame.h>
#import <VisionCamera/SharedArray.h>
// Example for an Objective-C Frame Processor plugin
@interface ExampleFrameProcessorPlugin : FrameProcessorPlugin
@end
@implementation ExampleFrameProcessorPlugin
@implementation ExampleFrameProcessorPlugin {
SharedArray* _sharedArray;
}
- (instancetype)initWithOptions:(NSDictionary* _Nullable)options
{
self = [super initWithOptions:options];
NSLog(@"ExampleFrameProcessorPlugin initialized with options: %@", options);
- (instancetype)initWithProxy:(VisionCameraProxyHolder*)proxy
withOptions:(NSDictionary* _Nullable)options {
if (self = [super initWithProxy:proxy withOptions:options]) {
_sharedArray = [[SharedArray alloc] initWithProxy:proxy
type:Int8Array
size:5];
NSLog(@"ExampleFrameProcessorPlugin initialized with options: %@", options);
}
return self;
}
@ -33,6 +40,9 @@
NSLog(@"ExamplePlugin: -> %@ (%@)", param == nil ? @"(nil)" : [param description], NSStringFromClass([param classForCoder]));
}
uint8_t* data = _sharedArray.data;
data[0] = (uint8_t)(random() * 100);
return @{
@"example_str": @"Test",
@"example_bool": @(YES),
@ -41,7 +51,8 @@
@"Hello",
@(YES),
@17.38
]
],
@"example_array_buffer": _sharedArray
};
}


@ -11,8 +11,8 @@ import VisionCamera
// Example for a Swift Frame Processor plugin
@objc(ExampleSwiftFrameProcessorPlugin)
public class ExampleSwiftFrameProcessorPlugin: FrameProcessorPlugin {
public override init(options: [AnyHashable: Any]! = [:]) {
super.init(options: options)
public override init(proxy: VisionCameraProxyHolder, options: [AnyHashable: Any]! = [:]) {
super.init(proxy: proxy, options: options)
print("ExampleSwiftFrameProcessorPlugin initialized with options: \(String(describing: options))")
}


@ -484,7 +484,7 @@ PODS:
- libwebp (~> 1.0)
- SDWebImage/Core (~> 5.10)
- SocketRocket (0.6.1)
- VisionCamera (3.6.17):
- VisionCamera (3.7.1):
- React
- React-callinvoker
- React-Core
@ -724,9 +724,9 @@ SPEC CHECKSUMS:
SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d
SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d
SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
VisionCamera: 361df29347b7b7ecc47b3d173daa17751a11ffc1
VisionCamera: ac444079a315b38ec664cf77ed548c384554f0ca
Yoga: 4c3aa327e4a6a23eeacd71f61c81df1bcdf677d5
PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
COCOAPODS: 1.11.3
COCOAPODS: 1.14.3


@ -4,6 +4,7 @@ const plugin = VisionCameraProxy.initFrameProcessorPlugin('example_plugin')
interface Result {
example_array: (string | number | boolean)[]
example_array_buffer: ArrayBuffer
example_str: string
example_bool: boolean
example_double: number
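
With `example_array_buffer` added to the `Result` interface above, the example frame processor can read the byte the native plugins write at index 0. A short sketch that assumes the `plugin` constant and `Result` interface from this file; the wrapper function name is made up:

```ts
export function examplePlugin(frame: Frame): void {
  'worklet'
  if (plugin == null) throw new Error('Failed to load Frame Processor Plugin "example_plugin"!')
  const result = plugin.call(frame, {}) as unknown as Result
  // Zero-copy view over the SharedArray the native plugin allocated in its initializer.
  const bytes = new Uint8Array(result.example_array_buffer)
  console.log(`example_array_buffer[0] = ${bytes[0]}`)
}
```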


@ -19,5 +19,6 @@
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import "Frame.h"
#import "FrameProcessor.h"
#import "SharedArray.h"
#import "VisionCameraProxy.h"
#endif


@ -16,6 +16,8 @@
- (instancetype _Nonnull)initWithBuffer:(CMSampleBufferRef _Nonnull)buffer orientation:(UIImageOrientation)orientation;
- (instancetype)init NS_UNAVAILABLE;
@property(nonatomic, readonly) CMSampleBufferRef _Nonnull buffer;
@property(nonatomic, readonly) UIImageOrientation orientation;


@ -21,6 +21,8 @@
@interface FrameProcessor : NSObject
- (instancetype)init NS_UNAVAILABLE;
#ifdef __cplusplus
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context;


@ -9,6 +9,7 @@
#pragma once
#import "Frame.h"
#import "VisionCameraProxy.h"
#import <Foundation/Foundation.h>
/**
@ -28,9 +29,13 @@
* This is called every time this Frame Processor Plugin is loaded from the JS side (`initFrameProcessorPlugin(..)`).
* Optionally override this method to implement custom initialization logic.
* - Parameters:
* - proxy: The VisionCameraProxy instance for using the Frame Processor Context, e.g. to initialize SharedArrays.
* - options: An options dictionary passed from the JS side, or `nil` if none.
*/
- (instancetype _Nonnull)initWithOptions:(NSDictionary* _Nullable)options;
- (instancetype _Nonnull)initWithProxy:(VisionCameraProxyHolder* _Nonnull)proxy
withOptions:(NSDictionary* _Nullable)options NS_SWIFT_NAME(init(proxy:options:));
- (instancetype)init NS_UNAVAILABLE;
/**
* The actual Frame Processor Plugin's implementation that runs when `plugin.call(..)` is called in the JS Frame Processor.
@ -52,10 +57,11 @@
#define VISION_EXPORT_FRAME_PROCESSOR(frame_processor_class, frame_processor_plugin_name) \
+(void)load { \
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@ #frame_processor_plugin_name \
withInitializer:^FrameProcessorPlugin*(NSDictionary* _Nullable options) { \
return [[frame_processor_class alloc] initWithOptions:options]; \
}]; \
[FrameProcessorPluginRegistry \
addFrameProcessorPlugin:@ #frame_processor_plugin_name \
withInitializer:^FrameProcessorPlugin*(VisionCameraProxyHolder* _Nonnull proxy, NSDictionary* _Nullable options) { \
return [[frame_processor_class alloc] initWithProxy:proxy withOptions:options]; \
}]; \
}
#define VISION_EXPORT_SWIFT_FRAME_PROCESSOR(frame_processor_class, frame_processor_plugin_name) \
@ -67,8 +73,9 @@
\
__attribute__((constructor)) static void VISION_CONCAT(initialize_, frame_processor_plugin_name)(void) { \
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@ #frame_processor_plugin_name \
withInitializer:^FrameProcessorPlugin* _Nonnull(NSDictionary* _Nullable options) { \
return [[frame_processor_class alloc] initWithOptions:options]; \
withInitializer:^FrameProcessorPlugin* _Nonnull(VisionCameraProxyHolder* _Nonnull proxy, \
NSDictionary* _Nullable options) { \
return [[frame_processor_class alloc] initWithProxy:proxy withOptions:options]; \
}]; \
} \
\


@ -11,7 +11,7 @@
// Base implementation (empty)
@implementation FrameProcessorPlugin
- (instancetype)initWithOptions:(NSDictionary* _Nullable)options {
- (instancetype)initWithProxy:(VisionCameraProxyHolder* _Nonnull)proxy withOptions:(NSDictionary* _Nullable)options {
self = [super init];
return self;
}


@ -10,14 +10,18 @@
#import "Frame.h"
#import "FrameProcessorPlugin.h"
#import "VisionCameraProxy.h"
#import <Foundation/Foundation.h>
@interface FrameProcessorPluginRegistry : NSObject
typedef FrameProcessorPlugin* _Nonnull (^PluginInitializerFunction)(NSDictionary* _Nullable options);
typedef FrameProcessorPlugin* _Nonnull (^PluginInitializerFunction)(VisionCameraProxyHolder* _Nonnull proxy,
NSDictionary* _Nullable options);
+ (void)addFrameProcessorPlugin:(NSString* _Nonnull)name withInitializer:(PluginInitializerFunction _Nonnull)pluginInitializer;
+ (FrameProcessorPlugin* _Nullable)getPlugin:(NSString* _Nonnull)name withOptions:(NSDictionary* _Nullable)options;
+ (FrameProcessorPlugin* _Nullable)getPlugin:(NSString* _Nonnull)name
withProxy:(VisionCameraProxyHolder* _Nonnull)proxy
withOptions:(NSDictionary* _Nullable)options;
@end


@ -31,7 +31,9 @@
NSLog(@"Successfully registered Frame Processor Plugin \"%@\"!", name);
}
+ (FrameProcessorPlugin*)getPlugin:(NSString* _Nonnull)name withOptions:(NSDictionary* _Nullable)options {
+ (FrameProcessorPlugin*)getPlugin:(NSString* _Nonnull)name
withProxy:(VisionCameraProxyHolder* _Nonnull)proxy
withOptions:(NSDictionary* _Nullable)options {
NSLog(@"Looking up Frame Processor Plugin \"%@\"...", name);
PluginInitializerFunction initializer = [[FrameProcessorPluginRegistry frameProcessorPlugins] objectForKey:name];
if (initializer == nil) {
@ -40,7 +42,7 @@
}
NSLog(@"Frame Processor Plugin \"%@\" found! Initializing...", name);
return initializer(options);
return initializer(proxy, options);
}
@end


@ -8,6 +8,7 @@
#pragma once
#import "../Frame Processor/SharedArray.h"
#import <React/RCTBridgeModule.h>
#import <ReactCommon/CallInvoker.h>
#import <jsi/jsi.h>
@ -32,6 +33,9 @@ jsi::Object convertNSDictionaryToJSIObject(jsi::Runtime& runtime, NSDictionary*
// NSArray -> []
jsi::Array convertNSArrayToJSIArray(jsi::Runtime& runtime, NSArray* value);
// SharedArray -> ArrayBuffer
jsi::Object convertSharedArrayToJSIArrayBuffer(jsi::Runtime& runtime, SharedArray* sharedArray);
// id -> ???
jsi::Value convertObjCObjectToJSIValue(jsi::Runtime& runtime, id value);


@ -18,6 +18,8 @@
#import "JSINSObjectConversion.h"
#import "../Frame Processor/Frame.h"
#import "../Frame Processor/FrameHostObject.h"
#import "../Frame Processor/SharedArray.h"
#import "JSITypedArray.h"
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
#import <ReactCommon/CallInvoker.h>
@ -58,6 +60,11 @@ jsi::Array convertNSArrayToJSIArray(jsi::Runtime& runtime, NSArray* value) {
return result;
}
jsi::Object convertSharedArrayToJSIArrayBuffer(jsi::Runtime& runtime, SharedArray* sharedArray) {
std::shared_ptr<vision::TypedArrayBase> array = sharedArray.typedArray;
return array->getBuffer(runtime);
}
jsi::Value convertObjCObjectToJSIValue(jsi::Runtime& runtime, id value) {
if (value == nil) {
return jsi::Value::undefined();
@ -77,6 +84,8 @@ jsi::Value convertObjCObjectToJSIValue(jsi::Runtime& runtime, id value) {
} else if ([value isKindOfClass:[Frame class]]) {
auto frameHostObject = std::make_shared<FrameHostObject>((Frame*)value);
return jsi::Object::createFromHostObject(runtime, frameHostObject);
} else if ([value isKindOfClass:[SharedArray class]]) {
return convertSharedArrayToJSIArrayBuffer(runtime, (SharedArray*)value);
}
return jsi::Value::undefined();
}
@ -132,36 +141,46 @@ NSDictionary* convertJSIObjectToNSDictionary(jsi::Runtime& runtime, const jsi::O
id convertJSIValueToObjCObject(jsi::Runtime& runtime, const jsi::Value& value, std::shared_ptr<CallInvoker> jsInvoker) {
if (value.isUndefined() || value.isNull()) {
// undefined/null
return nil;
}
if (value.isBool()) {
} else if (value.isBool()) {
// bool
return @(value.getBool());
}
if (value.isNumber()) {
} else if (value.isNumber()) {
// number
return @(value.getNumber());
}
if (value.isString()) {
} else if (value.isString()) {
// string
return convertJSIStringToNSString(runtime, value.getString(runtime));
}
if (value.isObject()) {
} else if (value.isObject()) {
// object
jsi::Object o = value.getObject(runtime);
if (o.isArray(runtime)) {
// array[]
return convertJSIArrayToNSArray(runtime, o.getArray(runtime), jsInvoker);
}
if (o.isFunction(runtime)) {
} else if (o.isFunction(runtime)) {
// function () => {}
return convertJSIFunctionToCallback(runtime, std::move(o.getFunction(runtime)), jsInvoker);
}
if (o.isHostObject(runtime)) {
auto hostObject = o.asHostObject(runtime);
auto frame = dynamic_cast<FrameHostObject*>(hostObject.get());
if (frame != nullptr) {
return frame->frame;
} else if (o.isHostObject(runtime)) {
if (o.isHostObject<FrameHostObject>(runtime)) {
// Frame
auto hostObject = o.getHostObject<FrameHostObject>(runtime);
return hostObject->frame;
} else {
throw std::runtime_error("The given HostObject is not supported by a Frame Processor Plugin!");
}
} else if (o.isArrayBuffer(runtime)) {
// ArrayBuffer
auto typedArray = std::make_shared<vision::TypedArrayBase>(vision::getTypedArray(runtime, o));
return [[SharedArray alloc] initWithRuntime:runtime typedArray:typedArray];
} else {
// object
return convertJSIObjectToNSDictionary(runtime, o, jsInvoker);
}
return convertJSIObjectToNSDictionary(runtime, o, jsInvoker);
}
throw std::runtime_error("Unsupported jsi::jsi::Value kind");
auto stringRepresentation = value.toString(runtime).utf8(runtime);
throw std::runtime_error("Failed to convert jsi::Value to JNI value - unsupported type! " + stringRepresentation);
}
RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime& runtime, const jsi::Function& value,


@ -0,0 +1,48 @@
//
// SharedArray.h
// VisionCamera
//
// Created by Marc Rousavy on 12.01.24.
// Copyright © 2024 mrousavy. All rights reserved.
//
#pragma once
#import "VisionCameraProxy.h"
#import <Foundation/Foundation.h>
#ifdef __cplusplus
#import "JSITypedArray.h"
#import <jsi/jsi.h>
using namespace facebook;
#endif
// Needs to be in sync with JSITypedArray.h as the index is used
typedef NS_ENUM(NSInteger, SharedArrayType) {
Int8Array,
Int16Array,
Int32Array,
Uint8Array,
Uint8ClampedArray,
Uint16Array,
Uint32Array,
Float32Array,
Float64Array,
};
@interface SharedArray : NSObject
- (instancetype _Nonnull)init NS_UNAVAILABLE;
- (instancetype _Nonnull)initWithProxy:(VisionCameraProxyHolder* _Nonnull)proxy type:(SharedArrayType)type size:(NSInteger)size;
#ifdef __cplusplus
- (instancetype _Nonnull)initWithRuntime:(jsi::Runtime&)runtime typedArray:(std::shared_ptr<vision::TypedArrayBase>)typedArray;
- (std::shared_ptr<vision::TypedArrayBase>)typedArray;
#endif
@property(nonatomic, readonly, nonnull) uint8_t* data;
@property(nonatomic, readonly) NSInteger count;
@end


@ -0,0 +1,58 @@
//
// SharedArray.mm
// VisionCamera
//
// Created by Marc Rousavy on 12.01.24.
// Copyright © 2024 mrousavy. All rights reserved.
//
#import "SharedArray.h"
#import "JSITypedArray.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
using namespace facebook;
@implementation SharedArray {
uint8_t* _data;
NSInteger _count;
std::shared_ptr<vision::TypedArrayBase> _array;
}
vision::TypedArrayKind getTypedArrayKind(int unsafeEnumValue) {
return static_cast<vision::TypedArrayKind>(unsafeEnumValue);
}
- (instancetype)initWithProxy:(VisionCameraProxyHolder*)proxy type:(SharedArrayType)type size:(NSInteger)size {
if (self = [super init]) {
jsi::Runtime& runtime = proxy.proxy->getWorkletRuntime();
vision::TypedArrayKind kind = getTypedArrayKind((int)type);
_array = std::make_shared<vision::TypedArrayBase>(vision::TypedArrayBase(runtime, size, kind));
_data = _array->getBuffer(runtime).data(runtime);
_count = size;
}
return self;
}
- (instancetype)initWithRuntime:(jsi::Runtime&)runtime typedArray:(std::shared_ptr<vision::TypedArrayBase>)typedArray {
if (self = [super init]) {
_array = typedArray;
_data = _array->getBuffer(runtime).data(runtime);
_count = _array->getBuffer(runtime).size(runtime);
}
return self;
}
- (std::shared_ptr<vision::TypedArrayBase>)typedArray {
return _array;
}
- (uint8_t*)data {
return _data;
}
- (NSInteger)count {
return _count;
}
@end


@ -27,6 +27,10 @@ public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
jsi::Runtime& getWorkletRuntime() {
return _workletContext->getWorkletRuntime();
}
private:
void setFrameProcessor(jsi::Runtime& runtime, int viewTag, const jsi::Object& frameProcessor);
void removeFrameProcessor(jsi::Runtime& runtime, int viewTag);
@ -38,6 +42,18 @@ private:
};
#endif
@interface VisionCameraInstaller : NSObject
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge;
@interface VisionCameraProxyHolder : NSObject
- (_Nonnull instancetype)initWithProxy:(void* _Nonnull)proxy;
#ifdef __cplusplus
- (VisionCameraProxy* _Nonnull)proxy;
#endif
@end
@interface VisionCameraInstaller : NSObject
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge;
@end


@ -97,7 +97,8 @@ void VisionCameraProxy::removeFrameProcessor(jsi::Runtime& runtime, int viewTag)
jsi::Value VisionCameraProxy::initFrameProcessorPlugin(jsi::Runtime& runtime, std::string name, const jsi::Object& options) {
NSString* key = [NSString stringWithUTF8String:name.c_str()];
NSDictionary* optionsObjc = JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, options, _callInvoker);
FrameProcessorPlugin* plugin = [FrameProcessorPluginRegistry getPlugin:key withOptions:optionsObjc];
VisionCameraProxyHolder* proxy = [[VisionCameraProxyHolder alloc] initWithProxy:this];
FrameProcessorPlugin* plugin = [FrameProcessorPluginRegistry getPlugin:key withProxy:proxy withOptions:optionsObjc];
if (plugin == nil) {
return jsi::Value::undefined();
}
@ -145,7 +146,25 @@ jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID&
return jsi::Value::undefined();
}
@implementation VisionCameraProxyHolder {
VisionCameraProxy* _proxy;
}
- (instancetype)initWithProxy:(void*)proxy {
if (self = [super init]) {
_proxy = (VisionCameraProxy*)proxy;
}
return self;
}
- (VisionCameraProxy*)proxy {
return _proxy;
}
@end
@implementation VisionCameraInstaller
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge {
RCTCxxBridge* cxxBridge = (RCTCxxBridge*)[RCTBridge currentBridge];
if (!cxxBridge.runtime) {
@ -160,4 +179,5 @@ jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID&
return YES;
}
@end


@ -105,6 +105,8 @@
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
B8207AAC2B0E5DD70002990F /* AVCaptureSession+synchronizeBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+synchronizeBuffer.swift"; sourceTree = "<group>"; };
B8207AAE2B0E67460002990F /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoDataOutput+recommendedVideoSettings.swift"; sourceTree = "<group>"; };
B82186C72B514B5F00CE68CE /* SharedArray.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SharedArray.h; sourceTree = "<group>"; };
B82186C82B514B6D00CE68CE /* SharedArray.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SharedArray.mm; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraDevicesManager.swift; sourceTree = "<group>"; };
B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraDevicesManager.m; sourceTree = "<group>"; };
@ -336,6 +338,8 @@
B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */,
B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */,
B89A79692B3EF60F005E0357 /* UIImageOrientation+descriptor.h */,
B82186C72B514B5F00CE68CE /* SharedArray.h */,
B82186C82B514B6D00CE68CE /* SharedArray.mm */,
);
path = "Frame Processor";
sourceTree = "<group>";