feat: Reintroduce Macros for Frame Processor Plugin registration (#2027)
In VisionCamera v1 & v2 there were two ObjC macros that helped with the creation and registration of Frame Processors, but these were removed in v3. This PR reintroduces those macros, which will not only make FP development easier but also fix issues people had with the registration of Swift Frame Processors (+load vs. +initialize issues). Docs were also updated to reflect that the macros should be used to correctly initialize and register ObjC/Swift Frame Processors.
This commit is contained in:
parent
2666ac53a6
commit
a291642c53
@ -62,6 +62,8 @@ import com.mrousavy.camera.frameprocessor.Frame;
|
||||
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin;
|
||||
|
||||
public class FaceDetectorFrameProcessorPlugin extends FrameProcessorPlugin {
|
||||
FaceDetectorFrameProcessorPlugin(@Nullable Map<String, Object> options) {}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Object callback(@NonNull Frame frame, @Nullable Map<String, Object> arguments) {
|
||||
@ -87,7 +89,7 @@ import com.mrousavy.camera.frameprocessor.FrameProcessorPluginRegistry;
|
||||
public class FaceDetectorFrameProcessorPluginPackage implements ReactPackage {
|
||||
// highlight-start
|
||||
FaceDetectorFrameProcessorPluginPackage() {
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces", options -> new FaceDetectorFrameProcessorPlugin());
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces", options -> new FaceDetectorFrameProcessorPlugin(options));
|
||||
}
|
||||
// highlight-end
|
||||
|
||||
@ -134,9 +136,9 @@ The Frame Processor Plugin will be exposed to JS through the `VisionCameraProxy`
|
||||
import com.mrousavy.camera.frameprocessor.Frame
|
||||
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
|
||||
|
||||
class FaceDetectorFrameProcessorPlugin: FrameProcessorPlugin() {
|
||||
class FaceDetectorFrameProcessorPlugin(options: Map<String, Any>?): FrameProcessorPlugin(options) {
|
||||
|
||||
override fun callback(frame: Frame, arguments: Map<String, Object>?): Any? {
|
||||
override fun callback(frame: Frame, arguments: Map<String, Any>?): Any? {
|
||||
// highlight-next-line
|
||||
// code goes here
|
||||
return null
|
||||
@ -158,7 +160,7 @@ class FaceDetectorFrameProcessorPluginPackage : ReactPackage {
|
||||
// highlight-start
|
||||
init {
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("detectFaces") { options ->
|
||||
FaceDetectorFrameProcessorPlugin()
|
||||
FaceDetectorFrameProcessorPlugin(options)
|
||||
}
|
||||
}
|
||||
// highlight-end
|
||||
|
@ -52,7 +52,7 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
|
||||
@implementation FaceDetectorFrameProcessorPlugin
|
||||
|
||||
- (instancetype) initWithOptions:(NSDictionary*)options; {
|
||||
self = [super init];
|
||||
self = [super initWithOptions:options];
|
||||
return self;
|
||||
}
|
||||
|
||||
@ -63,14 +63,9 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
|
||||
return nil;
|
||||
}
|
||||
|
||||
+ (void) load {
|
||||
// highlight-start
|
||||
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"detectFaces"
|
||||
withInitializer:^FrameProcessorPlugin*(NSDictionary* options) {
|
||||
return [[FaceDetectorFrameProcessorPlugin alloc] initWithOptions:options];
|
||||
}];
|
||||
// highlight-end
|
||||
}
|
||||
// highlight-start
|
||||
VISION_EXPORT_FRAME_PROCESSOR(FaceDetectorFrameProcessorPlugin, detectFaces)
|
||||
// highlight-end
|
||||
|
||||
@end
|
||||
```
|
||||
@ -96,6 +91,10 @@ import VisionCamera
|
||||
|
||||
@objc(FaceDetectorFrameProcessorPlugin)
|
||||
public class FaceDetectorFrameProcessorPlugin: FrameProcessorPlugin {
|
||||
public override init(options: [AnyHashable : Any]! = [:]) {
|
||||
super.init(options: options)
|
||||
}
|
||||
|
||||
public override func callback(_ frame: Frame, withArguments arguments: [AnyHashable : Any]?) -> Any {
|
||||
let buffer = frame.buffer
|
||||
let orientation = frame.orientation
|
||||
@ -113,22 +112,9 @@ public class FaceDetectorFrameProcessorPlugin: FrameProcessorPlugin {
|
||||
|
||||
#import "YOUR_XCODE_PROJECT_NAME-Swift.h" // <--- replace "YOUR_XCODE_PROJECT_NAME" with the actual value of your xcode project name
|
||||
|
||||
@interface FaceDetectorFrameProcessorPlugin (FrameProcessorPluginLoader)
|
||||
@end
|
||||
|
||||
@implementation FaceDetectorFrameProcessorPlugin (FrameProcessorPluginLoader)
|
||||
|
||||
+ (void)load
|
||||
{
|
||||
// highlight-start
|
||||
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"detectFaces"
|
||||
withInitializer:^FrameProcessorPlugin* (NSDictionary* options) {
|
||||
return [[FaceDetectorFrameProcessorPlugin alloc] initWithOptions:options];
|
||||
}];
|
||||
// highlight-end
|
||||
}
|
||||
|
||||
@end
|
||||
// highlight-start
|
||||
VISION_EXPORT_SWIFT_FRAME_PROCESSOR(FaceDetectorFrameProcessorPlugin, detectFaces)
|
||||
// highlight-end
|
||||
```
|
||||
|
||||
5. **Implement your frame processing.** See [Example Plugin (Swift)](https://github.com/mrousavy/react-native-vision-camera/blob/main/package/example/ios/Frame%20Processor%20Plugins/Example%20Plugin%20%28Swift%29) for reference.
|
||||
|
@ -12,6 +12,14 @@ import java.util.Map;
|
||||
@DoNotStrip
|
||||
@Keep
|
||||
public abstract class FrameProcessorPlugin {
|
||||
public FrameProcessorPlugin() {}
|
||||
|
||||
/**
|
||||
* The initializer for a Frame Processor Plugin class that takes optional object that consists
|
||||
* options passed from JS layer
|
||||
*/
|
||||
public FrameProcessorPlugin(@Nullable Map<String, Object> options) {}
|
||||
|
||||
/**
|
||||
* The actual Frame Processor plugin callback. Called for every frame the ImageAnalyzer receives.
|
||||
* @param frame The Frame from the Camera. Don't call .close() on this, as VisionCamera handles that.
|
||||
|
@ -40,7 +40,7 @@ public class ExampleFrameProcessorPlugin extends FrameProcessorPlugin {
|
||||
return map;
|
||||
}
|
||||
|
||||
ExampleFrameProcessorPlugin() {
|
||||
|
||||
ExampleFrameProcessorPlugin(@Nullable Map<String, Object> options) {
|
||||
Log.d("ExamplePlugin", " - options: " + options.toString());
|
||||
}
|
||||
}
|
||||
|
@ -4,7 +4,11 @@ import android.util.Log
|
||||
import com.mrousavy.camera.frameprocessor.Frame
|
||||
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
|
||||
|
||||
class ExampleKotlinFrameProcessorPlugin: FrameProcessorPlugin() {
|
||||
class ExampleKotlinFrameProcessorPlugin(options: Map<String, Any>?): FrameProcessorPlugin(options) {
|
||||
init {
|
||||
Log.d("ExampleKotlinPlugin", " - options" + options?.toString())
|
||||
}
|
||||
|
||||
override fun callback(frame: Frame, params: Map<String, Any>?): Any? {
|
||||
if (params == null) {
|
||||
return null
|
||||
|
@ -65,7 +65,7 @@ public class MainApplication extends Application implements ReactApplication {
|
||||
DefaultNewArchitectureEntryPoint.load();
|
||||
}
|
||||
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin", options -> new ExampleFrameProcessorPlugin());
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_kotlin_swift_plugin", options -> new ExampleKotlinFrameProcessorPlugin());
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin", options -> new ExampleFrameProcessorPlugin(options));
|
||||
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_kotlin_swift_plugin", options -> new ExampleKotlinFrameProcessorPlugin(options));
|
||||
}
|
||||
}
|
||||
|
@ -18,6 +18,13 @@
|
||||
|
||||
@implementation ExampleFrameProcessorPlugin
|
||||
|
||||
- (instancetype)initWithOptions:(NSDictionary * _Nullable)options
|
||||
{
|
||||
self = [super initWithOptions:options];
|
||||
NSLog(@"ExamplePlugin - options: %@", options);
|
||||
return self;
|
||||
}
|
||||
|
||||
- (id)callback:(Frame *)frame withArguments:(NSDictionary *)arguments {
|
||||
CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
|
||||
NSLog(@"ExamplePlugin: %zu x %zu Image. Logging %lu parameters:", CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer), (unsigned long)arguments.count);
|
||||
@ -38,12 +45,7 @@
|
||||
};
|
||||
}
|
||||
|
||||
+ (void) load {
|
||||
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"example_plugin"
|
||||
withInitializer:^FrameProcessorPlugin*(NSDictionary* options) {
|
||||
return [[ExampleFrameProcessorPlugin alloc] init];
|
||||
}];
|
||||
}
|
||||
VISION_EXPORT_FRAME_PROCESSOR(ExampleFrameProcessorPlugin, example_plugin)
|
||||
|
||||
@end
|
||||
#endif
|
||||
|
@ -6,26 +6,12 @@
|
||||
//
|
||||
|
||||
#if __has_include(<VisionCamera/FrameProcessorPlugin.h>)
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <VisionCamera/FrameProcessorPlugin.h>
|
||||
#import <VisionCamera/FrameProcessorPluginRegistry.h>
|
||||
#import <VisionCamera/Frame.h>
|
||||
|
||||
#import "VisionCameraExample-Swift.h"
|
||||
|
||||
// Example for a Swift Frame Processor plugin automatic registration
|
||||
@interface ExampleSwiftFrameProcessorPlugin (FrameProcessorPluginLoader)
|
||||
@end
|
||||
|
||||
@implementation ExampleSwiftFrameProcessorPlugin (FrameProcessorPluginLoader)
|
||||
|
||||
+ (void)initialize {
|
||||
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"example_kotlin_swift_plugin"
|
||||
withInitializer:^FrameProcessorPlugin* _Nonnull(NSDictionary* _Nullable options) {
|
||||
return [[ExampleSwiftFrameProcessorPlugin alloc] init];
|
||||
}];
|
||||
}
|
||||
|
||||
@end
|
||||
// // Example for a Swift Frame Processor plugin automatic registration
|
||||
VISION_EXPORT_SWIFT_FRAME_PROCESSOR(ExampleSwiftFrameProcessorPlugin, example_kotlin_swift_plugin)
|
||||
|
||||
#endif
|
||||
|
@ -11,6 +11,12 @@ import VisionCamera
|
||||
// Example for a Swift Frame Processor plugin
|
||||
@objc(ExampleSwiftFrameProcessorPlugin)
|
||||
public class ExampleSwiftFrameProcessorPlugin: FrameProcessorPlugin {
|
||||
public override init(options: [AnyHashable: Any]! = [:]) {
|
||||
super.init(options: options)
|
||||
|
||||
print("ExampleSwiftPlugin - options: \(String(describing: options))")
|
||||
}
|
||||
|
||||
public override func callback(_ frame: Frame, withArguments arguments: [AnyHashable: Any]?) -> Any? {
|
||||
let imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer)
|
||||
|
||||
|
@ -507,7 +507,7 @@ PODS:
|
||||
- libwebp (~> 1.0)
|
||||
- SDWebImage/Core (~> 5.10)
|
||||
- SocketRocket (0.6.1)
|
||||
- VisionCamera (3.3.1):
|
||||
- VisionCamera (3.4.0):
|
||||
- React
|
||||
- React-callinvoker
|
||||
- React-Core
|
||||
@ -747,7 +747,7 @@ SPEC CHECKSUMS:
|
||||
SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d
|
||||
SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d
|
||||
SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
|
||||
VisionCamera: f386aee60abb07d979c506ea9e6d4831e596cafe
|
||||
VisionCamera: eead9df29ac5935d5685b5ecaea3ae8b6da84bff
|
||||
Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce
|
||||
|
||||
PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
|
||||
|
@ -17,6 +17,7 @@ import type { Routes } from './Routes'
|
||||
import type { NativeStackScreenProps } from '@react-navigation/native-stack'
|
||||
import { useIsFocused } from '@react-navigation/core'
|
||||
import { examplePlugin } from './frame-processors/ExamplePlugin'
|
||||
import { exampleKotlinSwiftPlugin } from './frame-processors/ExampleKotlinSwiftPlugin'
|
||||
import { usePreferredCameraDevice } from './hooks/usePreferredCameraDevice'
|
||||
|
||||
const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera)
|
||||
@ -166,6 +167,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
|
||||
|
||||
console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
|
||||
examplePlugin(frame)
|
||||
exampleKotlinSwiftPlugin(frame)
|
||||
}, [])
|
||||
|
||||
return (
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'
|
||||
|
||||
const plugin = VisionCameraProxy.getFrameProcessorPlugin('example_kotlin_swift_plugin')
|
||||
const plugin = VisionCameraProxy.getFrameProcessorPlugin('example_kotlin_swift_plugin', { foo: 'bar' })
|
||||
|
||||
export function exampleKotlinSwiftPlugin(frame: Frame): string[] {
|
||||
'worklet'
|
||||
|
@ -21,8 +21,39 @@
|
||||
/// VisionCamera Runtime.
|
||||
@interface FrameProcessorPlugin : NSObject
|
||||
|
||||
/// The initializer for a Frame Processor Plugin class that takes optional object that consists
|
||||
/// options passed from JS layer
|
||||
- (instancetype _Nonnull)initWithOptions:(NSDictionary* _Nullable)options;
|
||||
|
||||
/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
|
||||
/// Make sure your code is optimized, as this is a hot path.
|
||||
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments;
|
||||
|
||||
@end
|
||||
|
||||
#define VISION_CONCAT2(A, B) A##B
|
||||
#define VISION_CONCAT(A, B) VISION_CONCAT2(A, B)
|
||||
|
||||
#define VISION_EXPORT_FRAME_PROCESSOR(frame_processor_class, frame_processor_plugin_name) \
|
||||
+(void)load { \
|
||||
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@ #frame_processor_plugin_name \
|
||||
withInitializer:^FrameProcessorPlugin*(NSDictionary* _Nullable options) { \
|
||||
return [[frame_processor_class alloc] initWithOptions:options]; \
|
||||
}]; \
|
||||
}
|
||||
|
||||
#define VISION_EXPORT_SWIFT_FRAME_PROCESSOR(frame_processor_class, frame_processor_plugin_name) \
|
||||
\
|
||||
@interface frame_processor_class (FrameProcessorPluginLoader) \
|
||||
@end \
|
||||
\
|
||||
@implementation frame_processor_class (FrameProcessorPluginLoader) \
|
||||
\
|
||||
__attribute__((constructor)) static void VISION_CONCAT(initialize_, frame_processor_plugin_name)(void) { \
|
||||
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@ #frame_processor_plugin_name \
|
||||
withInitializer:^FrameProcessorPlugin* _Nonnull(NSDictionary* _Nullable options) { \
|
||||
return [[frame_processor_class alloc] initWithOptions:options]; \
|
||||
}]; \
|
||||
} \
|
||||
\
|
||||
@end
|
||||
|
@ -11,6 +11,11 @@
|
||||
// Base implementation (empty)
|
||||
@implementation FrameProcessorPlugin
|
||||
|
||||
- (instancetype)initWithOptions:(NSDictionary* _Nullable)options {
|
||||
self = [super init];
|
||||
return self;
|
||||
}
|
||||
|
||||
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments {
|
||||
[NSException raise:NSInternalInconsistencyException
|
||||
format:@"Frame Processor Plugin does not override the `callback(frame:withArguments:)` method!"];
|
||||
|
@ -27,7 +27,7 @@ interface TVisionCameraProxy {
|
||||
* Creates a new instance of a Frame Processor Plugin.
|
||||
* The Plugin has to be registered on the native side, otherwise this returns `undefined`
|
||||
*/
|
||||
getFrameProcessorPlugin: (name: string) => FrameProcessorPlugin | undefined
|
||||
getFrameProcessorPlugin: (name: string, options?: Record<string, ParameterType>) => FrameProcessorPlugin | undefined
|
||||
}
|
||||
|
||||
let hasWorklets = false
|
||||
|
Loading…
Reference in New Issue
Block a user