diff --git a/docs/docs/guides/FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx b/docs/docs/guides/FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
index 4e5bd45..fd31aa6 100644
--- a/docs/docs/guides/FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
+++ b/docs/docs/guides/FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
@@ -41,29 +41,35 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
 2. Create an Objective-C source file, for the QR Code Plugin this will be called `QRCodeFrameProcessorPlugin.m`.
 3. Add the following code:
 
-```objc {12}
+```objc
 #import <VisionCamera/FrameProcessorPlugin.h>
 #import <VisionCamera/Frame.h>
 
-@interface QRCodeFrameProcessorPlugin : NSObject
+@interface QRCodeFrameProcessorPlugin : FrameProcessorPlugin
 @end
 
 @implementation QRCodeFrameProcessorPlugin
 
-static inline id scanQRCodes(Frame* frame, NSArray* args) {
+- (NSString *)name {
+  return @"scanQRCodes";
+}
+
+- (id)callback:(Frame *)frame withArguments:(NSArray *)arguments {
   CMSampleBufferRef buffer = frame.buffer;
   UIImageOrientation orientation = frame.orientation;
   // code goes here
   return @[];
 }
 
-VISION_EXPORT_FRAME_PROCESSOR(scanQRCodes)
++ (void) load {
+  [self registerPlugin:[[QRCodeFrameProcessorPlugin alloc] init]];
+}
 
 @end
 ```
 
 :::note
-The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlugins` object using the same name as the Objective-C function. In this case, it would be `FrameProcessorPlugins.scanQRCodes(...)`.
+The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlugins` object using the name returned from the `name` getter. In this case, it would be `FrameProcessorPlugins.scanQRCodes(...)`.
 :::
 
 4. **Implement your Frame Processing.** See the [Example Plugin (Objective-C)](https://github.com/mrousavy/react-native-vision-camera/blob/main/example/ios/Frame%20Processor%20Plugins/Example%20Plugin%20%28Objective%2DC%29) for reference.
@@ -83,27 +89,16 @@ The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlug
 #import <VisionCamera/Frame.h>
 ```
 
-4. Create an Objective-C source file with the same name as the Swift file, for the QR Code Plugin this will be `QRCodeFrameProcessorPlugin.m`. Add the following code:
+4. In the Swift file, add the following code:
 
-```objc
-#import <VisionCamera/FrameProcessorPlugin.h>
-
-@interface VISION_EXPORT_SWIFT_FRAME_PROCESSOR(scanQRCodes, QRCodeFrameProcessorPlugin)
-@end
-```
-
-:::note
-The first parameter in the Macro specifies the JS function name. Make sure it is unique across other Frame Processors.
-:::
-
-5. In the Swift file, add the following code:
-
-```swift {8}
+```swift
 @objc(QRCodeFrameProcessorPlugin)
-public class QRCodeFrameProcessorPlugin: NSObject, FrameProcessorPluginBase {
+public class QRCodeFrameProcessorPlugin: FrameProcessorPlugin {
+  override public func name() -> String! {
+    return "scanQRCodes"
+  }
 
-  @objc
-  public static func callback(_ frame: Frame!, withArgs _: [Any]!) -> Any! {
+  public override func callback(_ frame: Frame!, withArguments arguments: [Any]!) -> Any! {
     let buffer = frame.buffer
     let orientation = frame.orientation
     // code goes here
@@ -112,7 +107,27 @@ public class QRCodeFrameProcessorPlugin: NSObject, FrameProcessorPluginBase {
 }
 ```
 
-6. **Implement your frame processing.** See [Example Plugin (Swift)](https://github.com/mrousavy/react-native-vision-camera/blob/main/example/ios/Frame%20Processor%20Plugins/Example%20Plugin%20%28Swift%29) for reference.
+5. In your `AppDelegate.m`, add the following imports (you can skip this if your AppDelegate is in Swift):
+
+```objc
+#import "YOUR_XCODE_PROJECT_NAME-Swift.h"
+#import <VisionCamera/FrameProcessorPlugin.h>
+```
+
+6. In your `AppDelegate.m`, add the following code to `application:didFinishLaunchingWithOptions:`:
+
+```objc {5}
+- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
+{
+  ...
+
+  [FrameProcessorPlugin registerPlugin:[[QRCodeFrameProcessorPlugin alloc] init]];
+
+  return [super application:application didFinishLaunchingWithOptions:launchOptions];
+}
+```
+
+7. **Implement your frame processing.** See [Example Plugin (Swift)](https://github.com/mrousavy/react-native-vision-camera/blob/main/example/ios/Frame%20Processor%20Plugins/Example%20Plugin%20%28Swift%29) for reference.
diff --git a/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m b/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m
index aca1b1f..dba0033 100644
--- a/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m
+++ b/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m
@@ -11,12 +11,16 @@
 
 // Example for an Objective-C Frame Processor plugin
 
-@interface ExampleFrameProcessorPlugin : NSObject
+@interface ExampleFrameProcessorPlugin : FrameProcessorPlugin
 @end
 
 @implementation ExampleFrameProcessorPlugin
 
-static inline id example_plugin(Frame* frame, NSArray* arguments) {
+- (NSString *)name {
+  return @"example_plugin";
+}
+
+- (id)callback:(Frame *)frame withArguments:(NSArray *)arguments {
   CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
   NSLog(@"ExamplePlugin: %zu x %zu Image. Logging %lu parameters:",
         CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer), (unsigned long)arguments.count);
@@ -36,6 +40,8 @@ static inline id example_plugin(Frame* frame, NSArray* arguments) {
   };
 }
 
-VISION_EXPORT_FRAME_PROCESSOR(example_plugin)
++ (void) load {
+  [self registerPlugin:[[ExampleFrameProcessorPlugin alloc] init]];
+}
 
 @end
diff --git a/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.m b/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.m
deleted file mode 100644
index 97b1bbe..0000000
--- a/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.m
+++ /dev/null
@@ -1,13 +0,0 @@
-//
-//  ExamplePluginSwift.m
-//  VisionCamera
-//
-//  Created by Marc Rousavy on 01.05.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#import <Foundation/Foundation.h>
-#import <VisionCamera/FrameProcessorPlugin.h>
-
-@interface VISION_EXPORT_SWIFT_FRAME_PROCESSOR(example_plugin_swift, ExamplePluginSwift)
-@end
diff --git a/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift b/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift
index 84615a7..cdc70b5 100644
--- a/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift
+++ b/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift
@@ -9,34 +9,38 @@
 import AVKit
 import Vision
 
-@objc(ExamplePluginSwift)
-public class ExamplePluginSwift: NSObject, FrameProcessorPluginBase {
-  @objc
-  public static func callback(_ frame: Frame!, withArgs args: [Any]!) -> Any! {
-    guard let imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer) else {
-      return nil
-    }
-    NSLog("ExamplePlugin: \(CVPixelBufferGetWidth(imageBuffer)) x \(CVPixelBufferGetHeight(imageBuffer)) Image. Logging \(args.count) parameters:")
+@objc
+public class ExamplePluginSwift : FrameProcessorPlugin {
+
+  override public func name() -> String! {
+    return "example_plugin_swift"
+  }
+
+  public override func callback(_ frame: Frame!, withArguments arguments: [Any]!) -> Any! {
+    guard let imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer) else {
+      return nil
+    }
+    NSLog("ExamplePlugin: \(CVPixelBufferGetWidth(imageBuffer)) x \(CVPixelBufferGetHeight(imageBuffer)) Image. Logging \(arguments.count) parameters:")
 
-    args.forEach { arg in
-      var string = "\(arg)"
-      if let array = arg as? NSArray {
-        string = (array as Array).description
-      } else if let map = arg as? NSDictionary {
-        string = (map as Dictionary).description
-      }
-      NSLog("ExamplePlugin: -> \(string) (\(type(of: arg)))")
-    }
+    arguments.forEach { arg in
+      var string = "\(arg)"
+      if let array = arg as? NSArray {
+        string = (array as Array).description
+      } else if let map = arg as? NSDictionary {
+        string = (map as Dictionary).description
+      }
+      NSLog("ExamplePlugin: -> \(string) (\(type(of: arg)))")
+    }
 
-    return [
-      "example_str": "Test",
-      "example_bool": true,
-      "example_double": 5.3,
-      "example_array": [
-        "Hello",
-        true,
-        17.38,
-      ],
-    ]
-  }
+    return [
+      "example_str": "Test",
+      "example_bool": true,
+      "example_double": 5.3,
+      "example_array": [
+        "Hello",
+        true,
+        17.38,
+      ],
+    ]
+  }
 }
diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock
index 7150e32..2798c6a 100644
--- a/example/ios/Podfile.lock
+++ b/example/ios/Podfile.lock
@@ -436,7 +436,7 @@ PODS:
     - React-Core
   - RNVectorIcons (9.2.0):
     - React-Core
-  - VisionCamera (3.0.0-rc.1):
+  - VisionCamera (3.0.0-rc.2):
     - React
     - React-callinvoker
     - React-Core
@@ -644,7 +644,7 @@ SPEC CHECKSUMS:
   RNScreens: 218801c16a2782546d30bd2026bb625c0302d70f
   RNStaticSafeAreaInsets: 055ddbf5e476321720457cdaeec0ff2ba40ec1b8
   RNVectorIcons: fcc2f6cb32f5735b586e66d14103a74ce6ad61f8
-  VisionCamera: 0d154cd0ab9043a3c8a4908fb57ad65c9e1f3baf
+  VisionCamera: e4f19a6c22cfa146736bdfe8df057e2ed5ca8dd3
   Yoga: 5ed1699acbba8863755998a4245daa200ff3817b
 
 PODFILE CHECKSUM: d53724fe402c2547f1dd1cc571bbe77d9820e636
diff --git a/example/ios/VisionCameraExample.xcodeproj/project.pbxproj b/example/ios/VisionCameraExample.xcodeproj/project.pbxproj
index c04f1f3..b71fa32 100644
--- a/example/ios/VisionCameraExample.xcodeproj/project.pbxproj
+++ b/example/ios/VisionCameraExample.xcodeproj/project.pbxproj
@@ -11,10 +11,9 @@
     13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
     13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
     81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; };
-    B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */ = {isa = PBXBuildFile; };
+    B8DB3BD5263DE8B7004C18D7 /* (null) in Sources */ = {isa = PBXBuildFile; };
     B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */; };
     B8DB3BDD263DEA31004C18D7 /* ExamplePluginSwift.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BDA263DEA31004C18D7 /* ExamplePluginSwift.swift */; };
-    B8DB3BDE263DEA31004C18D7 /* ExamplePluginSwift.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BDB263DEA31004C18D7 /* ExamplePluginSwift.m */; };
     B8F0E10825E0199F00586F16 /* File.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F0E10725E0199F00586F16 /* File.swift */; };
     C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */; };
 /* End PBXBuildFile section */
@@ -31,7 +30,6 @@
     81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = VisionCameraExample/LaunchScreen.storyboard; sourceTree = "<group>"; };
     B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ExampleFrameProcessorPlugin.m; sourceTree = "<group>"; };
     B8DB3BDA263DEA31004C18D7 /* ExamplePluginSwift.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExamplePluginSwift.swift; sourceTree = "<group>"; };
-    B8DB3BDB263DEA31004C18D7 /* ExamplePluginSwift.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ExamplePluginSwift.m; sourceTree = "<group>"; };
     B8F0E10625E0199F00586F16 /* VisionCameraExample-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "VisionCameraExample-Bridging-Header.h"; sourceTree = "<group>"; };
     B8F0E10725E0199F00586F16 /* File.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = File.swift; sourceTree = "<group>"; };
     C1D342AD8210E7627A632602 /* Pods-VisionCameraExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-VisionCameraExample.debug.xcconfig"; path = "Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample.debug.xcconfig"; sourceTree = "<group>"; };
@@ -138,7 +136,6 @@
       isa = PBXGroup;
       children = (
         B8DB3BDA263DEA31004C18D7 /* ExamplePluginSwift.swift */,
-        B8DB3BDB263DEA31004C18D7 /* ExamplePluginSwift.m */,
       );
       path = "Example Plugin (Swift)";
       sourceTree = "<group>";
@@ -383,11 +380,10 @@
       files = (
         13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */,
         B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */,
-        B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */,
+        B8DB3BD5263DE8B7004C18D7 /* (null) in Sources */,
         B8DB3BDD263DEA31004C18D7 /* ExamplePluginSwift.swift in Sources */,
         B8F0E10825E0199F00586F16 /* File.swift in Sources */,
         13B07FC11A68108700A75B9A /* main.m in Sources */,
-        B8DB3BDE263DEA31004C18D7 /* ExamplePluginSwift.m in Sources */,
       );
       runOnlyForDeploymentPostprocessing = 0;
     };
diff --git a/example/ios/VisionCameraExample/AppDelegate.mm b/example/ios/VisionCameraExample/AppDelegate.mm
index 5e74fe9..089ebc1 100644
--- a/example/ios/VisionCameraExample/AppDelegate.mm
+++ b/example/ios/VisionCameraExample/AppDelegate.mm
@@ -1,6 +1,8 @@
 #import "AppDelegate.h"
 
 #import <React/RCTBundleURLProvider.h>
+#import "VisionCameraExample-Swift.h"
+#import <VisionCamera/FrameProcessorPlugin.h>
 
 @implementation AppDelegate
@@ -10,6 +12,8 @@
   // You can add your custom initial props in the dictionary below.
   // They will be passed down to the ViewController used by React Native.
   self.initialProps = @{};
+
+  [FrameProcessorPlugin registerPlugin:[[ExamplePluginSwift alloc] init]];
 
   return [super application:application didFinishLaunchingWithOptions:launchOptions];
 }
diff --git a/ios/Frame Processor/FrameProcessorPlugin.h b/ios/Frame Processor/FrameProcessorPlugin.h
index 091a532..38ca26d 100644
--- a/ios/Frame Processor/FrameProcessorPlugin.h
+++ b/ios/Frame Processor/FrameProcessorPlugin.h
@@ -6,57 +6,27 @@
 //  Copyright © 2021 mrousavy. All rights reserved.
 //
 
-#ifndef FrameProcessorPlugin_h
-#define FrameProcessorPlugin_h
+#pragma once
 
 #import <Foundation/Foundation.h>
-#import "FrameProcessorPluginRegistry.h"
 #import "Frame.h"
 
-@protocol FrameProcessorPluginBase
-+ (id) callback:(Frame*)frame withArgs:(NSArray*)args;
+/// The base class for a Frame Processor Plugin which can be called synchronously from a JS Frame Processor.
+///
+/// Subclass this class in a Swift or Objective-C class and override the `callback:withArguments:` method, and
+/// implement your Frame Processing there.
+/// Then, in your App's startup (AppDelegate.m), call `FrameProcessorPlugin.registerPlugin(YourNewPlugin())`
+@interface FrameProcessorPlugin : NSObject
+
+/// Get the name of the Frame Processor Plugin.
+/// This will be exposed to JS under the `FrameProcessorPlugins` Proxy object.
+- (NSString *)name;
+
+/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
+/// Make sure your code is optimized, as this is a hot path.
+- (id) callback:(Frame*)frame withArguments:(NSArray*)arguments;
+
+/// Register the given plugin in the Plugin Registry. This should be called on App Startup.
++ (void) registerPlugin:(FrameProcessorPlugin*)plugin;
+
 @end
-
-
-#define VISION_CONCAT2(A, B) A##B
-#define VISION_CONCAT(A, B) VISION_CONCAT2(A, B)
-
-/**
- * Use this Macro to register the given function as a Frame Processor.
- * * Make sure the given function is a C-style function with the following signature: static inline id callback(Frame* frame, NSArray* args)
- * * Make sure the given function's name is unique across other frame processor plugins
- * * Make sure your frame processor returns a Value that can be converted to JS
- * * Make sure to use this Macro in an @implementation, not @interface
- *
- * The JS function will have the same name as the given Objective-C function. It can be accessed through the FrameProcessorPlugins object exposed by VisionCamera.
- */
-#define VISION_EXPORT_FRAME_PROCESSOR(frame_processor) \
- \
-+(void)load \
-{ \
-  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@ #frame_processor callback:^id(Frame* frame, NSArray* args) { \
-    return frame_processor(frame, args); \
-  }]; \
-}
-
-
-/**
- * Same as VISION_EXPORT_FRAME_PROCESSOR, but uses __attribute__((constructor)) for
- * registration. Useful for registering swift classes that forbids use of +(void)load.
- */
-#define VISION_EXPORT_SWIFT_FRAME_PROCESSOR(name, objc_name) \
-objc_name : NSObject \
-@end \
- \
-@interface objc_name (FrameProcessorPlugin) \
-@end \
-@implementation objc_name (FrameProcessorPlugin) \
- \
-+(void)load \
-{ \
-  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@ #name callback:^id(Frame* frame, NSArray* args) { \
-    return [objc_name callback:frame withArgs:args]; \
-  }]; \
-}
-
-#endif /* FrameProcessorPlugin_h */
diff --git a/ios/Frame Processor/FrameProcessorPlugin.m b/ios/Frame Processor/FrameProcessorPlugin.m
new file mode 100644
index 0000000..417b1d3
--- /dev/null
+++ b/ios/Frame Processor/FrameProcessorPlugin.m
@@ -0,0 +1,31 @@
+//
+//  FrameProcessorPlugin.m
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 24.02.23.
+//  Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "FrameProcessorPlugin.h"
+#import "FrameProcessorPluginRegistry.h"
+
+@implementation FrameProcessorPlugin
+
+- (NSString *)name {
+  [NSException raise:NSInternalInconsistencyException
+              format:@"Frame Processor Plugin \"%@\" does not override the `name` getter!", NSStringFromClass([self class])];
+  return nil;
+}
+
+- (id)callback:(Frame *)frame withArguments:(NSArray *)arguments {
+  [NSException raise:NSInternalInconsistencyException
+              format:@"Frame Processor Plugin \"%@\" does not override the `callback(frame:withArguments:)` method!", [self name]];
+  return nil;
+}
+
++ (void)registerPlugin:(FrameProcessorPlugin *)plugin {
+  [FrameProcessorPluginRegistry addFrameProcessorPlugin:plugin];
+}
+
+@end
diff --git a/ios/Frame Processor/FrameProcessorPluginRegistry.h b/ios/Frame Processor/FrameProcessorPluginRegistry.h
index ce3b13f..326d9dd 100644
--- a/ios/Frame Processor/FrameProcessorPluginRegistry.h
+++ b/ios/Frame Processor/FrameProcessorPluginRegistry.h
@@ -10,12 +10,11 @@
 
 #import <Foundation/Foundation.h>
 #import "Frame.h"
-
-typedef id (^FrameProcessorPlugin) (Frame* frame, NSArray* arguments);
+#import "FrameProcessorPlugin.h"
 
 @interface FrameProcessorPluginRegistry : NSObject
 
-+ (NSMutableDictionary<NSString*, FrameProcessorPlugin>*)frameProcessorPlugins;
-+ (void) addFrameProcessorPlugin:(NSString*)name callback:(FrameProcessorPlugin)callback;
++ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins;
++ (void) addFrameProcessorPlugin:(FrameProcessorPlugin*)plugin;
 
 @end
diff --git a/ios/Frame Processor/FrameProcessorPluginRegistry.mm b/ios/Frame Processor/FrameProcessorPluginRegistry.mm
index 3b57594..86df213 100644
--- a/ios/Frame Processor/FrameProcessorPluginRegistry.mm
+++ b/ios/Frame Processor/FrameProcessorPluginRegistry.mm
@@ -11,19 +11,19 @@
 
 @implementation FrameProcessorPluginRegistry
 
-+ (NSMutableDictionary<NSString*, FrameProcessorPlugin>*)frameProcessorPlugins {
-  static NSMutableDictionary<NSString*, FrameProcessorPlugin>* plugins = nil;
++ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins {
+  static NSMutableDictionary<NSString*, FrameProcessorPlugin*>* plugins = nil;
   if (plugins == nil) {
     plugins = [[NSMutableDictionary alloc] init];
   }
   return plugins;
 }
 
-+ (void) addFrameProcessorPlugin:(NSString*)name callback:(FrameProcessorPlugin)callback {
-  BOOL alreadyExists = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:name] != nil;
-  NSAssert(!alreadyExists, @"Tried to two Frame Processor Plugins with the same name! Either choose unique names, or remove the unused plugin.");
++ (void) addFrameProcessorPlugin:(FrameProcessorPlugin*)plugin {
+  BOOL alreadyExists = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:plugin.name] != nil;
+  NSAssert(!alreadyExists, @"Tried to add a Frame Processor Plugin with a name that already exists! Either choose unique names, or remove the unused plugin. Name: %@", plugin.name);
 
-  [[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:callback forKey:name];
+  [[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:plugin forKey:plugin.name];
 }
 
 @end
diff --git a/ios/Frame Processor/FrameProcessorRuntimeManager.mm b/ios/Frame Processor/FrameProcessorRuntimeManager.mm
index 8cc3555..ef98055 100644
--- a/ios/Frame Processor/FrameProcessorRuntimeManager.mm
+++ b/ios/Frame Processor/FrameProcessorRuntimeManager.mm
@@ -9,6 +9,7 @@
 #import <Foundation/Foundation.h>
 #import "FrameProcessorRuntimeManager.h"
 #import "FrameProcessorPluginRegistry.h"
+#import "FrameProcessorPlugin.h"
 #import "FrameHostObject.h"
 
 #import
@@ -83,14 +84,14 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
     auto pluginName = [pluginKey UTF8String];
 
     NSLog(@"FrameProcessorBindings: Installing Frame Processor plugin \"%s\"...", pluginName);
-    // Get the Plugin callback func
-    FrameProcessorPlugin callback = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:pluginKey];
+    // Get the Plugin
+    FrameProcessorPlugin* plugin = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:pluginKey];
 
     // Create the JSI host function
-    auto function = [callback, callInvoker](jsi::Runtime& runtime,
-                                            const jsi::Value& thisValue,
-                                            const jsi::Value* arguments,
-                                            size_t count) -> jsi::Value {
+    auto function = [plugin, callInvoker](jsi::Runtime& runtime,
+                                          const jsi::Value& thisValue,
+                                          const jsi::Value* arguments,
+                                          size_t count) -> jsi::Value {
       // Get the first parameter, which is always the native Frame Host Object.
       auto frameHostObject = arguments[0].asObject(runtime).asHostObject(runtime);
       auto frame = static_cast<FrameHostObject*>(frameHostObject.get());
@@ -101,7 +102,7 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
                                          count - 1, // use smaller count
                                          callInvoker);
       // Call the FP Plugin, which might return something.
-      id result = callback(frame->frame, args);
+      id result = [plugin callback:frame->frame withArguments:args];
 
       // Convert the return value (or null) to a JS Value and return it to JS
       return convertObjCObjectToJSIValue(runtime, result);
diff --git a/ios/VisionCamera.xcodeproj/project.pbxproj b/ios/VisionCamera.xcodeproj/project.pbxproj
index 6e3f3db..bc20ec0 100644
--- a/ios/VisionCamera.xcodeproj/project.pbxproj
+++ b/ios/VisionCamera.xcodeproj/project.pbxproj
@@ -110,6 +110,7 @@
     B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
     B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
     B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
+    B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
     B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSConsoleHelper.h; sourceTree = "<group>"; };
     B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSConsoleHelper.mm; sourceTree = "<group>"; };
     B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
@@ -285,6 +286,7 @@
         B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
         B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */,
         B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
+        B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */,
       );
       path = "Frame Processor";
       sourceTree = "<group>";
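
---

The guide above only shows the Objective-C `AppDelegate.m` registration. Below is a minimal sketch (not part of the diff) of the equivalent call from a Swift AppDelegate, assuming the `QRCodeFrameProcessorPlugin` class from the guide exists in the app target and that VisionCamera's `FrameProcessorPlugin` is visible to Swift (via the bridging header or the `VisionCamera` module); the Swift spelling `registerPlugin(_:)` follows the doc comment in the new `FrameProcessorPlugin.h`.

```swift
// Hypothetical sketch — not part of this change set.
// Assumes FrameProcessorPlugin and QRCodeFrameProcessorPlugin are importable from Swift.
import VisionCamera

func registerFrameProcessorPlugins() {
  // Mirrors `[FrameProcessorPlugin registerPlugin:[[QRCodeFrameProcessorPlugin alloc] init]]`
  // from the Objective-C AppDelegate example; call this once during app startup,
  // e.g. from application(_:didFinishLaunchingWithOptions:).
  FrameProcessorPlugin.registerPlugin(QRCodeFrameProcessorPlugin())
}
```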