docs: Add Kotlin & Swift FP plugins examples (#1902)

* chore(docs): add Kotlin FP example

* chore(docs): add Swift FP plugin
This commit is contained in:
Mateusz Mędrek 2023-10-03 11:33:48 +02:00 committed by GitHub
parent b24b1c808f
commit 62e786ad04
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 220 additions and 19 deletions

View File

@ -1,5 +1,6 @@
apply plugin: "com.android.application"
apply plugin: "com.facebook.react"
apply plugin: "kotlin-android"
import com.android.build.OutputFile

View File

@ -0,0 +1,35 @@
package com.mrousavy.camera.example
import android.util.Log
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
/**
 * Example Frame Processor plugin written in Kotlin.
 *
 * Logs the incoming frame's image dimensions/format plus every received
 * parameter, then returns a sample map of mixed-type values back to JS.
 * Returns null when no parameters were passed.
 */
class ExampleKotlinFrameProcessorPlugin: FrameProcessorPlugin() {
  override fun callback(frame: Frame, params: Map<String, Any>?): Any? {
    if (params == null) {
      return null
    }

    val image = frame.image
    Log.d(
      "ExampleKotlinPlugin",
      "${image.width} x ${image.height} Image with format #${image.format}. Logging ${params.size} parameters:"
    )

    params.keys.forEach { key ->
      // Look the value up as nullable, mirroring the defensive null handling.
      val value = params[key]
      val rendered = if (value == null) "(null)" else "$value (${value.javaClass.name})"
      Log.d("ExampleKotlinPlugin", " -> $rendered")
    }

    // Sample result demonstrating the supported return types.
    return hashMapOf<String, Any>(
      "example_str" to "KotlinTest",
      "example_bool" to false,
      "example_double" to 6.7,
      "example_array" to arrayListOf<Any>(
        "Good bye",
        false,
        21.37
      )
    )
  }
}

View File

@ -66,5 +66,6 @@ public class MainApplication extends Application implements ReactApplication {
}
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin", options -> new ExampleFrameProcessorPlugin());
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_kotlin_swift_plugin", options -> new ExampleKotlinFrameProcessorPlugin());
}
}

View File

@ -15,5 +15,6 @@ buildscript {
dependencies {
classpath('com.android.tools.build:gradle:7.4.1')
classpath("com.facebook.react:react-native-gradle-plugin")
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:1.7.22")
}
}

View File

@ -18,7 +18,7 @@
@implementation ExampleFrameProcessorPlugin
- (id)callback:(Frame *)frame withArguments:(NSArray<id> *)arguments {
- (id)callback:(Frame *)frame withArguments:(NSDictionary *)arguments {
CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
NSLog(@"ExamplePlugin: %zu x %zu Image. Logging %lu parameters:", CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer), (unsigned long)arguments.count);
@ -28,11 +28,11 @@
return @{
@"example_str": @"Test",
@"example_bool": @true,
@"example_bool": @(YES),
@"example_double": @5.3,
@"example_array": @[
@"Hello",
@true,
@(YES),
@17.38
]
};

View File

@ -0,0 +1,31 @@
//
// ExampleSwiftFrameProcessor.m
// VisionCameraExample
//
// Created by Mateusz Medrek on 02/10/2023.
//
#if __has_include(<VisionCamera/FrameProcessorPlugin.h>)
#import <Foundation/Foundation.h>
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
#import <VisionCamera/Frame.h>
#import "VisionCameraExample-Swift.h"
// Example of automatic registration for a Swift Frame Processor plugin
// Empty category used purely as a hook: its +load method registers the
// Swift plugin class with the FrameProcessorPluginRegistry.
@interface ExampleSwiftFrameProcessorPlugin (FrameProcessorPluginLoader)
@end
@implementation ExampleSwiftFrameProcessorPlugin (FrameProcessorPluginLoader)
// +load runs when this class is added to the Objective-C runtime (app
// startup), so the plugin is registered without any manual call site.
+ (void)load
{
// Registers the plugin under the name used by the JS side; the initializer
// block ignores `options` and simply constructs a fresh plugin instance.
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"example_kotlin_swift_plugin" withInitializer:^FrameProcessorPlugin * _Nonnull(NSDictionary * _Nullable options) {
return [[ExampleSwiftFrameProcessorPlugin alloc] init];
}];
}
@end
#endif

View File

@ -0,0 +1,42 @@
//
// ExampleSwiftFrameProcessor.swift
// VisionCameraExample
//
// Created by Mateusz Medrek on 02/10/2023.
//
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
import VisionCamera
// Example for a Swift Frame Processor plugin
@objc(ExampleSwiftFrameProcessorPlugin)
public class ExampleSwiftFrameProcessorPlugin: FrameProcessorPlugin {
  /// Called for every camera frame. Logs the frame's pixel dimensions and
  /// each received argument (value plus its dynamic type), then returns a
  /// sample dictionary of mixed-type values back to JS.
  public override func callback(_ frame: Frame, withArguments arguments: [AnyHashable: Any]?) -> Any? {
    let imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer)

    if let arguments, let imageBuffer {
      let width = CVPixelBufferGetWidth(imageBuffer)
      let height = CVPixelBufferGetHeight(imageBuffer)
      print(
        "ExampleSwiftPlugin: \(width) x \(height) Image. Logging \(arguments.count) parameters:"
      )

      // Fix: the original called String(describing:) on the optional
      // subscript result (printing "Optional(...)") and used `value.self`,
      // which is the value itself — not its type. Iterating the values
      // directly avoids the optional, and type(of:) reports the dynamic type.
      for value in arguments.values {
        print("ExampleSwiftPlugin: -> \(value) (\(type(of: value)))")
      }
    }

    // Sample result demonstrating the supported return types.
    return [
      "example_str": "SwiftTest",
      "example_bool": false,
      "example_double": 6.7,
      "example_array": ["Good bye", false, 21.37]
    ]
  }
}
#endif

View File

@ -5,6 +5,8 @@ require Pod::Executable.execute_command('node', ['-p',
{paths: [process.argv[1]]},
)', __dir__]).strip
require_relative './vc_example_cocoapod_utils.rb'
platform :ios, min_ios_version_supported
prepare_react_native_project!
@ -46,5 +48,8 @@ target 'VisionCameraExample' do
:mac_catalyst_enabled => false
)
__apply_Xcode_12_5_M1_post_install_workaround(installer)
# Define the "VISION_CAMERA_ENABLE_FRAME_PROCESSORS" Swift compiler flag if frame processors are enabled
set_frame_processor_swift_flag_in_example_project(installer)
end
end

View File

@ -736,6 +736,6 @@ SPEC CHECKSUMS:
VisionCamera: cb27c70cea1439e9a946bf2acdc059f8fb1bc68c
Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce
PODFILE CHECKSUM: ab9c06b18c63e741c04349c0fd630c6d3145081c
PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
COCOAPODS: 1.13.0

View File

@ -1,8 +1,3 @@
//
// Use this file to import your target's public headers that you would like to expose to Swift.
//
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>
#endif

View File

@ -11,7 +11,8 @@
13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; };
B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */ = {isa = PBXBuildFile; };
839E2C632ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m in Sources */ = {isa = PBXBuildFile; fileRef = 839E2C622ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m */; };
839E2C652ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 839E2C642ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift */; };
B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */; };
B8F0E10825E0199F00586F16 /* File.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F0E10725E0199F00586F16 /* File.swift */; };
C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */; };
@ -27,6 +28,8 @@
13B07FB71A68108700A75B9A /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = VisionCameraExample/main.m; sourceTree = "<group>"; };
3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-VisionCameraExample.a"; sourceTree = BUILT_PRODUCTS_DIR; };
81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = VisionCameraExample/LaunchScreen.storyboard; sourceTree = "<group>"; };
839E2C622ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ExampleSwiftFrameProcessor.m; sourceTree = "<group>"; };
839E2C642ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExampleSwiftFrameProcessor.swift; sourceTree = "<group>"; };
B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ExampleFrameProcessorPlugin.m; sourceTree = "<group>"; };
B8F0E10625E0199F00586F16 /* VisionCameraExample-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "VisionCameraExample-Bridging-Header.h"; sourceTree = "<group>"; };
B8F0E10725E0199F00586F16 /* File.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = File.swift; sourceTree = "<group>"; };
@ -91,6 +94,15 @@
name = Libraries;
sourceTree = "<group>";
};
839E2C612ACB2E140037BC2B /* Example Swift Plugin */ = {
isa = PBXGroup;
children = (
839E2C622ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m */,
839E2C642ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift */,
);
path = "Example Swift Plugin";
sourceTree = "<group>";
};
83CBB9F61A601CBA00E9B192 = {
isa = PBXGroup;
children = (
@ -117,6 +129,7 @@
isa = PBXGroup;
children = (
B8DB3BD7263DEA31004C18D7 /* Example Plugin */,
839E2C612ACB2E140037BC2B /* Example Swift Plugin */,
);
path = "Frame Processor Plugins";
sourceTree = "<group>";
@ -375,7 +388,8 @@
files = (
13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */,
B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */,
B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */,
839E2C652ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift in Sources */,
839E2C632ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m in Sources */,
B8F0E10825E0199F00586F16 /* File.swift in Sources */,
13B07FC11A68108700A75B9A /* main.m in Sources */,
);
@ -396,6 +410,7 @@
INFOPLIST_FILE = VisionCameraExample/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography";
IPHONEOS_DEPLOYMENT_TARGET = 12.4;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
OTHER_LDFLAGS = (
"$(inherited)",
@ -426,6 +441,7 @@
INFOPLIST_FILE = VisionCameraExample/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography";
IPHONEOS_DEPLOYMENT_TARGET = 12.4;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
OTHER_LDFLAGS = (
"$(inherited)",
@ -504,6 +520,7 @@
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "$(inherited)";
OTHER_CPLUSPLUSFLAGS = "$(inherited)";
OTHER_SWIFT_FLAGS = "$(inherited) -DVISION_CAMERA_ENABLE_FRAME_PROCESSORS";
REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native";
SDKROOT = iphoneos;
};
@ -561,6 +578,7 @@
MTL_ENABLE_DEBUG_INFO = NO;
OTHER_CFLAGS = "$(inherited)";
OTHER_CPLUSPLUSFLAGS = "$(inherited)";
OTHER_SWIFT_FLAGS = "$(inherited) -DVISION_CAMERA_ENABLE_FRAME_PROCESSORS";
REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native";
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;

View File

@ -1,10 +1,6 @@
#import "AppDelegate.h"
#import <React/RCTBundleURLProvider.h>
#import "VisionCameraExample-Swift.h"
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import <VisionCamera/FrameProcessorPlugin.h>
#endif
@implementation AppDelegate
@ -15,10 +11,6 @@
// They will be passed down to the ViewController used by React Native.
self.initialProps = @{};
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
[FrameProcessorPlugin registerPlugin:[[ExamplePluginSwift alloc] init]];
#endif
return [super application:application didFinishLaunchingWithOptions:launchOptions];
}

View File

@ -0,0 +1,63 @@
# Let's use ReactNativePodsUtils utility class to make it more maintainable
require Pod::Executable.execute_command('node', ['-p',
'require.resolve(
"react-native/scripts/cocoapods/utils.rb",
{paths: [process.argv[1]]},
)', __dir__]).strip
#
# In order to compile the project with disabled frame processors mode
# the codebase of example swift frame processor plugin needs to be put
# behind the swift compiler flag
#
# Let's set the flag based on the same logic as in the library's podspec file
#
# Entry point used by the example Podfile: decides whether frame processors
# are enabled and applies (or removes) the Swift compiler flag accordingly.
def set_frame_processor_swift_flag_in_example_project(installer)
  set_swift_compiler_flag_in_project(installer, does_example_have_worklets_enabled(installer))
end
# Returns true when the example app should enable frame processors: the
# "react-native-worklets-core" pod must be present and the user must not
# have opted out via $VCDisableFrameProcessors.
def does_example_have_worklets_enabled(installer)
  # NOTE(review): `defined?` is truthy even when the variable is set to
  # `false`, so merely declaring $VCDisableFrameProcessors disables frame
  # processors here — confirm this matches the library podspec's logic.
  force_disable_frame_processors = defined?($VCDisableFrameProcessors) ? true : false
  # Return the result directly instead of the original's redundant
  # assignment-as-last-expression.
  ReactNativePodsUtils.has_pod(installer, "react-native-worklets-core") && !force_disable_frame_processors
end
#
# It's similar to:
# - [`ReactNativePodsUtils.add_compiler_flag_to_project`](https://github.com/facebook/react-native/blob/ad5213718377017ec6d2a057541f6d4c57e0507d/packages/react-native/scripts/cocoapods/utils.rb#L384)
# - [`ReactNativePodsUtils.remove_compiler_flag_from_project`](https://github.com/facebook/react-native/blob/ad5213718377017ec6d2a057541f6d4c57e0507d/packages/react-native/scripts/cocoapods/utils.rb#L395)
# but uses utils for setting "OTHER_SWIFT_FLAGS" compiler flag, which is unsupported by ReactNativePodsUtils
#
# Walks every project extracted by ReactNativePodsUtils and, per build
# configuration, adds or removes the frame-processor Swift flag; each
# project is saved afterwards so the change lands on disk.
def set_swift_compiler_flag_in_project(installer, has_worklets)
  # Loop-invariant: the flag text never changes per configuration.
  flag = "-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS"
  ReactNativePodsUtils.extract_projects(installer).each do |project|
    project.build_configurations.each do |config|
      has_worklets ? set_swift_flag_in_config(config, flag) : remove_swift_flag_in_config(config, flag)
    end
    project.save
  end
end
#
# It's similar to [`ReactNativePodsUtils.set_flag_in_config`](https://github.com/facebook/react-native/blob/ad5213718377017ec6d2a057541f6d4c57e0507d/packages/react-native/scripts/cocoapods/utils.rb#L414),
# but supports "OTHER_SWIFT_FLAGS" compiler flag
#
# Adds `flag` to the "OTHER_SWIFT_FLAGS" build setting of the given build
# configuration, delegating to ReactNativePodsUtils.add_flag_for_key.
def set_swift_flag_in_config(config, flag)
ReactNativePodsUtils.add_flag_for_key(config, flag, "OTHER_SWIFT_FLAGS")
end
#
# It's similar to [`ReactNativePodsUtils.remove_flag_in_config`](https://github.com/facebook/react-native/blob/ad5213718377017ec6d2a057541f6d4c57e0507d/packages/react-native/scripts/cocoapods/utils.rb#L421),
# but supports "OTHER_SWIFT_FLAGS" compiler flag
#
# Removes `flag` from the "OTHER_SWIFT_FLAGS" build setting of the given
# build configuration, delegating to ReactNativePodsUtils.remove_flag_for_key.
def remove_swift_flag_in_config(config, flag)
ReactNativePodsUtils.remove_flag_for_key(config, flag, "OTHER_SWIFT_FLAGS")
end

View File

@ -0,0 +1,17 @@
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'
const plugin = VisionCameraProxy.getFrameProcessorPlugin('example_kotlin_swift_plugin')
/**
 * Worklet that invokes the "example_kotlin_swift_plugin" native Frame
 * Processor plugin with a fixed set of mixed-type parameters.
 *
 * @param frame The camera frame to pass through to the native plugin.
 * @returns The plugin's result, cast to a string array.
 * @throws If the plugin was not found at module load time.
 */
export function exampleKotlinSwiftPlugin(frame: Frame): string[] {
  'worklet'

  if (plugin == null) {
    throw new Error('Failed to load Frame Processor Plugin "example_kotlin_swift_plugin"!')
  }

  const parameters = {
    someString: 'hello!',
    someBoolean: true,
    someNumber: 42,
    someObject: { test: 0, second: 'test' },
    someArray: ['another test', 5],
  }
  return plugin.call(frame, parameters) as string[]
}