feat: Expose unified VisionCameraProxy object, make FrameProcessorPlugins object-oriented (#1660)

* feat: Replace `FrameProcessorRuntimeManager` with `VisionCameraProxy` (iOS)

* Make `FrameProcessorPlugin` a constructable HostObject

* fix: Fix `name` override

* Simplify `useFrameProcessor`

* fix: Fix lint errors

* Remove FrameProcessorPlugin::name

* JSIUtils -> JSINSObjectConversion
Marc Rousavy 2023-07-21 17:52:30 +02:00 committed by GitHub
parent 375e894038
commit 44ed42d5d6
41 changed files with 762 additions and 607 deletions


@ -10,14 +10,25 @@ while !Dir.exist?(File.join(nodeModules, "node_modules")) && tries < 10
end
nodeModules = File.join(nodeModules, "node_modules")
puts("[VisionCamera] node modules #{Dir.exist?(nodeModules) ? "found at #{nodeModules}" : "not found!"}")
forceDisableFrameProcessors = false
if defined?($VCDisableFrameProcessors)
Pod::UI.puts "[VisionCamera] $VCDisableFrameProcesors is set to #{$VCDisableFrameProcessors}!"
forceDisableFrameProcessors = $VCDisableFrameProcessors
end
forceDisableSkia = false
if defined?($VCDisableSkia)
Pod::UI.puts "[VisionCamera] $VCDisableSkia is set to #{$VCDisableSkia}!"
forceDisableSkia = $VCDisableSkia
end
Pod::UI.puts("[VisionCamera] node modules #{Dir.exist?(nodeModules) ? "found at #{nodeModules}" : "not found!"}")
workletsPath = File.join(nodeModules, "react-native-worklets")
hasWorklets = File.exist?(workletsPath)
puts "[VisionCamera] react-native-worklets #{hasWorklets ? "found" : "not found"}, Frame Processors #{hasWorklets ? "enabled" : "disabled"}!"
hasWorklets = File.exist?(workletsPath) && !forceDisableFrameProcessors
Pod::UI.puts("[VisionCamera] react-native-worklets #{hasWorklets ? "found" : "not found"}, Frame Processors #{hasWorklets ? "enabled" : "disabled"}!")
skiaPath = File.join(nodeModules, "@shopify", "react-native-skia")
hasSkia = hasWorklets && File.exist?(skiaPath)
puts "[VisionCamera] react-native-skia #{hasSkia ? "found" : "not found"}, Skia Frame Processors #{hasSkia ? "enabled" : "disabled"}!"
hasSkia = hasWorklets && File.exist?(skiaPath) && !forceDisableSkia
Pod::UI.puts("[VisionCamera] react-native-skia #{hasSkia ? "found" : "not found"}, Skia Frame Processors #{hasSkia ? "enabled" : "disabled"}!")
Pod::Spec.new do |s|
s.name = "VisionCamera"
@ -54,8 +65,9 @@ Pod::Spec.new do |s|
hasWorklets ? "ios/Frame Processor/*.{m,mm,swift}" : "",
hasWorklets ? "ios/Frame Processor/Frame.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessor.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorRuntimeManager.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorPlugin.h" : "",
hasWorklets ? "ios/Frame Processor/FrameProcessorPluginRegistry.h" : "",
hasWorklets ? "ios/Frame Processor/VisionCameraProxy.h" : "",
hasWorklets ? "cpp/**/*.{cpp}" : "",
# Skia Frame Processors


@ -73,10 +73,8 @@ class PropNameIDCache {
PropNameIDCache propNameIDCache;
InvalidateCacheOnDestroy::InvalidateCacheOnDestroy(jsi::Runtime &runtime) {
key = reinterpret_cast<uintptr_t>(&runtime);
}
InvalidateCacheOnDestroy::~InvalidateCacheOnDestroy() {
void invalidateArrayBufferCache(jsi::Runtime& runtime) {
auto key = reinterpret_cast<uintptr_t>(&runtime);
propNameIDCache.invalidate(key);
}


@ -74,24 +74,7 @@ struct typedArrayTypeMap<TypedArrayKind::Float64Array> {
typedef double type;
};
// Instance of this class will invalidate PropNameIDCache when destructor is called.
// Attach this object to global in specific jsi::Runtime to make sure lifecycle of
// the cache object is connected to the lifecycle of the js runtime
class InvalidateCacheOnDestroy : public jsi::HostObject {
public:
explicit InvalidateCacheOnDestroy(jsi::Runtime &runtime);
virtual ~InvalidateCacheOnDestroy();
virtual jsi::Value get(jsi::Runtime &, const jsi::PropNameID &name) {
return jsi::Value::null();
}
virtual void set(jsi::Runtime &, const jsi::PropNameID &name, const jsi::Value &value) {}
virtual std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) {
return {};
}
private:
uintptr_t key;
};
void invalidateArrayBufferCache(jsi::Runtime& runtime);
class TypedArrayBase : public jsi::Object {
public:


@ -120,6 +120,36 @@ const frameProcessor = useFrameProcessor((frame) => {
}, [onQRCodeDetected])
```
### Running asynchronously
Since Frame Processors run synchronously with the Camera Pipeline, anything taking longer than one Frame interval might block the Camera from streaming new Frames. To avoid this, you can use `runAsync` to run code asynchronously on a different Thread:
```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running synchronously at 60 FPS!")
runAsync(() => {
'worklet'
console.log("I'm running asynchronously, possibly at a lower FPS rate!")
})
}, [])
```
### Running at a throttled FPS rate
Some Frame Processor Plugins don't need to run on every Frame. For example, a Frame Processor that detects the brightness of a Frame only needs to run twice per second:
```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log("I'm running synchronously at 60 FPS!")
runAtTargetFps(2, () => {
'worklet'
console.log("I'm running synchronously at 2 FPS!")
})
}, [])
```
### Using Frame Processor Plugins
Frame Processor Plugins are distributed through npm. To install the [**vision-camera-image-labeler**](https://github.com/mrousavy/vision-camera-image-labeler) plugin, run:
@ -204,7 +234,7 @@ The Frame Processor API spawns a secondary JavaScript Runtime which consumes a s
Inside your `gradle.properties` file, add the `disableFrameProcessors` flag:
```
```groovy
disableFrameProcessors=true
```
@ -212,18 +242,12 @@ Then, clean and rebuild your project.
#### iOS
Inside your `project.pbxproj`, find the `GCC_PREPROCESSOR_DEFINITIONS` group and add the flag:
Inside your `Podfile`, add the `VCDisableFrameProcessors` flag:
```txt {3}
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"VISION_CAMERA_DISABLE_FRAME_PROCESSORS=1",
"$(inherited)",
);
```ruby
$VCDisableFrameProcessors = true
```
Make sure to add this to your Debug-, as well as your Release-configuration.
</TabItem>
<TabItem value="expo">


@ -12,14 +12,14 @@ import TabItem from '@theme/TabItem';
Frame Processor Plugins are **native functions** which can be directly called from a JS Frame Processor. (See ["Frame Processors"](frame-processors))
They **receive a frame from the Camera** as an input and can return any kind of output. For example, a `scanQRCodes` function returns an array of detected QR code strings in the frame:
They **receive a frame from the Camera** as an input and can return any kind of output. For example, a `detectFaces` function returns an array of detected faces in the frame:
```tsx {4-5}
function App() {
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const qrCodes = scanQRCodes(frame)
console.log(`QR Codes in Frame: ${qrCodes}`)
const faces = detectFaces(frame)
console.log(`Faces in Frame: ${faces}`)
}, [])
return (
@ -28,7 +28,7 @@ function App() {
}
```
To achieve **maximum performance**, the `scanQRCodes` function is written in a native language (e.g. Objective-C), but it will be directly called from the VisionCamera Frame Processor JavaScript-Runtime.
To achieve **maximum performance**, the `detectFaces` function is written in a native language (e.g. Objective-C), but it will be directly called from the VisionCamera Frame Processor JavaScript-Runtime.
### Types
@ -43,7 +43,7 @@ Similar to a TurboModule, the Frame Processor Plugin Registry API automatically
| `{}` | `NSDictionary*` | `ReadableNativeMap` |
| `undefined` / `null` | `nil` | `null` |
| `(any, any) => void` | [`RCTResponseSenderBlock`][4] | `(Object, Object) -> void` |
| [`Frame`][1] | [`Frame*`][2] | [`ImageProxy`][3] |
| [`Frame`][1] | [`Frame*`][2] | [`Frame`][3] |
### Return values
@ -51,7 +51,7 @@ Return values will automatically be converted to JS values, assuming they are re
```java
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(Frame frame, Object[] params) {
return "cat";
}
```
@ -66,13 +66,13 @@ export function detectObject(frame: Frame): string {
}
```
You can also manipulate the buffer and return it (or a copy of it) by returning a [`Frame`][2]/[`ImageProxy`][3] instance:
You can also manipulate the buffer and return it (or a copy of it) by returning a [`Frame`][2]/[`Frame`][3] instance:
```java
@Override
public Object callback(ImageProxy image, Object[] params) {
ImageProxy resizedImage = new ImageProxy(/* ... */);
return resizedImage;
public Object callback(Frame frame, Object[] params) {
Frame resizedFrame = new Frame(/* ... */);
return resizedFrame;
}
```
@ -97,16 +97,7 @@ Frame Processors can also accept parameters, following the same type convention
```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const codes = scanCodes(frame, ['qr', 'barcode'])
}, [])
```
Or with multiple ("variadic") parameters:
```ts
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const codes = scanCodes(frame, true, 'hello-world', 42)
const codes = scanCodes(frame, { codes: ['qr', 'barcode'] })
}, [])
```
@ -116,7 +107,7 @@ To let the user know that something went wrong you can use Exceptions:
```java
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(Frame frame, Object[] params) {
if (params[0] instanceof String) {
// ...
} else {
@ -152,13 +143,13 @@ For example, a realtime video chat application might use WebRTC to send the fram
```java
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(Frame frame, Object[] params) {
String serverURL = (String)params[0];
ImageProxy imageCopy = new ImageProxy(/* ... */);
Frame frameCopy = new Frame(/* ... */);
uploaderQueue.runAsync(() -> {
WebRTC.uploadImage(imageCopy, serverURL);
imageCopy.close();
WebRTC.uploadImage(frameCopy, serverURL);
frameCopy.close();
});
return null;
@ -195,14 +186,7 @@ This way you can handle queueing up the frames yourself and asynchronously call
### Benchmarking Frame Processor Plugins
Your Frame Processor Plugins have to be fast. VisionCamera automatically detects slow Frame Processors and outputs relevant information in the native console (Xcode: **Debug Area**, Android Studio: **Logcat**):
<div align="center">
<img src={useBaseUrl("img/slow-log.png")} width="80%" />
</div>
<div align="center">
<img src={useBaseUrl("img/slow-log-2.png")} width="80%" />
</div>
Your Frame Processor Plugins have to be fast. Use the FPS Graph (`enableFpsGraph`) to see how fast your Camera is running; if it is not running at the target FPS, your Frame Processor is too slow.
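As a rough sketch (reusing the `cameraProps` placeholder from the other examples in these docs), enabling the FPS Graph looks like this:
```tsx
function App() {
  const frameProcessor = useFrameProcessor((frame) => {
    'worklet'
    // ... your Frame Processing code
  }, [])

  // enableFpsGraph overlays a live FPS graph on the Camera preview,
  // so you can immediately see when a Frame Processor drops below the target FPS
  return (
    <Camera
      {...cameraProps}
      frameProcessor={frameProcessor}
      enableFpsGraph={true}
    />
  )
}
```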
<br />


@ -0,0 +1,110 @@
---
id: frame-processors-skia
title: Skia Frame Processors
sidebar_label: Skia Frame Processors
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import useBaseUrl from '@docusaurus/useBaseUrl';
<div>
<svg xmlns="http://www.w3.org/2000/svg" width="283" height="535" style={{ float: 'right' }}>
<image href={useBaseUrl("img/frame-processors.gif")} x="18" y="33" width="247" height="469" />
<image href={useBaseUrl("img/frame.png")} width="283" height="535" />
</svg>
</div>
### What are Skia Frame Processors?
Skia Frame Processors are [Frame Processors](frame-processors) that allow you to draw onto the Frame using [react-native-skia](https://github.com/Shopify/react-native-skia).
For example, you might want to draw a rectangle around a user's face **without writing any native code**, while still **achieving native performance**:
```jsx
function App() {
const frameProcessor = useSkiaFrameProcessor((frame) => {
'worklet'
const faces = detectFaces(frame)
faces.forEach((face) => {
frame.drawRect(face.rectangle, redPaint)
})
}, [])
return (
<Camera
{...cameraProps}
frameProcessor={frameProcessor}
/>
)
}
```
With Skia, you can also implement realtime filters, blurring, shaders, and much more. For example, this is how you invert the colors in a Frame:
```jsx
const INVERTED_COLORS_SHADER = `
uniform shader image;
half4 main(vec2 pos) {
vec4 color = image.eval(pos);
return vec4(1.0 - color.rgb, 1.0);
}
`;
function App() {
const imageFilter = Skia.ImageFilter.MakeRuntimeShader(/* INVERTED_COLORS_SHADER */)
const paint = Skia.Paint()
paint.setImageFilter(imageFilter)
const frameProcessor = useSkiaFrameProcessor((frame) => {
'worklet'
frame.render(paint)
}, [])
return (
<Camera
{...cameraProps}
frameProcessor={frameProcessor}
/>
)
}
```
### Rendered outputs
The Skia Frame Processor renders to an offscreen context, and the result is displayed in the Camera Preview, recorded to a video file (`startRecording()`) and captured in a photo (`takePhoto()`). In other words, you draw into the Frame, not just on top of it.
### Performance
VisionCamera sets up an additional Skia rendering context, which adds a small amount of resource overhead.
On iOS, Metal is used for GPU Acceleration. On Android, OpenGL is used for GPU Acceleration.
C++/JSI is used for highly efficient communication between JS and Skia.
### Disabling Skia Frame Processors
Skia Frame Processors ship with additional C++ files which might slightly increase the app's build time. If you're not using Skia Frame Processors at all, you can disable them:
#### Android
Inside your `gradle.properties` file, add the `disableSkia` flag:
```groovy
disableSkia=true
```
Then, clean and rebuild your project.
#### iOS
Inside your `Podfile`, add the `VCDisableSkia` flag:
```ruby
$VCDisableSkia = true
```
<br />
#### 🚀 Next section: [Zooming](/docs/guides/zooming) (or [creating a Frame Processor Plugin](/docs/guides/frame-processors-plugins-overview))


@ -9,14 +9,16 @@ sidebar_label: Finish creating your Frame Processor Plugin
To make the Frame Processor Plugin available to the Frame Processor Worklet Runtime, create the following wrapper function in JS/TS:
```ts
import { FrameProcessorPlugins, Frame } from 'react-native-vision-camera'
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'
const plugin = VisionCameraProxy.getFrameProcessorPlugin('scanFaces')
/**
* Scans QR codes.
* Scans faces.
*/
export function scanQRCodes(frame: Frame): string[] {
export function scanFaces(frame: Frame): object {
'worklet'
return FrameProcessorPlugins.scanQRCodes(frame)
return plugin.call(frame)
}
```
@ -28,8 +30,8 @@ Simply call the wrapper Worklet in your Frame Processor:
function App() {
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
const qrCodes = scanQRCodes(frame)
console.log(`QR Codes in Frame: ${qrCodes}`)
const faces = scanFaces(frame)
console.log(`Faces in Frame: ${faces}`)
}, [])
return (


@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
## Creating a Frame Processor Plugin for Android
The Frame Processor Plugin API is built to be as extensible as possible, which allows you to create custom Frame Processor Plugins.
In this guide we will create a custom QR Code Scanner Plugin which can be used from JS.
In this guide we will create a custom Face Detector Plugin which can be used from JS.
Android Frame Processor Plugins can be written in either **Java**, **Kotlin** or **C++ (JNI)**.
@ -23,7 +23,7 @@ npx vision-camera-plugin-builder android
```
:::info
The CLI will ask you for the path to project's Android Manifest file, name of the plugin (e.g. `QRCodeFrameProcessor`), name of the exposed method (e.g. `scanQRCodes`) and language you want to use for plugin development (Java or Kotlin).
The CLI will ask you for the path to your project's Android Manifest file, the name of the plugin (e.g. `FaceDetectorFrameProcessorPlugin`), the name of the exposed method (e.g. `detectFaces`) and the language you want to use for plugin development (Java or Kotlin).
For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-plugin-builder#%EF%B8%8F-options).
:::
@ -35,7 +35,7 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
@SuppressWarnings("UnnecessaryLocalVariable")
List<ReactPackage> packages = new PackageList(this).getPackages();
...
packages.add(new QRCodeFrameProcessorPluginPackage()); // <- add
packages.add(new FaceDetectorFrameProcessorPluginPackage()); // <- add
return packages;
}
```
@ -51,33 +51,34 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
<TabItem value="java">
1. Open your Project in Android Studio
2. Create a Java source file, for the QR Code Plugin this will be called `QRCodeFrameProcessorPlugin.java`.
2. Create a Java source file, for the Face Detector Plugin this will be called `FaceDetectorFrameProcessorPlugin.java`.
3. Add the following code:
```java {8}
import androidx.camera.core.ImageProxy;
import com.facebook.react.bridge.ReadableNativeMap;
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin;
public class QRCodeFrameProcessorPlugin extends FrameProcessorPlugin {
public class FaceDetectorFrameProcessorPlugin extends FrameProcessorPlugin {
@Override
public Object callback(ImageProxy image, Object[] params) {
public Object callback(ImageProxy image, ReadableNativeMap arguments) {
// code goes here
return null;
}
QRCodeFrameProcessorPlugin() {
super("scanQRCodes");
@Override
public String getName() {
return "detectFaces";
}
}
```
:::note
The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlugins` object using the name you pass to the `super(...)` call. In this case, it would be `FrameProcessorPlugins.scanQRCodes(...)`.
The Frame Processor Plugin will be exposed to JS through the `VisionCameraProxy` object. In this case, it would be `VisionCameraProxy.getFrameProcessorPlugin("detectFaces")`.
:::
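For orientation, this is roughly what the JS side of that exposure looks like, following the `getFrameProcessorPlugin`/`call` wrapper pattern used elsewhere in these docs (the `detectFaces` wrapper shown here is only a sketch):
```ts
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'

const plugin = VisionCameraProxy.getFrameProcessorPlugin('detectFaces')

export function detectFaces(frame: Frame): object {
  'worklet'
  if (plugin == null) throw new Error('Failed to load Frame Processor Plugin "detectFaces"!')
  return plugin.call(frame)
}
```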
4. **Implement your Frame Processing.** See the [Example Plugin (Java)](https://github.com/mrousavy/react-native-vision-camera/blob/main/example/android/app/src/main/java/com/mrousavy/camera/example/ExampleFrameProcessorPlugin.java) for reference.
5. Create a new Java file which registers the Frame Processor Plugin in a React Package, for the QR Code Scanner plugin this file will be called `QRCodeFrameProcessorPluginPackage.java`:
5. Create a new Java file which registers the Frame Processor Plugin in a React Package, for the Face Detector plugin this file will be called `FaceDetectorFrameProcessorPluginPackage.java`:
```java {12}
import com.facebook.react.ReactPackage;
@ -87,11 +88,11 @@ import com.facebook.react.uimanager.ViewManager;
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin;
import javax.annotation.Nonnull;
public class QRCodeFrameProcessorPluginPackage implements ReactPackage {
public class FaceDetectorFrameProcessorPluginPackage implements ReactPackage {
@NonNull
@Override
public List<NativeModule> createNativeModules(@NonNull ReactApplicationContext reactContext) {
FrameProcessorPlugin.register(new QRCodeFrameProcessorPlugin());
FrameProcessorPlugin.register(new FaceDetectorFrameProcessorPlugin());
return Collections.emptyList();
}
@ -111,7 +112,7 @@ public class QRCodeFrameProcessorPluginPackage implements ReactPackage {
@SuppressWarnings("UnnecessaryLocalVariable")
List<ReactPackage> packages = new PackageList(this).getPackages();
...
packages.add(new QRCodeFrameProcessorPluginPackage()); // <- add
packages.add(new FaceDetectorFrameProcessorPluginPackage()); // <- add
return packages;
}
```
@ -120,28 +121,32 @@ public class QRCodeFrameProcessorPluginPackage implements ReactPackage {
<TabItem value="kotlin">
1. Open your Project in Android Studio
2. Create a Kotlin source file, for the QR Code Plugin this will be called `QRCodeFrameProcessorPlugin.kt`.
2. Create a Kotlin source file, for the Face Detector Plugin this will be called `FaceDetectorFrameProcessorPlugin.kt`.
3. Add the following code:
```kotlin {7}
import androidx.camera.core.ImageProxy
import com.facebook.react.bridge.ReadableNativeMap
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
class ExampleFrameProcessorPluginKotlin: FrameProcessorPlugin("scanQRCodes") {
class FaceDetectorFrameProcessorPlugin: FrameProcessorPlugin() {
override fun callback(image: ImageProxy, params: Array<Any>): Any? {
override fun callback(image: ImageProxy, arguments: ReadableNativeMap): Any? {
// code goes here
return null
}
override fun getName(): String {
return "detectFaces"
}
}
```
:::note
The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlugins` object using the name you pass to the `FrameProcessorPlugin(...)` call. In this case, it would be `FrameProcessorPlugins.scanQRCodes(...)`.
The Frame Processor Plugin will be exposed to JS through the `VisionCameraProxy` object. In this case, it would be `VisionCameraProxy.getFrameProcessorPlugin("detectFaces")`.
:::
4. **Implement your Frame Processing.** See the [Example Plugin (Java)](https://github.com/mrousavy/react-native-vision-camera/blob/main/example/android/app/src/main/java/com/mrousavy/camera/example/ExampleFrameProcessorPlugin.java) for reference.
5. Create a new Kotlin file which registers the Frame Processor Plugin in a React Package, for the QR Code Scanner plugin this file will be called `QRCodeFrameProcessorPluginPackage.kt`:
5. Create a new Kotlin file which registers the Frame Processor Plugin in a React Package, for the Face Detector plugin this file will be called `FaceDetectorFrameProcessorPluginPackage.kt`:
```kotlin {9}
import com.facebook.react.ReactPackage
@ -150,9 +155,9 @@ import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
class QRCodeFrameProcessorPluginPackage : ReactPackage {
class FaceDetectorFrameProcessorPluginPackage : ReactPackage {
override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> {
FrameProcessorPlugin.register(ExampleFrameProcessorPluginKotlin())
FrameProcessorPlugin.register(FaceDetectorFrameProcessorPlugin())
return emptyList()
}
@ -170,7 +175,7 @@ class QRCodeFrameProcessorPluginPackage : ReactPackage {
@SuppressWarnings("UnnecessaryLocalVariable")
List<ReactPackage> packages = new PackageList(this).getPackages();
...
packages.add(new QRCodeFrameProcessorPluginPackage()); // <- add
packages.add(new FaceDetectorFrameProcessorPluginPackage()); // <- add
return packages;
}
```


@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem';
## Creating a Frame Processor Plugin for iOS
The Frame Processor Plugin API is built to be as extensible as possible, which allows you to create custom Frame Processor Plugins.
In this guide we will create a custom QR Code Scanner Plugin which can be used from JS.
In this guide we will create a custom Face Detector Plugin which can be used from JS.
iOS Frame Processor Plugins can be written in either **Objective-C** or **Swift**.
@ -23,7 +23,7 @@ npx vision-camera-plugin-builder ios
```
:::info
The CLI will ask you for the path to project's .xcodeproj file, name of the plugin (e.g. `QRCodeFrameProcessor`), name of the exposed method (e.g. `scanQRCodes`) and language you want to use for plugin development (Objective-C, Objective-C++ or Swift).
The CLI will ask you for the path to your project's .xcodeproj file, the name of the plugin (e.g. `FaceDetectorFrameProcessorPlugin`), the name of the exposed method (e.g. `detectFaces`) and the language you want to use for plugin development (Objective-C, Objective-C++ or Swift).
For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-plugin-builder#%EF%B8%8F-options).
:::
@ -38,23 +38,25 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
<TabItem value="objc">
1. Open your Project in Xcode
2. Create an Objective-C source file, for the QR Code Plugin this will be called `QRCodeFrameProcessorPlugin.m`.
2. Create an Objective-C source file, for the Face Detector Plugin this will be called `FaceDetectorFrameProcessorPlugin.m`.
3. Add the following code:
```objc
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
#import <VisionCamera/Frame.h>
@interface QRCodeFrameProcessorPlugin : FrameProcessorPlugin
@interface FaceDetectorFrameProcessorPlugin : FrameProcessorPlugin
@end
@implementation QRCodeFrameProcessorPlugin
@implementation FaceDetectorFrameProcessorPlugin
- (NSString *)name {
return @"scanQRCodes";
- (instancetype) initWithOptions:(NSDictionary*)options {
self = [super init];
return self;
}
- (id)callback:(Frame *)frame withArguments:(NSArray<id> *)arguments {
- (id)callback:(Frame*)frame withArguments:(NSDictionary*)arguments {
CMSampleBufferRef buffer = frame.buffer;
UIImageOrientation orientation = frame.orientation;
// code goes here
@ -62,14 +64,17 @@ For reference see the [CLI's docs](https://github.com/mateusz1913/vision-camera-
}
+ (void) load {
[self registerPlugin:[[ExampleFrameProcessorPlugin alloc] init]];
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"detectFaces"
withInitializer:^FrameProcessorPlugin*(NSDictionary* options) {
return [[FaceDetectorFrameProcessorPlugin alloc] initWithOptions:options];
}];
}
@end
```
:::note
The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlugins` object using the name returned from the `name` getter. In this case, it would be `FrameProcessorPlugins.scanQRCodes(...)`.
The Frame Processor Plugin will be exposed to JS through the `VisionCameraProxy` object. In this case, it would be `VisionCameraProxy.getFrameProcessorPlugin("detectFaces")`.
:::
4. **Implement your Frame Processing.** See the [Example Plugin (Objective-C)](https://github.com/mrousavy/react-native-vision-camera/blob/main/example/ios/Frame%20Processor%20Plugins/Example%20Plugin%20%28Objective%2DC%29) for reference.
@ -78,7 +83,7 @@ The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlug
<TabItem value="swift">
1. Open your Project in Xcode
2. Create a Swift file, for the QR Code Plugin this will be `QRCodeFrameProcessorPlugin.swift`. If Xcode asks you to create a Bridging Header, press **create**.
2. Create a Swift file, for the Face Detector Plugin this will be `FaceDetectorFrameProcessorPlugin.swift`. If Xcode asks you to create a Bridging Header, press **create**.
![Xcode "Create Bridging Header" alert](https://docs-assets.developer.apple.com/published/7ebca7212c/2a065d1a-7e53-4907-a889-b7fa4f2206c9.png)
@ -92,13 +97,9 @@ The Frame Processor Plugin will be exposed to JS through the `FrameProcessorPlug
4. In the Swift file, add the following code:
```swift
@objc(QRCodeFrameProcessorPlugin)
public class QRCodeFrameProcessorPlugin: FrameProcessorPlugin {
override public func name() -> String! {
return "scanQRCodes"
}
public override func callback(_ frame: Frame!, withArguments arguments: [Any]!) -> Any! {
@objc(FaceDetectorFrameProcessorPlugin)
public class FaceDetectorFrameProcessorPlugin: FrameProcessorPlugin {
public override func callback(_ frame: Frame!, withArguments arguments: [String:Any]) -> Any {
let buffer = frame.buffer
let orientation = frame.orientation
// code goes here
@ -107,11 +108,12 @@ public class QRCodeFrameProcessorPlugin: FrameProcessorPlugin {
}
```
5. In your `AppDelegate.m`, add the following imports (you can skip this if your AppDelegate is in Swift):
5. In your `AppDelegate.m`, add the following imports:
```objc
#import "YOUR_XCODE_PROJECT_NAME-Swift.h"
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
```
6. In your `AppDelegate.m`, add the following code to `application:didFinishLaunchingWithOptions:`:
@ -121,7 +123,10 @@ public class QRCodeFrameProcessorPlugin: FrameProcessorPlugin {
{
...
[FrameProcessorPlugin registerPlugin:[[QRCodeFrameProcessorPlugin alloc] init]];
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"detectFaces"
withInitializer:^FrameProcessorPlugin*(NSDictionary* options) {
return [[FaceDetectorFrameProcessorPlugin alloc] initWithOptions:options];
}];
return [super application:application didFinishLaunchingWithOptions:launchOptions];
}


@ -21,10 +21,10 @@ Before opening an issue, make sure you try the following:
npm i # or "yarn"
cd ios && pod repo update && pod update && pod install
```
2. Check your minimum iOS version. VisionCamera requires a minimum iOS version of **11.0**.
2. Check your minimum iOS version. VisionCamera requires a minimum iOS version of **12.4**.
1. Open your `Podfile`
2. Make sure `platform :ios` is set to `11.0` or higher
3. Make sure `iOS Deployment Target` is set to `11.0` or higher (`IPHONEOS_DEPLOYMENT_TARGET` in `project.pbxproj`)
2. Make sure `platform :ios` is set to `12.4` or higher
3. Make sure `iOS Deployment Target` is set to `12.4` or higher (`IPHONEOS_DEPLOYMENT_TARGET` in `project.pbxproj`)
3. Check your Swift version. VisionCamera requires a minimum Swift version of **5.2**.
1. Open `project.pbxproj` in a Text Editor
2. If the `LIBRARY_SEARCH_PATH` value is set, make sure there is no explicit reference to Swift-5.0. If there is, remove it. See [this StackOverflow answer](https://stackoverflow.com/a/66281846/1123156).
@ -35,9 +35,12 @@ Before opening an issue, make sure you try the following:
3. Select **Swift File** and press **Next**
4. Choose whatever name you want, e.g. `File.swift` and press **Create**
5. Press **Create Bridging Header** when prompted.
5. If you're having runtime issues, check the logs in Xcode to find out more. In Xcode, go to **View** > **Debug Area** > **Activate Console** (<kbd>⇧</kbd>+<kbd>⌘</kbd>+<kbd>C</kbd>).
5. If you're having build issues, try:
1. Building without Skia. Set `$VCDisableSkia = true` at the top of your Podfile, and try rebuilding.
2. Building without Frame Processors. Set `$VCDisableFrameProcessors = true` at the top of your Podfile, and try rebuilding.
6. If you're having runtime issues, check the logs in Xcode to find out more. In Xcode, go to **View** > **Debug Area** > **Activate Console** (<kbd>⇧</kbd>+<kbd>⌘</kbd>+<kbd>C</kbd>).
* For errors without messages, there's often an error code attached. Look up the error code on [osstatus.com](https://www.osstatus.com) to get more information about a specific error.
6. If your Frame Processor is not running, make sure you check the native Xcode logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
7. If your Frame Processor is not running, make sure you check the native Xcode logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
## Android
@ -64,9 +67,12 @@ Before opening an issue, make sure you try the following:
```
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
```
5. If you're having runtime issues, check the logs in Android Studio/Logcat to find out more. In Android Studio, go to **View** > **Tool Windows** > **Logcat** (<kbd>⌘</kbd>+<kbd>6</kbd>) or run `adb logcat` in Terminal.
6. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information.
7. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
5. If you're having build issues, try:
1. Building without Skia. Set `disableSkia = true` in your `gradle.properties`, and try rebuilding.
2. Building without Frame Processors. Set `disableFrameProcessors = true` in your `gradle.properties`, and try rebuilding.
6. If you're having runtime issues, check the logs in Android Studio/Logcat to find out more. In Android Studio, go to **View** > **Tool Windows** > **Logcat** (<kbd>⌘</kbd>+<kbd>6</kbd>) or run `adb logcat` in Terminal.
7. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information.
8. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
## Issues

Two binary image files deleted (20 KiB and 29 KiB), not shown.


@ -1,48 +0,0 @@
//
// ExamplePluginSwift.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.04.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
import AVKit
import Vision
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
@objc
public class ExamplePluginSwift : FrameProcessorPlugin {
override public func name() -> String! {
return "example_plugin_swift"
}
public override func callback(_ frame: Frame!, withArguments arguments: [Any]!) -> Any! {
guard let imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer) else {
return nil
}
NSLog("ExamplePlugin: \(CVPixelBufferGetWidth(imageBuffer)) x \(CVPixelBufferGetHeight(imageBuffer)) Image. Logging \(arguments.count) parameters:")
arguments.forEach { arg in
var string = "\(arg)"
if let array = arg as? NSArray {
string = (array as Array).description
} else if let map = arg as? NSDictionary {
string = (map as Dictionary).description
}
NSLog("ExamplePlugin: -> \(string) (\(type(of: arg)))")
}
return [
"example_str": "Test",
"example_bool": true,
"example_double": 5.3,
"example_array": [
"Hello",
true,
17.38,
],
]
}
}
#endif


@ -8,6 +8,7 @@
#if __has_include(<VisionCamera/FrameProcessorPlugin.h>)
#import <Foundation/Foundation.h>
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/FrameProcessorPluginRegistry.h>
#import <VisionCamera/Frame.h>
// Example for an Objective-C Frame Processor plugin
@ -17,18 +18,14 @@
@implementation ExampleFrameProcessorPlugin
- (NSString *)name {
return @"example_plugin";
}
- (id)callback:(Frame *)frame withArguments:(NSArray<id> *)arguments {
CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
NSLog(@"ExamplePlugin: %zu x %zu Image. Logging %lu parameters:", CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer), (unsigned long)arguments.count);
for (id param in arguments) {
NSLog(@"ExamplePlugin: -> %@ (%@)", param == nil ? @"(nil)" : [param description], NSStringFromClass([param classForCoder]));
}
return @{
@"example_str": @"Test",
@"example_bool": @true,
@ -42,7 +39,10 @@
}
+ (void) load {
[self registerPlugin:[[ExampleFrameProcessorPlugin alloc] init]];
[FrameProcessorPluginRegistry addFrameProcessorPlugin:@"example_plugin"
withInitializer:^FrameProcessorPlugin*(NSDictionary* options) {
return [[ExampleFrameProcessorPlugin alloc] initWithOptions:options];
}];
}
@end


@ -713,7 +713,7 @@ SPEC CHECKSUMS:
RNStaticSafeAreaInsets: 055ddbf5e476321720457cdaeec0ff2ba40ec1b8
RNVectorIcons: fcc2f6cb32f5735b586e66d14103a74ce6ad61f8
SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
VisionCamera: b4e91836f577249470ae42707782f4b44d875cd9
VisionCamera: 29727c3ed48328b246e3a7448f7c14cc12d2fd11
Yoga: 65286bb6a07edce5e4fe8c90774da977ae8fc009
PODFILE CHECKSUM: ab9c06b18c63e741c04349c0fd630c6d3145081c


@ -13,7 +13,6 @@
81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; };
B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */ = {isa = PBXBuildFile; };
B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */; };
B8DB3BDD263DEA31004C18D7 /* ExamplePluginSwift.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BDA263DEA31004C18D7 /* ExamplePluginSwift.swift */; };
B8F0E10825E0199F00586F16 /* File.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F0E10725E0199F00586F16 /* File.swift */; };
C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */; };
/* End PBXBuildFile section */
@ -29,7 +28,6 @@
3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-VisionCameraExample.a"; sourceTree = BUILT_PRODUCTS_DIR; };
81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = VisionCameraExample/LaunchScreen.storyboard; sourceTree = "<group>"; };
B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ExampleFrameProcessorPlugin.m; sourceTree = "<group>"; };
B8DB3BDA263DEA31004C18D7 /* ExamplePluginSwift.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ExamplePluginSwift.swift; sourceTree = "<group>"; };
B8F0E10625E0199F00586F16 /* VisionCameraExample-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "VisionCameraExample-Bridging-Header.h"; sourceTree = "<group>"; };
B8F0E10725E0199F00586F16 /* File.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = File.swift; sourceTree = "<group>"; };
C1D342AD8210E7627A632602 /* Pods-VisionCameraExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-VisionCameraExample.debug.xcconfig"; path = "Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample.debug.xcconfig"; sourceTree = "<group>"; };
@ -118,26 +116,17 @@
B8DB3BD6263DEA31004C18D7 /* Frame Processor Plugins */ = {
isa = PBXGroup;
children = (
B8DB3BD7263DEA31004C18D7 /* Example Plugin (Objective-C) */,
B8DB3BD9263DEA31004C18D7 /* Example Plugin (Swift) */,
B8DB3BD7263DEA31004C18D7 /* Example Plugin */,
);
path = "Frame Processor Plugins";
sourceTree = "<group>";
};
B8DB3BD7263DEA31004C18D7 /* Example Plugin (Objective-C) */ = {
B8DB3BD7263DEA31004C18D7 /* Example Plugin */ = {
isa = PBXGroup;
children = (
B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */,
);
path = "Example Plugin (Objective-C)";
sourceTree = "<group>";
};
B8DB3BD9263DEA31004C18D7 /* Example Plugin (Swift) */ = {
isa = PBXGroup;
children = (
B8DB3BDA263DEA31004C18D7 /* ExamplePluginSwift.swift */,
);
path = "Example Plugin (Swift)";
path = "Example Plugin";
sourceTree = "<group>";
};
/* End PBXGroup section */
@ -381,7 +370,6 @@
13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */,
B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */,
B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */,
B8DB3BDD263DEA31004C18D7 /* ExamplePluginSwift.swift in Sources */,
B8F0E10825E0199F00586F16 /* File.swift in Sources */,
13B07FC11A68108700A75B9A /* main.m in Sources */,
);


@ -1,16 +1,17 @@
import { FrameProcessorPlugins, Frame } from 'react-native-vision-camera';
import { VisionCameraProxy, Frame } from 'react-native-vision-camera';
export function examplePluginSwift(frame: Frame): string[] {
'worklet';
// @ts-expect-error because this function is dynamically injected by VisionCamera
return FrameProcessorPlugins.example_plugin_swift(frame, 'hello!', 'parameter2', true, 42, { test: 0, second: 'test' }, [
'another test',
5,
]);
}
const plugin = VisionCameraProxy.getFrameProcessorPlugin('example_plugin');
export function examplePlugin(frame: Frame): string[] {
'worklet';
// @ts-expect-error because this function is dynamically injected by VisionCamera
return FrameProcessorPlugins.example_plugin(frame, 'hello!', 'parameter2', true, 42, { test: 0, second: 'test' }, ['another test', 5]);
if (plugin == null) throw new Error('Failed to load Frame Processor Plugin "example_plugin"!');
return plugin.call(frame, {
someString: 'hello!',
someBoolean: true,
someNumber: 42,
someObject: { test: 0, second: 'test' },
someArray: ['another test', 5],
}) as string[];
}


@ -17,6 +17,6 @@
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import "FrameProcessor.h"
#import "FrameProcessorRuntimeManager.h"
#import "Frame.h"
#import "VisionCameraProxy.h"
#endif


@ -13,10 +13,6 @@ import Foundation
final class CameraViewManager: RCTViewManager {
// pragma MARK: Properties
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
private var runtimeManager: FrameProcessorRuntimeManager?
#endif
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
}
@ -34,10 +30,9 @@ final class CameraViewManager: RCTViewManager {
@objc
final func installFrameProcessorBindings() -> NSNumber {
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
// Runs on JS Thread
runtimeManager = FrameProcessorRuntimeManager()
runtimeManager!.installFrameProcessorBindings()
return true as NSNumber
// Called on JS Thread (blocking sync method)
let result = VisionCameraInstaller.install(to: bridge)
return NSNumber(value: result)
#else
return false as NSNumber
#endif


@ -22,8 +22,8 @@
@interface FrameProcessor : NSObject
#ifdef __cplusplus
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet;
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context;
- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject;
#endif


@ -21,11 +21,11 @@ using namespace facebook;
std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
}
- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet {
- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context {
if (self = [super init]) {
_workletContext = context;
_workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
_workletContext = context;
}
return self;
}


@ -15,18 +15,24 @@
///
/// Subclass this class in a Swift or Objective-C class and override the `callback:withArguments:` method, and
/// implement your Frame Processing there.
/// Then, in your App's startup (AppDelegate.m), call `FrameProcessorPluginBase.registerPlugin(YourNewPlugin())`
///
/// Use `[FrameProcessorPluginRegistry addFrameProcessorPlugin:]` to register the Plugin to the VisionCamera Runtime.
@interface FrameProcessorPlugin : NSObject
/// Get the name of the Frame Processor Plugin.
/// This will be exposed to JS under the `FrameProcessorPlugins` Proxy object.
- (NSString * _Nonnull)name;
/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
/// Make sure your code is optimized, as this is a hot path.
- (id _Nullable) callback:(Frame* _Nonnull)frame withArguments:(NSArray<id>* _Nullable)arguments;
/// Register the given plugin in the Plugin Registry. This should be called on App Startup.
+ (void) registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
- (id _Nullable) callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments;
@end
// Base implementation (empty)
@implementation FrameProcessorPlugin
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin does not override the `callback(frame:withArguments:)` method!"];
return nil;
}
@end


@ -1,31 +0,0 @@
//
// FrameProcessorPlugin.m
// VisionCamera
//
// Created by Marc Rousavy on 24.02.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "FrameProcessorPlugin.h"
#import "FrameProcessorPluginRegistry.h"
@implementation FrameProcessorPlugin
- (NSString *)name {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin \"%@\" does not override the `name` getter!", [self name]];
return nil;
}
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSArray<id>* _Nullable)arguments {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin \"%@\" does not override the `callback(frame:withArguments:)` method!", [self name]];
return nil;
}
+ (void)registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin {
[FrameProcessorPluginRegistry addFrameProcessorPlugin:plugin];
}
@end


@ -0,0 +1,32 @@
//
// FrameProcessorPluginHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 21.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <jsi/jsi.h>
#import "FrameProcessorPlugin.h"
#import <memory>
#import <ReactCommon/CallInvoker.h>
using namespace facebook;
class FrameProcessorPluginHostObject: public jsi::HostObject {
public:
explicit FrameProcessorPluginHostObject(FrameProcessorPlugin* plugin,
std::shared_ptr<react::CallInvoker> callInvoker):
_plugin(plugin), _callInvoker(callInvoker) { }
~FrameProcessorPluginHostObject() { }
public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
private:
FrameProcessorPlugin* _plugin;
std::shared_ptr<react::CallInvoker> _callInvoker;
};


@ -0,0 +1,52 @@
//
// FrameProcessorPluginHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 21.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "FrameProcessorPluginHostObject.h"
#import <Foundation/Foundation.h>
#import <vector>
#import "FrameHostObject.h"
#import "JSINSObjectConversion.h"
std::vector<jsi::PropNameID> FrameProcessorPluginHostObject::getPropertyNames(jsi::Runtime& runtime) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("call")));
return result;
}
jsi::Value FrameProcessorPluginHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "call") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "call"),
2,
[=](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
// Frame is first argument
auto frameHostObject = arguments[0].asObject(runtime).asHostObject<FrameHostObject>(runtime);
Frame* frame = frameHostObject->frame;
// Options are second argument (possibly undefined)
NSDictionary* options = nil;
if (count > 1) {
auto optionsObject = arguments[1].asObject(runtime);
options = JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, optionsObject, _callInvoker);
}
// Call actual Frame Processor Plugin
id result = [_plugin callback:frame withArguments:options];
// Convert result value to jsi::Value (possibly undefined)
return JSINSObjectConversion::convertObjCObjectToJSIValue(runtime, result);
});
}
return jsi::Value::undefined();
}
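For context, this `call` host function is what backs the JS-side `plugin.call(frame, options)` invocation shown in the docs. A minimal sketch of the mapping (the option name is made up for illustration):
```ts
const plugin = VisionCameraProxy.getFrameProcessorPlugin('example_plugin')

const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  // arguments[0] becomes the FrameHostObject, arguments[1] is converted to the NSDictionary* options
  const result = plugin?.call(frame, { someOption: 42 })
  console.log(result)
}, [])
```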


@ -14,7 +14,12 @@
@interface FrameProcessorPluginRegistry : NSObject
+ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins;
+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
typedef FrameProcessorPlugin* _Nonnull (^PluginInitializerFunction)(NSDictionary* _Nullable options);
+ (void)addFrameProcessorPlugin:(NSString* _Nonnull)name
withInitializer:(PluginInitializerFunction _Nonnull)pluginInitializer;
+ (FrameProcessorPlugin* _Nullable)getPlugin:(NSString* _Nonnull)name
withOptions:(NSDictionary* _Nullable)options;
@end


@ -1,5 +1,5 @@
//
// FrameProcessorPluginRegistry.mm
// FrameProcessorPluginRegistry.m
// VisionCamera
//
// Created by Marc Rousavy on 24.03.21.
@ -11,19 +11,28 @@
@implementation FrameProcessorPluginRegistry
+ (NSMutableDictionary<NSString*, FrameProcessorPlugin*>*)frameProcessorPlugins {
static NSMutableDictionary<NSString*, FrameProcessorPlugin*>* plugins = nil;
+ (NSMutableDictionary<NSString*, PluginInitializerFunction>*)frameProcessorPlugins {
static NSMutableDictionary<NSString*, PluginInitializerFunction>* plugins = nil;
if (plugins == nil) {
plugins = [[NSMutableDictionary alloc] init];
}
return plugins;
}
+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin*)plugin {
BOOL alreadyExists = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:plugin.name] != nil;
NSAssert(!alreadyExists, @"Tried to add a Frame Processor Plugin with a name that already exists! Either choose unique names, or remove the unused plugin. Name: %@", plugin.name);
+ (void) addFrameProcessorPlugin:(NSString *)name withInitializer:(PluginInitializerFunction)pluginInitializer {
BOOL alreadyExists = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:name] != nil;
NSAssert(!alreadyExists, @"Tried to add a Frame Processor Plugin with a name that already exists! Either choose unique names, or remove the unused plugin. Name: %@", name);
[[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:plugin forKey:plugin.name];
[[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:pluginInitializer forKey:name];
}
+ (FrameProcessorPlugin*)getPlugin:(NSString* _Nonnull)name withOptions:(NSDictionary* _Nullable)options {
PluginInitializerFunction initializer = [[FrameProcessorPluginRegistry frameProcessorPlugins] objectForKey:name];
if (initializer == nil) {
return nil;
}
return initializer(options);
}
@end


@ -1,18 +0,0 @@
//
// FrameProcessorRuntimeManager.h
// VisionCamera
//
// Created by Marc Rousavy on 23.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
@interface FrameProcessorRuntimeManager : NSObject
- (void) installFrameProcessorBindings;
@end


@ -1,203 +0,0 @@
//
// FrameProcessorRuntimeManager.m
// VisionCamera
//
// Created by Marc Rousavy on 23.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "FrameProcessorRuntimeManager.h"
#import "FrameProcessorPluginRegistry.h"
#import "FrameProcessorPlugin.h"
#import "FrameProcessor.h"
#import "FrameHostObject.h"
#import <memory>
#import <React/RCTBridge.h>
#import <ReactCommon/RCTTurboModule.h>
#import <React/RCTBridge+Private.h>
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#import "WKTJsiWorkletContext.h"
#import "WKTJsiWorklet.h"
#import "../React Utils/JSIUtils.h"
#import "../../cpp/JSITypedArray.h"
#if VISION_CAMERA_ENABLE_SKIA
#import "../Skia Render Layer/SkiaFrameProcessor.h"
#endif
// Forward declarations for the Swift classes
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues : NSObject
@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
@end
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView : UIView
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
- (SkiaRenderer* _Nonnull)getSkiaRenderer;
@end
@implementation FrameProcessorRuntimeManager {
// Separate Camera Worklet Context
std::shared_ptr<RNWorklet::JsiWorkletContext> workletContext;
}
- (void) setupWorkletContext:(jsi::Runtime&)runtime {
NSLog(@"FrameProcessorBindings: Creating Worklet Context...");
auto callInvoker = RCTBridge.currentBridge.jsCallInvoker;
auto runOnJS = [callInvoker](std::function<void()>&& f) {
// Run on React JS Runtime
callInvoker->invokeAsync(std::move(f));
};
auto runOnWorklet = [](std::function<void()>&& f) {
// Run on Frame Processor Worklet Runtime
dispatch_async(CameraQueues.videoQueue, [f = std::move(f)](){
f();
});
};
workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
&runtime,
runOnJS,
runOnWorklet);
NSLog(@"FrameProcessorBindings: Worklet Context Created!");
NSLog(@"FrameProcessorBindings: Installing Frame Processor plugins...");
jsi::Object frameProcessorPlugins(runtime);
// Iterate through all registered plugins (+init)
for (NSString* pluginKey in [FrameProcessorPluginRegistry frameProcessorPlugins]) {
auto pluginName = [pluginKey UTF8String];
NSLog(@"FrameProcessorBindings: Installing Frame Processor plugin \"%s\"...", pluginName);
// Get the Plugin
FrameProcessorPlugin* plugin = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:pluginKey];
// Create the JSI host function
auto function = [plugin, callInvoker](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
// Get the first parameter, which is always the native Frame Host Object.
auto frameHostObject = arguments[0].asObject(runtime).asHostObject(runtime);
auto frame = static_cast<FrameHostObject*>(frameHostObject.get());
// Convert any additional parameters to the Frame Processor to ObjC objects
auto args = convertJSICStyleArrayToNSArray(runtime,
arguments + 1, // start at index 1 since first arg = Frame
count - 1, // use smaller count
callInvoker);
// Call the FP Plugin, which might return something.
id result = [plugin callback:frame->frame withArguments:args];
// Convert the return value (or null) to a JS Value and return it to JS
return convertObjCObjectToJSIValue(runtime, result);
};
// Assign it to the Proxy.
// A FP Plugin called "example_plugin" can be now called from JS using "FrameProcessorPlugins.example_plugin(frame)"
frameProcessorPlugins.setProperty(runtime,
pluginName,
jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forAscii(runtime, pluginName),
1, // frame
function));
}
// global.FrameProcessorPlugins Proxy
runtime.global().setProperty(runtime, "FrameProcessorPlugins", frameProcessorPlugins);
NSLog(@"FrameProcessorBindings: Frame Processor plugins installed!");
}
- (void) installFrameProcessorBindings {
NSLog(@"FrameProcessorBindings: Installing Frame Processor Bindings for Bridge...");
RCTCxxBridge *cxxBridge = (RCTCxxBridge *)[RCTBridge currentBridge];
if (!cxxBridge.runtime) {
return;
}
jsi::Runtime& jsiRuntime = *(jsi::Runtime*)cxxBridge.runtime;
// HostObject that attaches the cache to the lifecycle of the Runtime. On Runtime destroy, we destroy the cache.
auto propNameCacheObject = std::make_shared<vision::InvalidateCacheOnDestroy>(jsiRuntime);
jsiRuntime.global().setProperty(jsiRuntime,
"__visionCameraArrayBufferCache",
jsi::Object::createFromHostObject(jsiRuntime, propNameCacheObject));
// Install the Worklet Runtime in the main React JS Runtime
[self setupWorkletContext:jsiRuntime];
NSLog(@"FrameProcessorBindings: Installing global functions...");
// setFrameProcessor(viewTag: number, frameProcessor: (frame: Frame) => void)
auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
NSLog(@"FrameProcessorBindings: Setting new frame processor...");
auto viewTag = arguments[0].asNumber();
auto object = arguments[1].asObject(runtime);
auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, object.getProperty(runtime, "frameProcessor"));
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
if (frameProcessorType == "frame-processor") {
view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:self->workletContext
worklet:worklet];
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:self->workletContext
worklet:worklet
skiaRenderer:skiaRenderer];
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
});
return jsi::Value::undefined();
};
jsiRuntime.global().setProperty(jsiRuntime, "setFrameProcessor", jsi::Function::createFromHostFunction(jsiRuntime,
jsi::PropNameID::forAscii(jsiRuntime, "setFrameProcessor"),
2, // viewTag, frameProcessor
setFrameProcessor));
// unsetFrameProcessor(viewTag: number)
auto unsetFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
NSLog(@"FrameProcessorBindings: Removing frame processor...");
auto viewTag = arguments[0].asNumber();
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
if (!currentBridge) return;
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
view.frameProcessor = nil;
});
return jsi::Value::undefined();
};
jsiRuntime.global().setProperty(jsiRuntime, "unsetFrameProcessor", jsi::Function::createFromHostFunction(jsiRuntime,
jsi::PropNameID::forAscii(jsiRuntime, "unsetFrameProcessor"),
1, // viewTag
unsetFrameProcessor));
NSLog(@"FrameProcessorBindings: Finished installing bindings.");
}
@end
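For reference, the bindings removed above were plain, untyped globals plus one host function per registered plugin, so JavaScript invoked them roughly like this (a hedged sketch of the old call surface, not part of the diff; `example_plugin`, `viewTag`, `worklet` and `frame` are hypothetical placeholders):

declare const viewTag: number
declare const worklet: (frame: unknown) => void
declare const frame: unknown

// Old API (removed in this commit): untyped globals installed by FrameProcessorRuntimeManager
// @ts-expect-error the old global JSI bindings were never typed
global.setFrameProcessor(viewTag, { type: 'frame-processor', frameProcessor: worklet })
// @ts-expect-error
global.unsetFrameProcessor(viewTag)
// ...and every native plugin was exposed as its own function on a global proxy object:
// @ts-expect-error
console.log(global.FrameProcessorPlugins.example_plugin(frame))

The rest of this diff replaces those globals with a single `VisionCameraProxy` host object.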

View File

@ -1,5 +1,5 @@
//
// JSIUtils.h
// JSINSObjectConversion.h
// VisionCamera
//
// Created by Marc Rousavy on 30.04.21.
@ -12,6 +12,8 @@
#import <ReactCommon/CallInvoker.h>
#import <React/RCTBridgeModule.h>
namespace JSINSObjectConversion {
using namespace facebook;
using namespace facebook::react;
@ -53,3 +55,5 @@ id convertJSIValueToObjCObject(jsi::Runtime& runtime, const jsi::Value& value, s
// (any...) => any -> (void)(id, id)
RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime& runtime, const jsi::Function& value, std::shared_ptr<CallInvoker> jsInvoker);
} // namespace JSINSObjectConversion

View File

@ -1,5 +1,5 @@
//
// JSIUtils.mm
// JSINSObjectConversion.mm
// VisionCamera
//
// Forked and Adjusted by Marc Rousavy on 02.05.21.
@ -14,7 +14,7 @@
// LICENSE file in the root directory of this source tree.
//
#import "JSIUtils.h"
#import "JSINSObjectConversion.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
#import <ReactCommon/CallInvoker.h>

View File

@ -0,0 +1,44 @@
//
// VisionCameraProxy.h
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
#ifdef __cplusplus
#import <jsi/jsi.h>
#import "WKTJsiWorkletContext.h"
#import <ReactCommon/CallInvoker.h>
using namespace facebook;
class VisionCameraProxy: public jsi::HostObject {
public:
explicit VisionCameraProxy(jsi::Runtime& runtime,
std::shared_ptr<react::CallInvoker> callInvoker);
~VisionCameraProxy();
public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
private:
void setFrameProcessor(jsi::Runtime& runtime, int viewTag, const jsi::Object& frameProcessor);
void removeFrameProcessor(jsi::Runtime& runtime, int viewTag);
jsi::Value getFrameProcessorPlugin(jsi::Runtime& runtime, std::string name, const jsi::Object& options);
private:
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
std::shared_ptr<react::CallInvoker> _callInvoker;
};
#endif
@interface VisionCameraInstaller : NSObject
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge;
@end

View File

@ -0,0 +1,211 @@
//
// VisionCameraProxy.mm
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "VisionCameraProxy.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
#import "FrameProcessorPluginRegistry.h"
#import "FrameProcessorPluginHostObject.h"
#import "FrameProcessor.h"
#import "FrameHostObject.h"
#import "JSINSObjectConversion.h"
#import "../../cpp/JSITypedArray.h"
#import "WKTJsiWorklet.h"
#import <React/RCTUtils.h>
#import <React/RCTBridge.h>
#import <React/RCTBridge+Private.h>
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#if VISION_CAMERA_ENABLE_SKIA
#import "SkiaRenderer.h"
#import "../Skia Render Layer/SkiaFrameProcessor.h"
#endif
// Swift forward-declarations
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues: NSObject
@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
@end
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView: UIView
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
#if VISION_CAMERA_ENABLE_SKIA
- (SkiaRenderer* _Nonnull)getSkiaRenderer;
#endif
@end
using namespace facebook;
VisionCameraProxy::VisionCameraProxy(jsi::Runtime& runtime,
std::shared_ptr<react::CallInvoker> callInvoker) {
_callInvoker = callInvoker;
NSLog(@"VisionCameraProxy: Creating Worklet Context...");
auto runOnJS = [callInvoker](std::function<void()>&& f) {
// Run on React JS Runtime
callInvoker->invokeAsync(std::move(f));
};
auto runOnWorklet = [](std::function<void()>&& f) {
// Run on Frame Processor Worklet Runtime
dispatch_async(CameraQueues.videoQueue, [f = std::move(f)](){
f();
});
};
_workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera",
&runtime,
runOnJS,
runOnWorklet);
NSLog(@"VisionCameraProxy: Worklet Context Created!");
}
VisionCameraProxy::~VisionCameraProxy() {
NSLog(@"VisionCameraProxy: Destroying context...");
// Destroy ArrayBuffer cache for both the JS and the Worklet Runtime.
vision::invalidateArrayBufferCache(*_workletContext->getJsRuntime());
vision::invalidateArrayBufferCache(_workletContext->getWorkletRuntime());
}
std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& runtime) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("isSkiaEnabled")));
return result;
}
void VisionCameraProxy::setFrameProcessor(jsi::Runtime& runtime, int viewTag, const jsi::Object& object) {
auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, object.getProperty(runtime, "frameProcessor"));
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
if (frameProcessorType == "frame-processor") {
view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:worklet
context:_workletContext];
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:worklet
context:_workletContext
skiaRenderer:skiaRenderer];
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
});
}
void VisionCameraProxy::removeFrameProcessor(jsi::Runtime& runtime, int viewTag) {
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
view.frameProcessor = nil;
});
}
jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime, std::string name, const jsi::Object& options) {
NSString* key = [NSString stringWithUTF8String:name.c_str()];
NSDictionary* optionsObjc = JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, options, _callInvoker);
FrameProcessorPlugin* plugin = [FrameProcessorPluginRegistry getPlugin:key withOptions:optionsObjc];
if (plugin == nil) {
return jsi::Value::undefined();
}
auto pluginHostObject = std::make_shared<FrameProcessorPluginHostObject>(plugin, _callInvoker);
return jsi::Object::createFromHostObject(runtime, pluginHostObject);
}
jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "isSkiaEnabled") {
#ifdef VISION_CAMERA_ENABLE_SKIA
return jsi::Value(true);
#else
return jsi::Value(false);
#endif
}
if (name == "setFrameProcessor") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"),
1,
[this](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
auto viewTag = arguments[0].asNumber();
auto object = arguments[1].asObject(runtime);
this->setFrameProcessor(runtime, static_cast<int>(viewTag), object);
return jsi::Value::undefined();
});
}
if (name == "removeFrameProcessor") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "removeFrameProcessor"),
1,
[this](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
auto viewTag = arguments[0].asNumber();
this->removeFrameProcessor(runtime, static_cast<int>(viewTag));
return jsi::Value::undefined();
});
}
if (name == "getFrameProcessorPlugin") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "getFrameProcessorPlugin"),
1,
[this](jsi::Runtime& runtime,
const jsi::Value& thisValue,
const jsi::Value* arguments,
size_t count) -> jsi::Value {
if (count < 1 || !arguments[0].isString()) {
throw jsi::JSError(runtime, "First argument needs to be a string (pluginName)!");
}
auto pluginName = arguments[0].asString(runtime).utf8(runtime);
auto options = count > 1 ? arguments[1].asObject(runtime) : jsi::Object(runtime);
return this->getFrameProcessorPlugin(runtime, pluginName, options);
});
}
return jsi::Value::undefined();
}
@implementation VisionCameraInstaller
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge {
RCTCxxBridge* cxxBridge = (RCTCxxBridge*)[RCTBridge currentBridge];
if (!cxxBridge.runtime) {
return NO;
}
jsi::Runtime& runtime = *(jsi::Runtime*)cxxBridge.runtime;
// global.VisionCameraProxy
auto visionCameraProxy = std::make_shared<VisionCameraProxy>(runtime, bridge.jsCallInvoker);
runtime.global().setProperty(runtime,
"VisionCameraProxy",
jsi::Object::createFromHostObject(runtime, visionCameraProxy));
return YES;
}
@end
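The `setFrameProcessor` implementation above dispatches on the `type` field of the object it receives, and `isSkiaEnabled` reports whether Skia support was compiled in. The object passed from JS therefore has roughly the following shape (an illustrative TypeScript sketch mirroring the `FrameProcessor` type from `CameraProps`, not part of the diff):

import type { Frame } from './Frame'

// 'skia-frame-processor' only works when the native side was built with
// VISION_CAMERA_ENABLE_SKIA; otherwise setFrameProcessor throws 'system/skia-unavailable'.
type FrameProcessorType = 'frame-processor' | 'skia-frame-processor'

interface FrameProcessorObject {
  frameProcessor: (frame: Frame) => void
  type: FrameProcessorType
}

// Callers can guard Skia usage with the proxy flag, e.g.:
//   const type = VisionCameraProxy.isSkiaEnabled ? 'skia-frame-processor' : 'frame-processor'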

View File

@ -19,8 +19,8 @@
@interface SkiaFrameProcessor: FrameProcessor
#ifdef __cplusplus
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer;
#endif

View File

@ -25,11 +25,11 @@ using namespace facebook;
std::shared_ptr<RNSkia::JsiSkCanvas> _skiaCanvas;
}
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer {
if (self = [super initWithWorklet:context
worklet:worklet]) {
if (self = [super initWithWorklet:worklet
context:context]) {
_skiaRenderer = skiaRenderer;
auto platformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(context->getJsRuntime(),
RCTBridge.currentBridge);

View File

@ -54,8 +54,7 @@
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */; };
B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
@ -80,13 +79,15 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B80A319E293A5C10003EE681 /* SkiaRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderContext.h; sourceTree = "<group>"; };
B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; };
B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B8127E382A68871C00B06972 /* SkiaPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewView.swift; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
@ -100,7 +101,6 @@
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
@ -137,11 +137,9 @@
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSINSObjectConversion.mm; sourceTree = "<group>"; };
B89A28742A68795E0092207F /* SkiaRenderer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaRenderer.mm; sourceTree = "<group>"; };
B89A28752A68796A0092207F /* SkiaRenderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderer.h; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaFrameProcessor.h; sourceTree = "<group>"; };
B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaFrameProcessor.mm; sourceTree = "<group>"; };
@ -151,6 +149,8 @@
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = DrawableFrameHostObject.mm; sourceTree = "<group>"; };
B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DrawableFrameHostObject.h; sourceTree = "<group>"; };
B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
@ -237,8 +237,6 @@
B887516F25E0102000DB86D6 /* ReactLogger.swift */,
B887517025E0102000DB86D6 /* Promise.swift */,
B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
B81D41EF263C86F900B041FD /* JSIUtils.h */,
B8994E6B263F03E100069589 /* JSIUtils.mm */,
);
path = "React Utils";
sourceTree = "<group>";
@ -273,12 +271,15 @@
B8F7DDD1266F715D00120533 /* Frame.m */,
B84760A22608EE38004C3180 /* FrameHostObject.h */,
B84760A52608EE7C004C3180 /* FrameHostObject.mm */,
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */,
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */,
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */,
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */,
B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */,
B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */,
B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */,
B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */,
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */,
B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */,
);
path = "Frame Processor";
sourceTree = "<group>";
@ -436,7 +437,6 @@
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */,
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
@ -450,7 +450,7 @@
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */,
B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */,
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */,

View File

@ -1,16 +1,16 @@
import React from 'react';
import { requireNativeComponent, NativeSyntheticEvent, findNodeHandle, NativeMethods, Platform } from 'react-native';
import type { VideoFileType } from '.';
import type { CameraDevice } from './CameraDevice';
import type { ErrorWithCause } from './CameraError';
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
import type { CameraProps, FrameProcessor } from './CameraProps';
import { assertFrameProcessorsAvailable, assertJSIAvailable } from './JSIHelper';
import { assertJSIAvailable } from './JSIHelper';
import { CameraModule } from './NativeCameraModule';
import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
import type { Point } from './Point';
import type { TakeSnapshotOptions } from './Snapshot';
import type { CameraVideoCodec, RecordVideoOptions, VideoFile } from './VideoFile';
import type { CameraVideoCodec, RecordVideoOptions, VideoFile, VideoFileType } from './VideoFile';
import { VisionCameraProxy } from './FrameProcessorPlugins';
//#region Types
export type CameraPermissionStatus = 'authorized' | 'not-determined' | 'denied' | 'restricted';
@ -82,7 +82,7 @@ export class Camera extends React.PureComponent<CameraProps> {
this.lastFrameProcessor = undefined;
}
private get handle(): number | null {
private get handle(): number {
const nodeHandle = findNodeHandle(this.ref.current);
if (nodeHandle == null || nodeHandle === -1) {
throw new CameraRuntimeError(
@ -312,7 +312,8 @@ export class Camera extends React.PureComponent<CameraProps> {
public static installFrameProcessorBindings(): void {
assertJSIAvailable();
const result = CameraModule.installFrameProcessorBindings() as unknown;
if (result !== true) throw new Error('Failed to install Frame Processor JSI bindings!');
if (result !== true)
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Failed to install Frame Processor JSI bindings!');
}
/**
@ -418,15 +419,11 @@ export class Camera extends React.PureComponent<CameraProps> {
//#region Lifecycle
private setFrameProcessor(frameProcessor: FrameProcessor): void {
assertFrameProcessorsAvailable();
// @ts-expect-error JSI functions aren't typed
global.setFrameProcessor(this.handle, frameProcessor);
VisionCameraProxy.setFrameProcessor(this.handle, frameProcessor);
}
private unsetFrameProcessor(): void {
assertFrameProcessorsAvailable();
// @ts-expect-error JSI functions aren't typed
global.unsetFrameProcessor(this.handle);
VisionCameraProxy.removeFrameProcessor(this.handle);
}
private onViewReady(): void {

View File

@ -1,26 +1,50 @@
import type { Frame, FrameInternal } from './Frame';
import type { FrameProcessor } from './CameraProps';
import { Camera } from './Camera';
import { Worklets } from 'react-native-worklets/src';
import { CameraRuntimeError } from './CameraError';
type BasicParameterType = string | number | boolean | undefined;
type ParameterType = BasicParameterType | BasicParameterType[] | Record<string, BasicParameterType | undefined>;
interface FrameProcessorPlugin {
/**
* Call the native Frame Processor Plugin with the given Frame and options.
* @param frame The Frame from the Frame Processor.
* @param options (optional) Additional options. Options will be converted to a native dictionary
* @returns (optional) A value returned from the native Frame Processor Plugin (or undefined)
*/
call: (frame: Frame, options?: Record<string, ParameterType>) => ParameterType;
}
interface TVisionCameraProxy {
setFrameProcessor: (viewTag: number, frameProcessor: FrameProcessor) => void;
removeFrameProcessor: (viewTag: number) => void;
/**
* Creates a new instance of a Frame Processor Plugin.
* The Plugin has to be registered on the native side, otherwise this returns `undefined`
*/
getFrameProcessorPlugin: (name: string) => FrameProcessorPlugin | undefined;
isSkiaEnabled: boolean;
}
// Install VisionCamera Frame Processor JSI Bindings and Plugins
Camera.installFrameProcessorBindings();
// @ts-expect-error global is untyped, it's a C++ host-object
export const VisionCameraProxy = global.VisionCameraProxy as TVisionCameraProxy;
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (VisionCameraProxy == null) {
throw new CameraRuntimeError(
'system/frame-processors-unavailable',
'Failed to install VisionCameraProxy. Are Frame Processors properly enabled?',
);
}
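Because `getFrameProcessorPlugin` returns `undefined` when no plugin with that name was registered natively, callers typically resolve the plugin once at module scope and guard it before use. A hedged sketch (the plugin name `example_plugin` and its option are hypothetical; `Frame` and `ParameterType` are the types declared above):

const examplePlugin = VisionCameraProxy.getFrameProcessorPlugin('example_plugin')

export function callExamplePlugin(frame: Frame): ParameterType {
  'worklet'
  if (examplePlugin == null) throw new Error('Failed to load Frame Processor Plugin "example_plugin"!')
  // `options` is converted to a native dictionary on the way in, and the
  // plugin's native return value is converted back into a plain JS value.
  return examplePlugin.call(frame, { someOption: true })
}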
declare global {
// eslint-disable-next-line no-var
var __frameProcessorRunAtTargetFpsMap: Record<string, number | undefined> | undefined;
}
type BasicParameterType = string | number | boolean | undefined;
type ParameterType = BasicParameterType | BasicParameterType[] | Record<string, BasicParameterType | undefined>;
type FrameProcessor = (frame: Frame, parameters?: Record<string, ParameterType | undefined>) => unknown;
type TFrameProcessorPlugins = Record<string, FrameProcessor>;
/**
* All natively installed Frame Processor Plugins.
*/
// @ts-expect-error The global JSI Proxy object is not typed.
export const FrameProcessorPlugins = global.FrameProcessorPlugins as TFrameProcessorPlugins;
function getLastFrameProcessorCall(frameProcessorFuncId: string): number {
'worklet';
return global.__frameProcessorRunAtTargetFpsMap?.[frameProcessorFuncId] ?? 0;

View File

@ -10,15 +10,3 @@ export function assertJSIAvailable(): void {
);
}
}
export function assertFrameProcessorsAvailable(): void {
assertJSIAvailable();
// @ts-expect-error JSI functions aren't typed
if (global.setFrameProcessor == null || global.unsetFrameProcessor == null) {
throw new CameraRuntimeError(
'system/frame-processors-unavailable',
'Frame Processors are not enabled. See https://mrousavy.github.io/react-native-vision-camera/docs/guides/troubleshooting',
);
}
}

View File

@ -1,28 +1,13 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { ConfigPlugin, withXcodeProject, XcodeProject } from '@expo/config-plugins';
import { ConfigPlugin, withPodfileProperties } from '@expo/config-plugins';
/**
* Set the `disableFrameProcessors` inside of the XcodeProject.
* This is used to disable Frame Processors on iOS if you don't need them (saves CPU and memory).
*/
export const withDisableFrameProcessorsIOS: ConfigPlugin = (c) => {
return withXcodeProject(c, (config) => {
const xcodeProject: XcodeProject = config.modResults;
const configurations = xcodeProject.pbxXCBuildConfigurationSection();
const inheritKey = '"$(inherited)"';
const valueKey = '"VISION_CAMERA_DISABLE_FRAME_PROCESSORS=1"';
for (const key in configurations) {
const buildSettings = configurations[key].buildSettings;
if (buildSettings == null) continue;
const preprocessorDefinitions = (buildSettings.GCC_PREPROCESSOR_DEFINITIONS ?? [inheritKey]) as string[];
if (!preprocessorDefinitions.includes(valueKey)) preprocessorDefinitions.push(valueKey);
buildSettings.GCC_PREPROCESSOR_DEFINITIONS = preprocessorDefinitions;
}
return withPodfileProperties(c, (config) => {
// TODO: Implement Podfile writing
config.ios = config.ios;
return config;
});
};

View File

@ -4,6 +4,25 @@ import { FrameProcessor } from '../CameraProps';
// Install RN Worklets by importing it
import 'react-native-worklets/src';
export function createFrameProcessor(frameProcessor: FrameProcessor['frameProcessor'], type: FrameProcessor['type']): FrameProcessor {
return {
frameProcessor: (frame: Frame | DrawableFrame) => {
'worklet';
// Increment ref-count by one
(frame as FrameInternal).incrementRefCount();
try {
// Call sync frame processor
// @ts-expect-error the frame type is ambiguous here
frameProcessor(frame);
} finally {
// Potentially delete Frame if we were the last ref (no runAsync)
(frame as FrameInternal).decrementRefCount();
}
},
type: type,
};
}
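Since `createFrameProcessor` is exported, a Frame Processor can now also be constructed outside of a React hook (for example in a class component) and passed to `<Camera>` via the usual `frameProcessor` prop. A minimal sketch, assuming `Frame` exposes `width`/`height`; note that, unlike `useFrameProcessor`, nothing is memoized here, so the caller must avoid re-creating it on every render:

const logFrameProcessor = createFrameProcessor((frame) => {
  'worklet'
  // Log the dimensions of every incoming frame
  console.log(`New frame: ${frame.width}x${frame.height}`)
}, 'frame-processor')

// <Camera device={device} isActive={true} frameProcessor={logFrameProcessor} />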
/**
* Returns a memoized Frame Processor function which you can pass to the `<Camera>`.
* (See ["Frame Processors"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors))
@ -23,25 +42,8 @@ import 'react-native-worklets/src';
* ```
*/
export function useFrameProcessor(frameProcessor: (frame: Frame) => void, dependencies: DependencyList): FrameProcessor {
return useMemo(
() => ({
frameProcessor: (frame: Frame) => {
'worklet';
// Increment ref-count by one
(frame as FrameInternal).incrementRefCount();
try {
// Call sync frame processor
frameProcessor(frame);
} finally {
// Potentially delete Frame if we were the last ref (no runAsync)
(frame as FrameInternal).decrementRefCount();
}
},
type: 'frame-processor',
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
dependencies,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
return useMemo(() => createFrameProcessor(frameProcessor, 'frame-processor'), dependencies);
}
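From the caller's perspective the hook is unchanged; the dependency array works like `useMemo`'s, so any value the worklet captures must be listed there. A usage sketch (illustrative only, inside a function component with React's `useState` in scope; `targetLabel` is a hypothetical piece of state):

const [targetLabel] = useState('cat')

const frameProcessor = useFrameProcessor(
  (frame) => {
    'worklet'
    // `targetLabel` is captured by the worklet, so it must appear in the dependency list
    console.log(`Looking for "${targetLabel}" in a ${frame.width}x${frame.height} frame`)
  },
  [targetLabel],
)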
/**
@ -65,23 +67,6 @@ export function useFrameProcessor(frameProcessor: (frame: Frame) => void, depend
* ```
*/
export function useSkiaFrameProcessor(frameProcessor: (frame: DrawableFrame) => void, dependencies: DependencyList): FrameProcessor {
return useMemo(
() => ({
frameProcessor: (frame: DrawableFrame) => {
'worklet';
// Increment ref-count by one
(frame as FrameInternal).incrementRefCount();
try {
// Call sync frame processor
frameProcessor(frame);
} finally {
// Potentially delete Frame if we were the last ref (no runAsync)
(frame as FrameInternal).decrementRefCount();
}
},
type: 'skia-frame-processor',
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
dependencies,
);
// eslint-disable-next-line react-hooks/exhaustive-deps
return useMemo(() => createFrameProcessor(frameProcessor, 'skia-frame-processor'), dependencies);
}