feat: native Frame type to provide Orientation (#186)

* Use Frame.h
* Add orientation
* Determine buffer orientation
* Replace plugins
* fix calls
* Update FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
* Update FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
* format
* Update CameraPage.tsx
* Update FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
* Add links to docs
* Use `.` syntax
* Make properties `readonly`
* Fix `@synthesize` backing store
Parent: 7025fc1cbe
Commit: 68a716b506
@@ -25,7 +25,7 @@ Pod::Spec.new do |s|
   s.source_files = [
     "ios/**/*.{m,mm,swift}",
     "ios/CameraBridge.h",
-    "ios/Frame Processor/CMSampleBufferRefHolder.h",
+    "ios/Frame Processor/Frame.h",
     "ios/Frame Processor/FrameProcessorCallback.h",
     "ios/Frame Processor/FrameProcessorRuntimeManager.h",
     "ios/Frame Processor/FrameProcessorPluginRegistry.h",
@@ -32,23 +32,23 @@ To achieve **maximum performance**, the `scanQRCodes` function is written in a n

 The Frame Processor Plugin Registry API automatically manages type conversion from JS <-> native. They are converted into the most efficient data-structures, as seen here:

 | JS Type              | Objective-C Type              | Java Type                  |
-|----------------------|---------------------------|----------------------------|
+|----------------------|-------------------------------|----------------------------|
 | `number`             | `NSNumber*` (double)          | `double`                   |
 | `boolean`            | `NSNumber*` (boolean)         | `boolean`                  |
 | `string`             | `NSString*`                   | `String`                   |
 | `[]`                 | `NSArray*`                    | `Array<Object>`            |
 | `{}`                 | `NSDictionary*`               | `HashMap<Object>`          |
 | `undefined` / `null` | `nil`                         | `null`                     |
-| `(any, any) => void` | `RCTResponseSenderBlock`  | `(Object, Object) -> void` |
-| `Frame`              | `CMSampleBufferRefHolder` | `ImageProxy`               |
+| `(any, any) => void` | [`RCTResponseSenderBlock`][4] | `(Object, Object) -> void` |
+| [`Frame`][1]         | [`Frame*`][2]                 | [`ImageProxy`][3]          |

 ### Return values

 Return values will automatically be converted to JS values, assuming they are representable in the ["Types" table](#types). So the following Objective-C frame processor:

 ```objc
-static inline id detectObject(CMSampleBufferRef buffer, NSArray args) {
+static inline id detectObject(Frame* frame, NSArray args) {
   return @"cat";
 }
 ```
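As a quick illustration of the conversion table above, here is a sketch of a plugin that returns an `NSDictionary*`; the plugin name `detectObjectInfo` and its keys are made up for this example, but the conversions (`NSDictionary*` → `{}`, `NSString*` → `string`, `NSNumber*` → `number`) follow the table.

```objc
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>

// Hypothetical plugin: returns an NSDictionary*, which the registry converts
// to a plain JS object ({ label: string, confidence: number }).
static inline id detectObjectInfo(Frame* frame, NSArray* args) {
  return @{
    @"label": @"cat",
    @"confidence": @0.97
  };
}
```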
@@ -63,15 +63,17 @@ export function detectObject(frame: Frame): string {
 }
 ```

-You can also manipulate the buffer and return it (or a copy) by using the `CMSampleBufferRefHolder` class:
+You can also manipulate the buffer and return it (or a copy) by using the `Frame` class:

 ```objc
-static inline id resize(CMSampleBufferRef buffer, NSArray args) {
+#import <VisionCamera/Frame.h>
+
+static inline id resize(Frame* frame, NSArray args) {
   NSNumber* width = [arguments objectAtIndex:0];
   NSNumber* height = [arguments objectAtIndex:1];

-  CMSampleBufferRef resizedBuffer = CMSampleBufferCopyAndResize(buffer, width, height);
-  return [[CMSampleBufferRefHolder alloc] initWithBuffer:resizedBuffer];
+  CMSampleBufferRef resizedBuffer = CMSampleBufferCopyAndResize(frame.buffer, width, height);
+  return [[Frame alloc] initWithBuffer:resizedBuffer orientation:frame.orientation];
 }
 ```
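If a plugin only needs to inspect the frame, it can read the underlying pixel buffer directly instead of copying it. A minimal sketch (the helper name `getFrameSize` is hypothetical) using the same CoreMedia/CoreVideo accessors the Frame host object relies on:

```objc
#import <VisionCamera/Frame.h>
#import <CoreMedia/CMSampleBuffer.h>
#import <CoreVideo/CVPixelBuffer.h>

// Hypothetical plugin: reads the pixel-buffer dimensions without copying the buffer.
static inline id getFrameSize(Frame* frame, NSArray* args) {
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
  size_t width = CVPixelBufferGetWidth(imageBuffer);
  size_t height = CVPixelBufferGetHeight(imageBuffer);
  return @{ @"width": @(width), @"height": @(height) };
}
```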
@@ -116,9 +118,9 @@ For example, a realtime video chat application might use WebRTC to send the fram
 ```objc
 static dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0ul);

-static inline id sendFrameToWebRTC(CMSampleBufferRef buffer, NSArray args) {
+static inline id sendFrameToWebRTC(Frame* frame, NSArray args) {
   CMSampleBufferRef bufferCopy;
-  CMSampleBufferCreateCopy(kCFAllocatorDefault, buffer, &bufferCopy);
+  CMSampleBufferCreateCopy(kCFAllocatorDefault, frame.buffer, &bufferCopy);

   dispatch_async(queue, ^{
     NSString* serverURL = (NSString*)args[0];
@@ -171,3 +173,8 @@ Your Frame Processor Plugins have to be fast. VisionCamera automatically detects
 <br />

 #### 🚀 Create your first Frame Processor Plugin for [iOS](frame-processors-plugins-ios) or [Android](frame-processors-plugins-android)!
+
+[1]: https://github.com/cuvent/react-native-vision-camera/blob/main/src/Frame.ts
+[2]: https://github.com/cuvent/react-native-vision-camera/blob/main/ios/Frame%20Processor/Frame.h
+[3]: https://developer.android.com/reference/androidx/camera/core/ImageProxy
+[4]: https://github.com/facebook/react-native/blob/9a43eac7a32a6ba3164a048960101022a92fcd5a/React/Base/RCTBridgeModule.h#L20-L24
@@ -27,15 +27,18 @@ iOS Frame Processor Plugins can be written in either **Objective-C** or **Swift*
 2. Create an Objective-C source file, for the QR Code Plugin this will be called `QRCodeFrameProcessorPlugin.m`.
 3. Add the following code:

-```objc {9}
+```objc {11}
 #import <VisionCamera/FrameProcessorPlugin.h>
+#import <VisionCamera/Frame.h>

 @interface QRCodeFrameProcessorPlugin : NSObject
 @end

 @implementation QRCodeFrameProcessorPlugin

-static inline id scanQRCodes(CMSampleBufferRef buffer, NSArray args) {
+static inline id scanQRCodes(Frame* frame, NSArray args) {
+  CMSampleBufferRef buffer = frame.buffer;
+  UIImageOrientation orientation = frame.orientation;
   // code goes here
   return @[];
 }
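For context, the function above still has to be registered with the plugin registry. A sketch of what that registration looks like, assuming the export macro is named `VISION_EXPORT_FRAME_PROCESSOR` (the macro's definition is updated further down in this diff, but its name is not shown there):

```objc
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>

@interface QRCodeFrameProcessorPlugin : NSObject
@end

@implementation QRCodeFrameProcessorPlugin

static inline id scanQRCodes(Frame* frame, NSArray* args) {
  // code goes here
  return @[];
}

// Assumed macro name; registers the function as `__scanQRCodes` on the JS side.
VISION_EXPORT_FRAME_PROCESSOR(scanQRCodes)

@end
```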
@@ -62,6 +65,7 @@ The JS function name will be equal to the Objective-C function name you choose (

 ```objc
 #import <VisionCamera/FrameProcessorPlugin.h>
+#import <VisionCamera/Frame.h>
 ```

 3. Create an Objective-C source file with the same name as the Swift file, for the QR Code Plugin this will be `QRCodeFrameProcessorPlugin.m`. Add the following code:
@@ -79,12 +83,14 @@ The first parameter in the Macro specifies the JS function name. Make sure it is

 4. In the Swift file, add the following code:

-```swift {6}
+```swift {8}
 @objc(QRCodeFrameProcessorPlugin)
 public class QRCodeFrameProcessorPlugin: NSObject, FrameProcessorPluginBase {

   @objc
-  public static func callback(_: CMSampleBuffer!, withArgs _: [Any]!) -> Any! {
+  public static func callback(_ frame: Frame!, withArgs _: [Any]!) -> Any! {
+    let buffer = frame.buffer
+    let orientation = frame.orientation
     // code goes here
     return []
   }
@@ -7,7 +7,7 @@

 #import <Foundation/Foundation.h>
 #import <VisionCamera/FrameProcessorPlugin.h>
-#import <Vision/VNDetectBarcodesRequest.h>
+#import <VisionCamera/Frame.h>

 // Example for an Objective-C Frame Processor plugin

@@ -16,8 +16,8 @@

 @implementation QRCodeFrameProcessorPluginObjC

-static inline id exampleObjC___scanQRCodes(CMSampleBufferRef buffer, NSArray* arguments) {
-  // TODO: Use some AI to detect QR codes in the CMSampleBufferRef
+static inline id exampleObjC___scanQRCodes(Frame* frame, NSArray* arguments) {
+  // TODO: Use some AI to detect QR codes in the frame
   return @[];
 }

@@ -12,7 +12,7 @@ import Vision
 @objc(QRCodeFrameProcessorPluginSwift)
 public class QRCodeFrameProcessorPluginSwift: NSObject, FrameProcessorPluginBase {
   @objc
-  public static func callback(_: CMSampleBuffer!, withArgs _: [Any]!) -> Any! {
+  public static func callback(_: Frame!, withArgs _: [Any]!) -> Any! {
     // TODO: Use some AI to detect QR codes in the CMSampleBufferRef
     []
   }
@@ -490,7 +490,7 @@ SPEC CHECKSUMS:
   RNReanimated: 9c13c86454bfd54dab7505c1a054470bfecd2563
   RNStaticSafeAreaInsets: 6103cf09647fa427186d30f67b0f5163c1ae8252
   RNVectorIcons: 31cebfcf94e8cf8686eb5303ae0357da64d7a5a4
-  VisionCamera: 9886518481961e1c5d94cedb9b7513c28b8368c1
+  VisionCamera: 60f51b9c8e5074fda9952a603311338039f7bf28
   Yoga: 575c581c63e0d35c9a83f4b46d01d63abc1100ac

 PODFILE CHECKSUM: 4b093c1d474775c2eac3268011e4b0b80929d3a2
@@ -15,6 +15,7 @@

 #import "FrameProcessorCallback.h"
 #import "FrameProcessorRuntimeManager.h"
+#import "Frame.h"
 #import "RCTBridge+runOnJS.h"
 #import "JSConsoleHelper.h"
@@ -169,10 +169,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
   }

   public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
+    // Video Recording runs in the same queue
     if isRecording {
       guard let recordingSession = recordingSession else {
         return invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
       }

       switch captureOutput {
       case is AVCaptureVideoDataOutput:
         recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
@@ -191,8 +193,10 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
       let diff = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorCall.uptimeNanoseconds
       let secondsPerFrame = 1.0 / frameProcessorFps.doubleValue
       let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0

       if diff > UInt64(nanosecondsPerFrame) {
-        frameProcessor(sampleBuffer)
+        let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
+        frameProcessor(frame)
         lastFrameProcessorCall = DispatchTime.now()
       }
     }
|
|||||||
return String(describing: reason)
|
return String(describing: reason)
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
/**
|
||||||
|
Gets the orientation of the CameraView's images (CMSampleBuffers).
|
||||||
|
*/
|
||||||
|
var bufferOrientation: UIImage.Orientation {
|
||||||
|
guard let cameraPosition = videoDeviceInput?.device.position else {
|
||||||
|
return .up
|
||||||
|
}
|
||||||
|
|
||||||
|
switch UIDevice.current.orientation {
|
||||||
|
case .portrait:
|
||||||
|
return cameraPosition == .front ? .leftMirrored : .right
|
||||||
|
|
||||||
|
case .landscapeLeft:
|
||||||
|
return cameraPosition == .front ? .downMirrored : .up
|
||||||
|
|
||||||
|
case .portraitUpsideDown:
|
||||||
|
return cameraPosition == .front ? .rightMirrored : .left
|
||||||
|
|
||||||
|
case .landscapeRight:
|
||||||
|
return cameraPosition == .front ? .upMirrored : .down
|
||||||
|
|
||||||
|
case .unknown, .faceUp, .faceDown:
|
||||||
|
fallthrough
|
||||||
|
@unknown default:
|
||||||
|
return .up
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
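With the orientation now travelling alongside the buffer, a plugin can honour it when running image analysis. A sketch (this helper is hypothetical, not part of this commit) mapping the Frame's `UIImageOrientation` to a `CGImagePropertyOrientation`, e.g. for Vision requests:

```objc
#import <UIKit/UIKit.h>
#import <ImageIO/ImageIO.h>

// Hypothetical helper: UIImageOrientation -> CGImagePropertyOrientation.
static inline CGImagePropertyOrientation toCGOrientation(UIImageOrientation orientation) {
  switch (orientation) {
    case UIImageOrientationUp:            return kCGImagePropertyOrientationUp;
    case UIImageOrientationDown:          return kCGImagePropertyOrientationDown;
    case UIImageOrientationLeft:          return kCGImagePropertyOrientationLeft;
    case UIImageOrientationRight:         return kCGImagePropertyOrientationRight;
    case UIImageOrientationUpMirrored:    return kCGImagePropertyOrientationUpMirrored;
    case UIImageOrientationDownMirrored:  return kCGImagePropertyOrientationDownMirrored;
    case UIImageOrientationLeftMirrored:  return kCGImagePropertyOrientationLeftMirrored;
    case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
  }
  return kCGImagePropertyOrientationUp;
}
```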
@@ -1,22 +0,0 @@
-//
-//  CMSampleBufferRefHolder.h
-//  VisionCamera
-//
-//  Created by Marc Rousavy on 15.03.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#pragma once
-
-#import <Foundation/Foundation.h>
-#import <CoreMedia/CMSampleBuffer.h>
-
-@interface CMSampleBufferRefHolder : NSObject {
-  CMSampleBufferRef buffer;
-}
-
-- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer;
-
-@property (nonatomic) CMSampleBufferRef buffer;
-
-@end
@@ -1,25 +0,0 @@
-//
-//  CMSampleBufferRefHolder.m
-//  VisionCamera
-//
-//  Created by Marc Rousavy on 08.06.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#import "CMSampleBufferRefHolder.h"
-#import <Foundation/Foundation.h>
-#import <CoreMedia/CMSampleBuffer.h>
-
-@implementation CMSampleBufferRefHolder
-
-- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer {
-  self = [super init];
-  if (self) {
-    self.buffer = buffer;
-  }
-  return self;
-}
-
-@synthesize buffer;
-
-@end
ios/Frame Processor/Frame.h (new file, 22 lines)
@@ -0,0 +1,22 @@
+//
+//  Frame.h
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 15.03.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+#pragma once
+
+#import <Foundation/Foundation.h>
+#import <CoreMedia/CMSampleBuffer.h>
+#import <UIKit/UIImage.h>
+
+@interface Frame : NSObject
+
+- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation;
+
+@property (nonatomic, readonly) CMSampleBufferRef buffer;
+@property (nonatomic, readonly) UIImageOrientation orientation;
+
+@end
ios/Frame Processor/Frame.m (new file, 30 lines)
@@ -0,0 +1,30 @@
+//
+//  Frame.m
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 08.06.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+#import "Frame.h"
+#import <Foundation/Foundation.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+@implementation Frame {
+  CMSampleBufferRef buffer;
+  UIImageOrientation orientation;
+}
+
+- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation {
+  self = [super init];
+  if (self) {
+    _buffer = buffer;
+    _orientation = orientation;
+  }
+  return self;
+}
+
+@synthesize buffer = _buffer;
+@synthesize orientation = _orientation;
+
+@end
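A small usage sketch for the new type (assuming a valid `CMSampleBufferRef` is at hand and the header is on the include path): the initializer stores the buffer and orientation, and both are exposed as `readonly` properties.

```objc
#import "Frame.h"
#import <UIKit/UIKit.h>

// Sketch: wrapping an existing sample buffer in a Frame and reading it back.
static Frame* wrapSampleBuffer(CMSampleBufferRef sampleBuffer) {
  Frame* frame = [[Frame alloc] initWithBuffer:sampleBuffer
                                   orientation:UIImageOrientationRight];
  NSCAssert(frame.buffer == sampleBuffer, @"buffer is stored as-is");
  NSCAssert(frame.orientation == UIImageOrientationRight, @"orientation is stored as-is");
  return frame;
}
```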
@@ -10,12 +10,13 @@

 #import <jsi/jsi.h>
 #import <CoreMedia/CMSampleBuffer.h>
+#import "Frame.h"

 using namespace facebook;

 class JSI_EXPORT FrameHostObject: public jsi::HostObject {
 public:
-  explicit FrameHostObject(CMSampleBufferRef buffer): buffer(buffer) {}
+  explicit FrameHostObject(Frame* frame): frame(frame) {}
   ~FrameHostObject();

 public:
@@ -24,5 +25,5 @@ public:
   void destroyBuffer();

 public:
-  CMSampleBufferRef buffer;
+  Frame* frame;
 };
@@ -37,7 +37,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
   }
   if (name == "toString") {
     auto toString = [this] (jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value {
-      auto imageBuffer = CMSampleBufferGetImageBuffer(buffer);
+      auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
       auto width = CVPixelBufferGetWidth(imageBuffer);
       auto height = CVPixelBufferGetHeight(imageBuffer);
@@ -48,30 +48,30 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
   }

   if (name == "isValid") {
-    auto isValid = buffer != nil && CMSampleBufferIsValid(buffer);
+    auto isValid = frame != nil && CMSampleBufferIsValid(frame.buffer);
     return jsi::Value(isValid);
   }
   if (name == "isReady") {
-    auto isReady = buffer != nil && CMSampleBufferDataIsReady(buffer);
+    auto isReady = frame != nil && CMSampleBufferDataIsReady(frame.buffer);
     return jsi::Value(isReady);
   }
   if (name == "width") {
-    auto imageBuffer = CMSampleBufferGetImageBuffer(buffer);
+    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto width = CVPixelBufferGetWidth(imageBuffer);
     return jsi::Value((double) width);
   }
   if (name == "height") {
-    auto imageBuffer = CMSampleBufferGetImageBuffer(buffer);
+    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto height = CVPixelBufferGetHeight(imageBuffer);
     return jsi::Value((double) height);
   }
   if (name == "bytesPerRow") {
-    auto imageBuffer = CMSampleBufferGetImageBuffer(buffer);
+    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto bytesPerRow = CVPixelBufferGetPlaneCount(imageBuffer);
     return jsi::Value((double) bytesPerRow);
   }
   if (name == "planesCount") {
-    auto imageBuffer = CMSampleBufferGetImageBuffer(buffer);
+    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
     auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
     return jsi::Value((double) planesCount);
   }
@@ -85,5 +85,5 @@ FrameHostObject::~FrameHostObject() {

 void FrameHostObject::destroyBuffer() {
   // ARC will hopefully delete it lol
-  this->buffer = nil;
+  this->frame = nil;
 }
@@ -9,6 +9,6 @@
 #pragma once

 #import <Foundation/Foundation.h>
-#import <CoreMedia/CMSampleBuffer.h>
+#import "Frame.h"

-typedef void (^FrameProcessorCallback) (CMSampleBufferRef buffer);
+typedef void (^FrameProcessorCallback) (Frame* frame);
@@ -11,10 +11,10 @@

 #import <Foundation/Foundation.h>
 #import "FrameProcessorPluginRegistry.h"
-#import <CoreMedia/CMSampleBuffer.h>
+#import "Frame.h"

 @protocol FrameProcessorPluginBase
-+ (id) callback:(CMSampleBufferRef)buffer withArgs:(NSArray<id>*)args;
++ (id) callback:(Frame*)frame withArgs:(NSArray<id>*)args;
 @end

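A sketch of a plugin class conforming to the updated protocol (the class name is hypothetical); its class-method callback now receives a `Frame*` instead of a raw `CMSampleBufferRef`:

```objc
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>

// Hypothetical example class conforming to FrameProcessorPluginBase.
@interface ExampleOrientationPlugin : NSObject <FrameProcessorPluginBase>
@end

@implementation ExampleOrientationPlugin

+ (id) callback:(Frame*)frame withArgs:(NSArray<id>*)args {
  // Returned NSNumber* is converted to a JS `number` (see the types table above).
  return @(frame.orientation);
}

@end
```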
@@ -23,7 +23,7 @@

 /**
  * Use this Macro to register the given function as a Frame Processor.
- * * Make sure the given function is a C-style function with the following signature: static inline id callback(CMSampleBufferRef buffer)
+ * * Make sure the given function is a C-style function with the following signature: static inline id callback(Frame* frame, NSArray* args)
  * * Make sure the given function's name is unique across other frame processor plugins
  * * Make sure your frame processor returns a Value that can be converted to JS
  * * Make sure to use this Macro in an @implementation, not @interface
@@ -35,8 +35,8 @@
 \
 +(void)load \
 { \
-  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@"__" @ #frame_processor callback:^id(CMSampleBufferRef buffer, NSArray<id>* args) { \
-    return frame_processor(buffer, args); \
+  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@"__" @ #frame_processor callback:^id(Frame* frame, NSArray<id>* args) { \
+    return frame_processor(frame, args); \
   }]; \
 }
@@ -55,8 +55,8 @@ objc_name : NSObject<FrameProcessorPluginBase>
 \
 __attribute__((constructor)) static void VISION_CONCAT(initialize_, objc_name)() \
 { \
-  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@"__" @ #name callback:^id(CMSampleBufferRef buffer, NSArray<id>* args) { \
-    return [objc_name callback:buffer withArgs:args]; \
+  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@"__" @ #name callback:^id(Frame* frame, NSArray<id>* args) { \
+    return [objc_name callback:frame withArgs:args]; \
   }]; \
 }
@@ -9,9 +9,9 @@
 #pragma once

 #import <Foundation/Foundation.h>
-#import <CoreMedia/CMSampleBuffer.h>
+#import "Frame.h"

-typedef id (^FrameProcessorPlugin) (CMSampleBufferRef buffer, NSArray<id>* arguments);
+typedef id (^FrameProcessorPlugin) (Frame* frame, NSArray<id>* arguments);

 @interface FrameProcessorPluginRegistry : NSObject
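The typedef change means any block handed to the registry now receives a `Frame*`. A sketch of registering such a block directly with `addFrameProcessorPlugin:callback:` (normally the export macros shown above do this for you; the plugin name here is made up):

```objc
#import "FrameProcessorPluginRegistry.h"
#import "Frame.h"

// Hypothetical manual registration; the "__" prefix matches what the macros use.
__attribute__((constructor))
static void registerExamplePlugin(void) {
  [FrameProcessorPluginRegistry addFrameProcessorPlugin:@"__examplePlugin"
                                               callback:^id(Frame* frame, NSArray<id>* args) {
    return @(frame.orientation);
  }];
}
```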
@@ -85,15 +85,22 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
     NSLog(@"FrameProcessorBindings: Installing Frame Processor plugin \"%s\"...", pluginName);
     FrameProcessorPlugin callback = [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:pluginKey];

-    auto function = [callback, callInvoker](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value {
+    auto function = [callback, callInvoker](jsi::Runtime& runtime,
+                                            const jsi::Value& thisValue,
+                                            const jsi::Value* arguments,
+                                            size_t count) -> jsi::Value {
+
       auto frameHostObject = arguments[0].asObject(runtime).asHostObject(runtime);
       auto frame = static_cast<FrameHostObject*>(frameHostObject.get());

       auto args = convertJSICStyleArrayToNSArray(runtime,
                                                  arguments + 1, // start at index 1 since first arg = Frame
                                                  count - 1, // use smaller count
                                                  callInvoker);
-      id result = callback(frame->buffer, args);
+      id result = callback(frame->frame, args);

       return convertObjCObjectToJSIValue(runtime, result);

     };

     visionGlobal.setProperty(visionRuntime, pluginName, jsi::Function::createFromHostFunction(visionRuntime,
@@ -129,7 +136,10 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
   NSLog(@"FrameProcessorBindings: Installing global functions...");

   // setFrameProcessor(viewTag: number, frameProcessor: (frame: Frame) => void)
-  auto setFrameProcessor = [self](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value {
+  auto setFrameProcessor = [self](jsi::Runtime& runtime,
+                                  const jsi::Value& thisValue,
+                                  const jsi::Value* arguments,
+                                  size_t count) -> jsi::Value {
     NSLog(@"FrameProcessorBindings: Setting new frame processor...");
     if (!arguments[0].isNumber()) throw jsi::JSError(runtime, "Camera::setFrameProcessor: First argument ('viewTag') must be a number!");
     if (!arguments[1].isObject()) throw jsi::JSError(runtime, "Camera::setFrameProcessor: Second argument ('frameProcessor') must be a function!");
@@ -163,7 +173,10 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
                                               setFrameProcessor));

   // unsetFrameProcessor(viewTag: number)
-  auto unsetFrameProcessor = [](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value {
+  auto unsetFrameProcessor = [](jsi::Runtime& runtime,
+                                const jsi::Value& thisValue,
+                                const jsi::Value* arguments,
+                                size_t count) -> jsi::Value {
     NSLog(@"FrameProcessorBindings: Removing frame processor...");
     if (!arguments[0].isNumber()) throw jsi::JSError(runtime, "Camera::unsetFrameProcessor: First argument ('viewTag') must be a number!");
     auto viewTag = arguments[0].asNumber();
@@ -7,22 +7,22 @@
 //

 #import "FrameProcessorUtils.h"
-#import <CoreMedia/CMSampleBuffer.h>
 #import <chrono>
 #import <memory>
 #import "FrameHostObject.h"
+#import "Frame.h"

 FrameProcessorCallback convertJSIFunctionToFrameProcessorCallback(jsi::Runtime &runtime, const jsi::Function &value) {
   __block auto cb = value.getFunction(runtime);

-  return ^(CMSampleBufferRef buffer) {
+  return ^(Frame* frame) {
 #if DEBUG
     std::chrono::steady_clock::time_point begin = std::chrono::steady_clock::now();
 #endif

-    auto frame = std::make_shared<FrameHostObject>(buffer);
+    auto frameHostObject = std::make_shared<FrameHostObject>(frame);
     try {
-      cb.call(runtime, jsi::Object::createFromHostObject(runtime, frame));
+      cb.call(runtime, jsi::Object::createFromHostObject(runtime, frameHostObject));
     } catch (jsi::JSError& jsError) {
       NSLog(@"Frame Processor threw an error: %s", jsError.getMessage().c_str());
     }
@@ -39,6 +39,6 @@ FrameProcessorCallback convertJSIFunctionToFrameProcessorCallback(jsi::Runtime &
     // 1. we are sure we don't need it anymore, the frame processor worklet has finished executing.
     // 2. we don't know when the JS runtime garbage collects this object, it might be holding it for a few more frames
     //    which then blocks the camera queue from pushing new frames (memory limit)
-    frame->destroyBuffer();
+    frameHostObject->destroyBuffer();
   };
 }
@@ -12,7 +12,7 @@
 #import <ReactCommon/CallInvoker.h>
 #import <React/RCTBridge.h>
 #import <ReactCommon/TurboModuleUtils.h>
-#import "../Frame Processor/CMSampleBufferRefHolder.h"
+#import "../Frame Processor/Frame.h"
 #import "../Frame Processor/FrameHostObject.h"

 using namespace facebook;
@@ -68,11 +68,9 @@ jsi::Value convertObjCObjectToJSIValue(jsi::Runtime &runtime, id value)
     return convertNSArrayToJSIArray(runtime, (NSArray *)value);
   } else if (value == (id)kCFNull) {
     return jsi::Value::null();
-  } else if ([value isKindOfClass:[CMSampleBufferRefHolder class]]) {
-    // it's boxed in a CMSampleBufferRefHolder because CMSampleBufferRef is not an NSObject
-    CMSampleBufferRef buffer = [(CMSampleBufferRefHolder*)value buffer];
-    auto frame = std::make_shared<FrameHostObject>(buffer);
-    return jsi::Object::createFromHostObject(runtime, frame);
+  } else if ([value isKindOfClass:[Frame class]]) {
+    auto frameHostObject = std::make_shared<FrameHostObject>((Frame*)value);
+    return jsi::Object::createFromHostObject(runtime, frameHostObject);
   }
   return jsi::Value::undefined();
 }
@@ -155,7 +153,7 @@ id convertJSIValueToObjCObject(jsi::Runtime &runtime, const jsi::Value &value, s
     auto hostObject = o.asHostObject(runtime);
     auto frame = dynamic_cast<FrameHostObject*>(hostObject.get());
     if (frame != nullptr) {
-      return [[CMSampleBufferRefHolder alloc] initWithBuffer:frame->buffer];
+      return frame->frame;
     }
   }
   return convertJSIObjectToNSDictionary(runtime, o, jsInvoker);
@@ -81,7 +81,7 @@
   B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
   B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorUtils.mm; sourceTree = "<group>"; };
   B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorUtils.h; sourceTree = "<group>"; };
-  B8103E5725FF56F0007A1684 /* CMSampleBufferRefHolder.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CMSampleBufferRefHolder.h; sourceTree = "<group>"; };
+  B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
   B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
   B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "RCTBridge+runOnJS.h"; sourceTree = "<group>"; };
   B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "RCTBridge+runOnJS.mm"; sourceTree = "<group>"; };
@@ -140,7 +140,7 @@
   B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
   B8DCF09125EA7BEE00EA5C72 /* SpeedChecker.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SpeedChecker.h; sourceTree = "<group>"; };
   B8DCF14425EA817D00EA5C72 /* MakeJSIRuntime.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MakeJSIRuntime.h; sourceTree = "<group>"; };
-  B8F7DDD1266F715D00120533 /* CMSampleBufferRefHolder.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CMSampleBufferRefHolder.m; sourceTree = "<group>"; };
+  B8F7DDD1266F715D00120533 /* Frame.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Frame.m; sourceTree = "<group>"; };
 /* End PBXFileReference section */

 /* Begin PBXFrameworksBuildPhase section */
@@ -263,8 +263,8 @@
   B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */,
   B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */,
   B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */,
-  B8103E5725FF56F0007A1684 /* CMSampleBufferRefHolder.h */,
-  B8F7DDD1266F715D00120533 /* CMSampleBufferRefHolder.m */,
+  B8103E5725FF56F0007A1684 /* Frame.h */,
+  B8F7DDD1266F715D00120533 /* Frame.m */,
   B84760A22608EE38004C3180 /* FrameHostObject.h */,
   B84760A52608EE7C004C3180 /* FrameHostObject.mm */,
   B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */,