* Make Frame Processors an extra subspec * Update VisionCamera.podspec * Make optional * Make VisionCamera compile without Skia * Fix * Add skia again * Update VisionCamera.podspec * Make VisionCamera build without Frame Processors * Rename error to `system/frame-processors-unavailable` * Fix Frame Processor returning early * Remove `preset`, FP partial rewrite * Only warn on frame drop * Fix wrong queue * fix: Run on CameraQueue again * Update CameraView.swift * fix: Activate audio session asynchronously on audio queue * Update CameraView+RecordVideo.swift * Update PreviewView.h * Cleanups * Cleanup * fix cast * feat: Add LiDAR Depth Camera support * Upgrade Ruby * Add vector icons type * Update Gemfile.lock * fix: Stop queues on deinit * Also load `builtInTrueDepthCamera` * Update CameraViewManager.swift * Update SkImageHelpers.mm * Extract FrameProcessorCallback to FrameProcessor Holds more context now :) * Rename to .m * fix: Add `RCTLog` import * Create SkiaFrameProcessor * Update CameraBridge.h * Call Frame Processor * Fix defines * fix: Allow deleting callback funcs * fix Skia build * batch * Just call `setSkiaFrameProcessor` * Rewrite in Swift * Pass `SkiaRenderer` * Fix Import * Move `PreviewView` to Swift * Fix Layer * Set Skia Canvas to Frame Host Object * Make `DrawableFrameHostObject` subclass * Fix TS types * Use same MTLDevice and apply scale * Make getter * Extract `setTorch` and `Preview` * fix: Fix nil metal device * Don't wait for session stop in deinit * Use main pixel ratio * Use unique_ptr for Render Contexts * fix: Fix SkiaPreviewDisplayLink broken after deinit * inline `getTextureCache` * Update CameraPage.tsx * chore: Format iOS * perf: Allow MTLLayer to be optimized for only frame buffers * Add RN Video types * fix: Fix Frame Processors if guard * Find nodeModules recursively * Create `Frame.isDrawable` * Add `cocoapods-check` dependency
		
			
				
	
	
		
			50 lines
		
	
	
		
			1.2 KiB
		
	
	
	
		
			Objective-C
		
	
	
	
	
	
			
		
		
	
	
			50 lines
		
	
	
		
			1.2 KiB
		
	
	
	
		
			Objective-C
		
	
	
	
	
	
//
//  ExampleFrameProcessorPlugin.m
//  VisionCameraExample
//
//  Created by Marc Rousavy on 01.05.21.
//

#if __has_include(<VisionCamera/FrameProcessorPlugin.h>)
#import <Foundation/Foundation.h>
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>

// Example of a Frame Processor plugin written in Objective-C.
// It logs the incoming frame's pixel dimensions plus every argument it
// receives, then returns a dictionary demonstrating the supported
// JS-convertible return types (string, bool, double, array).
@interface ExampleFrameProcessorPlugin : FrameProcessorPlugin
@end

@implementation ExampleFrameProcessorPlugin

/// The name under which this plugin is exposed to JS Frame Processors.
- (NSString *)name {
  return @"example_plugin";
}

/// Invoked for every frame the JS Frame Processor calls this plugin on.
/// @param frame The current camera frame (wraps a CMSampleBuffer).
/// @param arguments Extra arguments passed from the JS call site.
/// @return Any JS-convertible value; here a dictionary of example values.
- (id)callback:(Frame *)frame withArguments:(NSArray<id> *)arguments {
  CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
  // CVPixelBufferGetWidth/Height return size_t, hence %zu.
  NSLog(@"ExamplePlugin: %zu x %zu Image. Logging %lu parameters:",
        CVPixelBufferGetWidth(imageBuffer),
        CVPixelBufferGetHeight(imageBuffer),
        (unsigned long)arguments.count);

  for (id param in arguments) {
    // NOTE(review): fast enumeration never yields nil, so the nil branch is
    // purely defensive. `classForCoder` unwraps private cluster subclasses
    // (e.g. __NSCFString) to a readable public class name.
    NSLog(@"ExamplePlugin:   -> %@ (%@)",
          param == nil ? @"(nil)" : [param description],
          NSStringFromClass([param classForCoder]));
  }

  return @{
    @"example_str": @"Test",
    @"example_bool": @YES,
    @"example_double": @5.3,
    @"example_array": @[
      @"Hello",
      @YES,
      @17.38
    ]
  };
}

/// Registers this plugin with VisionCamera when the class is loaded into
/// the Objective-C runtime (before `main`), so no manual setup is needed.
+ (void)load {
  [self registerPlugin:[[ExampleFrameProcessorPlugin alloc] init]];
}

@end
#endif
 |