react-native-vision-camera/package/example/ios/VisionCameraExample.xcodeproj/project.pbxproj

// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB01A68108700A75B9A /* AppDelegate.mm */; };
13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; };
839E2C632ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m in Sources */ = {isa = PBXBuildFile; fileRef = 839E2C622ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m */; };
839E2C652ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 839E2C642ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift */; };
B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */; };
B8F0E10825E0199F00586F16 /* File.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F0E10725E0199F00586F16 /* File.swift */; };
C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
008F07F21AC5B25A0029DE68 /* main.jsbundle */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = main.jsbundle; sourceTree = "<group>"; };
13B07F961A680F5B00A75B9A /* VisionCameraExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = VisionCameraExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
13B07FAF1A68108700A75B9A /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = AppDelegate.h; path = VisionCameraExample/AppDelegate.h; sourceTree = "<group>"; };
13B07FB01A68108700A75B9A /* AppDelegate.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = AppDelegate.mm; path = VisionCameraExample/AppDelegate.mm; sourceTree = "<group>"; };
13B07FB51A68108700A75B9A /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Images.xcassets; path = VisionCameraExample/Images.xcassets; sourceTree = "<group>"; };
13B07FB61A68108700A75B9A /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = VisionCameraExample/Info.plist; sourceTree = "<group>"; };
13B07FB71A68108700A75B9A /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = VisionCameraExample/main.m; sourceTree = "<group>"; };
3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-VisionCameraExample.a"; sourceTree = BUILT_PRODUCTS_DIR; };
81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = VisionCameraExample/LaunchScreen.storyboard; sourceTree = "<group>"; };
839E2C622ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ExampleSwiftFrameProcessor.m; sourceTree = "<group>"; };
839E2C642ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExampleSwiftFrameProcessor.swift; sourceTree = "<group>"; };
B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ExampleFrameProcessorPlugin.m; sourceTree = "<group>"; };
B8F0E10625E0199F00586F16 /* VisionCameraExample-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "VisionCameraExample-Bridging-Header.h"; sourceTree = "<group>"; };
B8F0E10725E0199F00586F16 /* File.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = File.swift; sourceTree = "<group>"; };
C1D342AD8210E7627A632602 /* Pods-VisionCameraExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-VisionCameraExample.debug.xcconfig"; path = "Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample.debug.xcconfig"; sourceTree = "<group>"; };
ED297162215061F000B7C4FE /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = System/Library/Frameworks/JavaScriptCore.framework; sourceTree = SDKROOT; };
ED2971642150620600B7C4FE /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS12.0.sdk/System/Library/Frameworks/JavaScriptCore.framework; sourceTree = DEVELOPER_DIR; };
F64CB73AE44CAC94E069BF68 /* Pods-VisionCameraExample.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-VisionCameraExample.release.xcconfig"; path = "Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample.release.xcconfig"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
13B07F8C1A680F5B00A75B9A /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
13B07FAE1A68108700A75B9A /* VisionCameraExample */ = {
isa = PBXGroup;
children = (
B8DB3BD6263DEA31004C18D7 /* Frame Processor Plugins */,
008F07F21AC5B25A0029DE68 /* main.jsbundle */,
13B07FAF1A68108700A75B9A /* AppDelegate.h */,
13B07FB01A68108700A75B9A /* AppDelegate.mm */,
13B07FB51A68108700A75B9A /* Images.xcassets */,
13B07FB61A68108700A75B9A /* Info.plist */,
81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */,
13B07FB71A68108700A75B9A /* main.m */,
B8F0E10725E0199F00586F16 /* File.swift */,
B8F0E10625E0199F00586F16 /* VisionCameraExample-Bridging-Header.h */,
);
name = VisionCameraExample;
sourceTree = "<group>";
};
2D16E6871FA4F8E400B85C8A /* Frameworks */ = {
isa = PBXGroup;
children = (
ED297162215061F000B7C4FE /* JavaScriptCore.framework */,
ED2971642150620600B7C4FE /* JavaScriptCore.framework */,
3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */,
);
name = Frameworks;
sourceTree = "<group>";
};
6B9684456A2045ADE5A6E47E /* Pods */ = {
isa = PBXGroup;
children = (
C1D342AD8210E7627A632602 /* Pods-VisionCameraExample.debug.xcconfig */,
F64CB73AE44CAC94E069BF68 /* Pods-VisionCameraExample.release.xcconfig */,
);
path = Pods;
sourceTree = "<group>";
};
832341AE1AAA6A7D00B99B32 /* Libraries */ = {
isa = PBXGroup;
children = (
);
name = Libraries;
sourceTree = "<group>";
};
839E2C612ACB2E140037BC2B /* Example Swift Plugin */ = {
isa = PBXGroup;
children = (
839E2C622ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m */,
839E2C642ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift */,
);
path = "Example Swift Plugin";
sourceTree = "<group>";
};
83CBB9F61A601CBA00E9B192 = {
isa = PBXGroup;
children = (
13B07FAE1A68108700A75B9A /* VisionCameraExample */,
832341AE1AAA6A7D00B99B32 /* Libraries */,
83CBBA001A601CBA00E9B192 /* Products */,
2D16E6871FA4F8E400B85C8A /* Frameworks */,
6B9684456A2045ADE5A6E47E /* Pods */,
);
indentWidth = 2;
sourceTree = "<group>";
tabWidth = 2;
usesTabs = 0;
};
83CBBA001A601CBA00E9B192 /* Products */ = {
isa = PBXGroup;
children = (
13B07F961A680F5B00A75B9A /* VisionCameraExample.app */,
);
name = Products;
sourceTree = "<group>";
};
B8DB3BD6263DEA31004C18D7 /* Frame Processor Plugins */ = {
isa = PBXGroup;
children = (
B8DB3BD7263DEA31004C18D7 /* Example Plugin */,
839E2C612ACB2E140037BC2B /* Example Swift Plugin */,
);
path = "Frame Processor Plugins";
sourceTree = "<group>";
};
B8DB3BD7263DEA31004C18D7 /* Example Plugin */ = {
isa = PBXGroup;
children = (
B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */,
);
path = "Example Plugin";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
13B07F861A680F5B00A75B9A /* VisionCameraExample */ = {
isa = PBXNativeTarget;
buildConfigurationList = 13B07F931A680F5B00A75B9A /* Build configuration list for PBXNativeTarget "VisionCameraExample" */;
buildPhases = (
CF06330A830C3A8E3D0B576B /* [CP] Check Pods Manifest.lock */,
B81F12712604CF8800B08949 /* SwiftFormat */,
B81F12702604CF6600B08949 /* SwiftLint */,
FD10A7F022414F080027D42C /* Start Packager */,
13B07F871A680F5B00A75B9A /* Sources */,
13B07F8C1A680F5B00A75B9A /* Frameworks */,
13B07F8E1A680F5B00A75B9A /* Resources */,
00DD1BFF1BD5951E006B06BC /* Bundle React Native code and images */,
5E303E5293C2545D8F744D87 /* [CP] Copy Pods Resources */,
679552FCF377DC1EC4565BD1 /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = VisionCameraExample;
productName = VisionCameraExample;
productReference = 13B07F961A680F5B00A75B9A /* VisionCameraExample.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
83CBB9F71A601CBA00E9B192 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1250;
TargetAttributes = {
13B07F861A680F5B00A75B9A = {
DevelopmentTeam = CJW62Q77E7;
LastSwiftMigration = 1240;
};
};
};
buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VisionCameraExample" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 83CBB9F61A601CBA00E9B192;
productRefGroup = 83CBBA001A601CBA00E9B192 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
13B07F861A680F5B00A75B9A /* VisionCameraExample */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
13B07F8E1A680F5B00A75B9A /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */,
13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
00DD1BFF1BD5951E006B06BC /* Bundle React Native code and images */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Bundle React Native code and images";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "export NODE_BINARY=node\n../node_modules/react-native/scripts/react-native-xcode.sh";
};
5E303E5293C2545D8F744D87 /* [CP] Copy Pods Resources */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${PODS_ROOT}/Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample-resources.sh",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/AntDesign.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Entypo.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/EvilIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Feather.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome5_Brands.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome5_Regular.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome5_Solid.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome6_Brands.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome6_Regular.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome6_Solid.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Fontisto.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Foundation.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Ionicons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/MaterialCommunityIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/MaterialIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Octicons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/SimpleLineIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Zocial.ttf",
"${PODS_CONFIGURATION_BUILD_DIR}/React-Core/AccessibilityResources.bundle",
);
name = "[CP] Copy Pods Resources";
outputPaths = (
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/AntDesign.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Entypo.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/EvilIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Feather.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome5_Brands.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome5_Regular.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome5_Solid.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome6_Brands.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome6_Regular.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome6_Solid.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Fontisto.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Foundation.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Ionicons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/MaterialCommunityIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/MaterialIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Octicons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/SimpleLineIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Zocial.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/AccessibilityResources.bundle",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample-resources.sh\"\n";
showEnvVarsInLog = 0;
};
679552FCF377DC1EC4565BD1 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${PODS_ROOT}/Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample-frameworks.sh",
"${PODS_XCFRAMEWORKS_BUILD_DIR}/hermes-engine/Pre-built/hermes.framework/hermes",
);
name = "[CP] Embed Pods Frameworks";
outputPaths = (
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/hermes.framework",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
B81F12702604CF6600B08949 /* SwiftLint */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
);
name = SwiftLint;
outputFileListPaths = (
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "if which swiftlint >/dev/null; then\n swiftlint\nelse\n echo \"warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint\"\nfi\n";
};
B81F12712604CF8800B08949 /* SwiftFormat */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
);
name = SwiftFormat;
outputFileListPaths = (
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "if which swiftformat >/dev/null; then\n swiftformat .\nelse\n echo \"warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat\"\nfi\n";
};
CF06330A830C3A8E3D0B576B /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputFileListPaths = (
);
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-VisionCameraExample-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
FD10A7F022414F080027D42C /* Start Packager */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
);
name = "Start Packager";
outputFileListPaths = (
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "export RCT_METRO_PORT=\"${RCT_METRO_PORT:=8081}\"\necho \"export RCT_METRO_PORT=${RCT_METRO_PORT}\" > \"${SRCROOT}/../node_modules/react-native/scripts/.packager.env\"\nif [ -z \"${RCT_NO_LAUNCH_PACKAGER+xxx}\" ] ; then\n if nc -w 5 -z localhost ${RCT_METRO_PORT} ; then\n if ! curl -s \"http://localhost:${RCT_METRO_PORT}/status\" | grep -q \"packager-status:running\" ; then\n echo \"Port ${RCT_METRO_PORT} already in use, packager is either not running or not running correctly\"\n exit 2\n fi\n else\n open \"$SRCROOT/../node_modules/react-native/scripts/launchPackager.command\" || echo \"Can't start packager automatically\"\n fi\nfi\n";
showEnvVarsInLog = 0;
};
/* End PBXShellScriptBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
13B07F871A680F5B00A75B9A /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */,
B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */,
839E2C652ACB2E420037BC2B /* ExampleSwiftFrameProcessor.swift in Sources */,
839E2C632ACB2E330037BC2B /* ExampleSwiftFrameProcessor.m in Sources */,
B8F0E10825E0199F00586F16 /* File.swift in Sources */,
13B07FC11A68108700A75B9A /* main.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */

/* Begin XCBuildConfiguration section */
13B07F941A680F5B00A75B9A /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = C1D342AD8210E7627A632602 /* Pods-VisionCameraExample.debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = CJW62Q77E7;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = VisionCameraExample/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography";
IPHONEOS_DEPLOYMENT_TARGET = 12.4;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
OTHER_LDFLAGS = (
"$(inherited)",
"-ObjC",
"-lc++",
);
PRODUCT_BUNDLE_IDENTIFIER = com.mrousavy.camera.example;
PRODUCT_NAME = VisionCameraExample;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
SWIFT_OBJC_BRIDGING_HEADER = "VisionCameraExample-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.2;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
};
name = Debug;
};
13B07F951A680F5B00A75B9A /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = F64CB73AE44CAC94E069BF68 /* Pods-VisionCameraExample.release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = CJW62Q77E7;
INFOPLIST_FILE = VisionCameraExample/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography";
IPHONEOS_DEPLOYMENT_TARGET = 12.4;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
OTHER_LDFLAGS = (
"$(inherited)",
"-ObjC",
"-lc++",
);
PRODUCT_BUNDLE_IDENTIFIER = com.mrousavy.camera.example;
PRODUCT_NAME = VisionCameraExample;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
SWIFT_OBJC_BRIDGING_HEADER = "VisionCameraExample-Bridging-Header.h";
SWIFT_VERSION = 5.2;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
};
name = Release;
};
83CBBA201A601CBA00E9B192 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = i386;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION,
);
GCC_SYMBOLS_PRIVATE_EXTERN = NO;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "/usr/lib/swift $(inherited)";
LIBRARY_SEARCH_PATHS = (
"$(SDKROOT)/usr/lib/swift",
"\"$(TOOLCHAIN_DIR)/usr/lib/swift/$(PLATFORM_NAME)\"",
"\"$(inherited)\"",
);
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "$(inherited)";
OTHER_CPLUSPLUSFLAGS = "$(inherited)";
OTHER_LDFLAGS = (
"$(inherited)",
"-Wl",
"-ld_classic",
);
OTHER_SWIFT_FLAGS = "$(inherited) -DVISION_CAMERA_ENABLE_FRAME_PROCESSORS";
REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native";
SDKROOT = iphoneos;
};
name = Debug;
};
83CBBA211A601CBA00E9B192 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = YES;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
"EXCLUDED_ARCHS[sdk=iphonesimulator*]" = i386;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = (
"$(inherited)",
_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION,
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "/usr/lib/swift $(inherited)";
LIBRARY_SEARCH_PATHS = (
"$(SDKROOT)/usr/lib/swift",
"\"$(TOOLCHAIN_DIR)/usr/lib/swift/$(PLATFORM_NAME)\"",
"\"$(inherited)\"",
);
MTL_ENABLE_DEBUG_INFO = NO;
OTHER_CFLAGS = "$(inherited)";
OTHER_CPLUSPLUSFLAGS = "$(inherited)";
OTHER_LDFLAGS = (
"$(inherited)",
"-Wl",
"-ld_classic",
);
OTHER_SWIFT_FLAGS = "$(inherited) -DVISION_CAMERA_ENABLE_FRAME_PROCESSORS";
REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native";
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
13B07F931A680F5B00A75B9A /* Build configuration list for PBXNativeTarget "VisionCameraExample" */ = {
isa = XCConfigurationList;
buildConfigurations = (
13B07F941A680F5B00A75B9A /* Debug */,
13B07F951A680F5B00A75B9A /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VisionCameraExample" */ = {
isa = XCConfigurationList;
buildConfigurations = (
83CBBA201A601CBA00E9B192 /* Debug */,
83CBBA211A601CBA00E9B192 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 83CBB9F71A601CBA00E9B192 /* Project object */;
}