react-native-vision-camera/src/CameraDevice.ts

import type { CameraPosition } from './CameraPosition';
import type { PixelFormat } from './PixelFormat';
/**
* Identifiers for a physical camera (one that actually exists on the back/front of the device)
*
* * `"ultra-wide-angle-camera"`: A built-in camera with a shorter focal length than that of a wide-angle camera. (focal length below 24mm)
* * `"wide-angle-camera"`: A built-in wide-angle camera. (focal length between 24mm and 35mm)
* * `"telephoto-camera"`: A built-in camera device with a longer focal length than a wide-angle camera. (focal length above 85mm)
*/
2021-02-20 09:07:10 -07:00
export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-camera' | 'telephoto-camera';
/**
* Identifiers for a logical camera (a combination of multiple physical cameras that creates a single logical camera).
*
* * `"dual-camera"`: A combination of wide-angle and telephoto cameras that creates a capture device.
* * `"dual-wide-camera"`: A device that consists of two cameras of fixed focal length, one ultrawide angle and one wide angle.
* * `"triple-camera"`: A device that consists of three cameras of fixed focal length, one ultrawide angle, one wide angle, and one telephoto.
*/
export type LogicalCameraDeviceType = 'dual-camera' | 'dual-wide-camera' | 'triple-camera';
/**
* Parses an array of physical device types into a single {@linkcode PhysicalCameraDeviceType} or {@linkcode LogicalCameraDeviceType}, depending on what matches.
* @method
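*
* For illustration, a minimal sketch of how input combinations map to results (mirroring the checks in the implementation below):
* @example
* parsePhysicalDeviceTypes(['wide-angle-camera']) // 'wide-angle-camera'
* parsePhysicalDeviceTypes(['wide-angle-camera', 'telephoto-camera']) // 'dual-camera'
* parsePhysicalDeviceTypes(['ultra-wide-angle-camera', 'wide-angle-camera']) // 'dual-wide-camera'
* parsePhysicalDeviceTypes(['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera']) // 'triple-camera'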
*/
export const parsePhysicalDeviceTypes = (
physicalDeviceTypes: PhysicalCameraDeviceType[],
): PhysicalCameraDeviceType | LogicalCameraDeviceType => {
if (physicalDeviceTypes.length === 1) {
// @ts-expect-error for very obvious reasons
return physicalDeviceTypes[0];
}
const hasWide = physicalDeviceTypes.includes('wide-angle-camera');
const hasUltra = physicalDeviceTypes.includes('ultra-wide-angle-camera');
const hasTele = physicalDeviceTypes.includes('telephoto-camera');
if (hasTele && hasWide && hasUltra) return 'triple-camera';
if (hasWide && hasUltra) return 'dual-wide-camera';
if (hasWide && hasTele) return 'dual-camera';
throw new Error(`Invalid physical device type combination! ${physicalDeviceTypes.join(' + ')}`);
};
/**
* Indicates a format's color space.
*
* #### The following color spaces are available on iOS:
* * `"srgb"`: The sRGB color space.
* * `"p3-d65"`: The P3 D65 wide color space which uses Illuminant D65 as the white point
* * `"hlg-bt2020"`: The BT2020 wide color space which uses Illuminant D65 as the white point and Hybrid Log-Gamma as the transfer function
*
* > See ["AVCaptureColorSpace"](https://developer.apple.com/documentation/avfoundation/avcapturecolorspace) for more information.
*
* #### The following color spaces are available on Android:
* * `"yuv"`: The Multi-plane Android YCbCr color space. (YUV 420_888, 422_888 or 444_888)
* * `"jpeg"`: The compressed JPEG color space.
* * `"jpeg-depth"`: The compressed JPEG color space including depth data.
* * `"raw"`: The Camera's RAW sensor color space. (Single-channel Bayer-mosaic image, usually 16 bit)
* * `"heic"`: The compressed HEIC color space.
* * `"private"`: The Android private opaque image format. (The choices of the actual format and pixel data layout are entirely up to the device-specific and framework internal implementations, and may vary depending on use cases even for the same device. These buffers are not directly accessible to the application)
* * `"depth-16"`: The Android dense depth image format (16 bit)
* * `"unknown"`: Placeholder for an unknown image/pixel format. [Edit this file](https://github.com/mrousavy/react-native-vision-camera/edit/main/android/src/main/java/com/mrousavy/camera/parsers/ImageFormat+String.kt) to add a name for the unknown format.
*
* > See ["Android Color Formats"](https://jbit.net/Android_Colors/) for more information.
*/
export type ColorSpace =
// ios
| 'hlg-bt2020'
| 'p3-d65'
| 'srgb'
// android
| 'yuv'
| 'jpeg'
| 'jpeg-depth'
| 'raw'
| 'heic'
| 'private'
| 'depth-16'
| 'unknown';
/**
* Indicates a format's autofocus system.
*
* * `"none"`: Indicates that autofocus is not available
* * `"contrast-detection"`: Indicates that autofocus is achieved by contrast detection. Contrast detection performs a focus scan to find the optimal position
* * `"phase-detection"`: Indicates that autofocus is achieved by phase detection. Phase detection has the ability to achieve focus in many cases without a focus scan. Phase detection autofocus is typically less visually intrusive than contrast detection autofocus
*/
export type AutoFocusSystem = 'contrast-detection' | 'phase-detection' | 'none';
/**
* Indicates a format's supported video stabilization mode
*
* * `"off"`: Indicates that video should not be stabilized
* * `"standard"`: Indicates that video should be stabilized using the standard video stabilization algorithm introduced with iOS 5.0. Standard video stabilization has a reduced field of view. Enabling video stabilization may introduce additional latency into the video capture pipeline
* * `"cinematic"`: Indicates that video should be stabilized using the cinematic stabilization algorithm for more dramatic results. Cinematic video stabilization has a reduced field of view compared to standard video stabilization. Enabling cinematic video stabilization introduces much more latency into the video capture pipeline than standard video stabilization and consumes significantly more system memory. Use narrow or identical min and max frame durations in conjunction with this mode
* * `"cinematic-extended"`: Indicates that the video should be stabilized using the extended cinematic stabilization algorithm. Enabling extended cinematic stabilization introduces longer latency into the video capture pipeline compared to the AVCaptureVideoStabilizationModeCinematic and consumes more memory, but yields improved stability. It is recommended to use identical or similar min and max frame durations in conjunction with this mode (iOS 13.0+)
* * `"auto"`: Indicates that the most appropriate video stabilization mode for the device and format should be chosen automatically
*/
export type VideoStabilizationMode = 'off' | 'standard' | 'cinematic' | 'cinematic-extended' | 'auto';
export interface FrameRateRange {
minFrameRate: number;
maxFrameRate: number;
}
/**
* A Camera Device's video format. Do not create instances of this type yourself; only use {@linkcode Camera.getAvailableCameraDevices | Camera.getAvailableCameraDevices()}.
*/
export interface CameraDeviceFormat {
/**
* The height of the highest resolution a still image (photo) can be produced in
*/
photoHeight: number;
/**
* The width of the highest resolution a still image (photo) can be produced in
*/
photoWidth: number;
/**
* The video resolution's height
*/
videoHeight: number;
/**
* The video resolution's width
*/
videoWidth: number;
/**
* A boolean value specifying whether this format supports the highest possible photo quality that can be delivered on the current platform.
*
* @platform iOS 13.0+
*/
isHighestPhotoQualitySupported?: boolean;
/**
* Maximum supported ISO value
*/
maxISO: number;
/**
* Minimum supported ISO value
*/
minISO: number;
/**
* The video field of view in degrees
*/
fieldOfView: number;
/**
* The maximum zoom factor (e.g. `128`)
*/
maxZoom: number;
/**
* The available color spaces.
*
* Note: On Android, this will only ever be `["yuv"]`
*/
colorSpaces: ColorSpace[];
/**
* Specifies whether this format supports HDR mode for video capture
*/
supportsVideoHDR: boolean;
/**
* Specifies whether this format supports HDR mode for photo capture
*/
supportsPhotoHDR: boolean;
/**
* All available frame rate ranges. You can query this to find the highest frame rate available
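*
* For example, a minimal sketch of finding the highest frame rate a format supports (`format` is an illustrative variable holding a `CameraDeviceFormat`):
* @example
* const maxFrameRate = format.frameRateRanges.reduce(
*   (max, range) => Math.max(max, range.maxFrameRate),
*   0,
* )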
*/
frameRateRanges: FrameRateRange[];
/**
* Specifies this format's auto focus system.
*/
autoFocusSystem: AutoFocusSystem;
/**
* All supported video stabilization modes
*/
videoStabilizationModes: VideoStabilizationMode[];
/**
* Specifies this format's pixel format. The pixel format specifies how the individual pixels are interpreted as a visual image.
*
* The most common format is `420v`. Some formats (like `x420`) are not compatible with some frame processor plugins (e.g. MLKit)
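*
* As an illustration, a sketch of narrowing formats to a given pixel format (the `'420v'` value is the common format mentioned above; `device` is an illustrative `CameraDevice`):
* @example
* const compatibleFormats = device.formats.filter((f) => f.pixelFormat === '420v')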
*/
pixelFormat: PixelFormat;
}
/**
* Represents a camera device discovered by the {@linkcode Camera.getAvailableCameraDevices | Camera.getAvailableCameraDevices()} function
*/
export interface CameraDevice {
/**
* The native ID of the camera device instance.
*/
id: string;
/**
* The physical devices this `CameraDevice` contains.
*
* * If this camera device is a **logical camera** (combination of multiple physical cameras), there are multiple cameras in this array.
* * If this camera device is a **physical camera**, there is only a single element in this array.
*
* You can check if the camera is a logical multi-camera by using the `isMultiCam` property.
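*
* As a small sketch (`device` is an illustrative variable holding a `CameraDevice`):
* @example
* if (device.isMultiCam) {
*   console.log('Logical camera built from: ' + device.devices.join(' + '))
* } else {
*   console.log('Physical camera: ' + device.devices[0])
* }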
*/
devices: PhysicalCameraDeviceType[];
/**
* Specifies the physical position of this camera. (back or front)
*/
position: CameraPosition;
/**
* A friendly localized name describing the camera.
*/
name: string;
/**
* Specifies whether this camera supports enabling flash for photo capture.
*/
hasFlash: boolean;
/**
* Specifies whether this camera supports continuously enabling the flash to act like a torch (flash with video capture)
*/
hasTorch: boolean;
/**
* A property indicating whether the device is a virtual multi-camera consisting of multiple combined physical cameras.
*
* Examples:
* * The Dual Camera, which supports seamlessly switching between a wide and telephoto camera while zooming and generating depth data from the disparities between the different points of view of the physical cameras.
* * The TrueDepth Camera, which generates depth data from disparities between a YUV camera and an Infrared camera pointed in the same direction.
*/
isMultiCam: boolean;
/**
* Minimum available zoom factor (e.g. `1`)
*/
minZoom: number;
/**
* Maximum available zoom factor (e.g. `128`)
*/
maxZoom: number;
/**
* The zoom factor where the camera is "neutral".
*
* * For single-physical cameras this property is always `1.0`.
* * For multi cameras this property is a value between `minZoom` and `maxZoom`, where the camera is in _wide-angle_ mode and hasn't switched to the _ultra-wide-angle_ ("fish-eye") or telephoto camera yet.
*
* Use this value as an initial value for the zoom property if you implement custom zoom. (e.g. a Reanimated shared value should initially be set to this value)
* @example
* const device = ...
*
* const zoom = useSharedValue(device.neutralZoom) // <-- initial value so it doesn't start at ultra-wide
* const cameraProps = useAnimatedProps(() => ({
* zoom: zoom.value
* }))
*/
neutralZoom: number;
/**
* All available formats for this camera device. Use this to find the best format for your use case and set it as the Camera's {@linkcode CameraProps.format | format} property.
2021-03-08 10:51:53 -07:00
*
* See [the Camera Formats documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/formats) for more information about Camera Formats.
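*
* As an illustration (not a prescribed selection strategy), a sketch of picking the format with the largest photo resolution that still supports at least 1080p video — the filter and sort criteria are assumptions for this example:
* @example
* const bestFormat = device.formats
*   .filter((f) => f.videoHeight >= 1080)
*   .sort((a, b) => b.photoHeight * b.photoWidth - a.photoHeight * a.photoWidth)[0]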
*/
formats: CameraDeviceFormat[];
/**
* Whether this camera device supports using Video Recordings (`video={true}`) and Frame Processors (`frameProcessor={...}`) at the same time. See ["The `supportsParallelVideoProcessing` prop"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices#the-supportsparallelvideoprocessing-prop) for more information.
*
* If this property is `false`, you can only enable `video` or add a `frameProcessor`, but not both.
*
* * On iOS this value is always `true`.
* * On newer Android devices this value is always `true`.
* * On older Android devices this value is `false` if the Camera's hardware level is `LEGACY` or `LIMITED`, `true` otherwise. (See [`INFO_SUPPORTED_HARDWARE_LEVEL`](https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL) or [the tables at "Regular capture"](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture))
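*
* As a sketch, this flag can gate whether `video` is enabled alongside a frame processor (`frameProcessor` here is an illustrative, already-created frame processor; the JSX is only an example of wiring the props):
* @example
* const enableVideo = device.supportsParallelVideoProcessing
* return <Camera device={device} isActive={true} video={enableVideo} frameProcessor={frameProcessor} />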
*/
supportsParallelVideoProcessing: boolean;
/**
* Whether this camera device supports low light boost.
*/
supportsLowLightBoost: boolean;
/**
* Whether this camera supports taking photos with depth data.
*
* **! Work in Progress !**
*/
supportsDepthCapture: boolean;
/**
* Whether this camera supports taking photos in RAW format
*
* **! Work in Progress !**
*/
supportsRawCapture: boolean;
/**
* Specifies whether this device supports focusing ({@linkcode Camera.focus | Camera.focus(...)})
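*
* A sketch of a tap-to-focus check (`camera` is assumed to be a ref to a mounted Camera and `tapX`/`tapY` are illustrative view coordinates — see {@linkcode Camera.focus | Camera.focus(...)} for the exact signature):
* @example
* if (device.supportsFocus) {
*   await camera.current?.focus({ x: tapX, y: tapY })
* }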
*/
supportsFocus: boolean;
}