docs: New V3 docs for new API (#1842)
* docs: New V3 docs for new API
* fix: Prefer Wide-Angle unless explicitly opted-out
* docs: Update DEVICES
* Finish Devices docs
* Switch links
* Revert "Switch links". This reverts commit 06f196ae0e67787cbd5768e125be6d0a3cb5bbc9.
* docs: New LIFECYCLE
* docs: New CAPTURING docs
* Update Worklets links
* docs: Update TROUBLESHOOTING and ZOOMING
* fix: Update `getAvailableCameraDevices()` usages
* docs: Update FORMATS
* Update Errors.kt
* docs: Fix broken links
* docs: Update references to old hooks
* docs: Create Frame Processor Tips
* docs: Auto-dark mode
* fix: Fix FPS filter
* feat: Add `'max'` flag to format filter
* fix: Use loop
* fix: Fix bug in `getCameraFormat`
* fix: Find best aspect ratio as well
* fix: Switch between formats on FPS change
* Update FRAME_PROCESSOR_PLUGIN_LIST.mdx
* Add FPS graph explanation
* feat: Support HDR filter
* docs: Add HDR docs
* docs: Add Video Stabilization
* docs: Update Skia docs
* Skia links
* Add Skia labels
* Update SKIA_FRAME_PROCESSORS.mdx
* docs: Add Performance
* Update some wording
* Update headers / and zoom
* Add examples for devices
* fix highlights
* fix: Expose `Frame`
* docs: Update FP docs
* Update links
* Update FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
@@ -43,7 +43,11 @@ class InvalidTypeScriptUnionError(unionName: String, unionValue: String) :
  CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")

class NoCameraDeviceError :
  CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
  CameraError(
    "device",
    "no-device",
    "No device was set! Use `useCameraDevice(..)` or `Camera.getAvailableCameraDevices()` to select a suitable Camera device."
  )
class PixelFormatNotSupportedError(format: String) :
  CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
class PixelFormatNotSupportedInVideoPipelineError(format: String) :
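The new error message above points at the two supported ways of picking a device in V3. A minimal TypeScript sketch of both paths, based only on the APIs that appear elsewhere in this diff (the helper names are illustrative):

```ts
import { Camera, useCameraDevice } from 'react-native-vision-camera'

// Hook-based selection inside a component (preferred):
export function useBackCamera() {
  // Returns undefined until a matching Camera Device is found.
  return useCameraDevice('back')
}

// Imperative selection, e.g. outside of React components:
export function listBackCameras() {
  const devices = Camera.getAvailableCameraDevices()
  return devices.filter((d) => d.position === 'back')
}
```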
@@ -1,10 +1,10 @@
import * as React from 'react';
import { useRef, useState, useCallback } from 'react';
import { useRef, useState, useCallback, useMemo } from 'react';
import { StyleSheet, Text, View } from 'react-native';
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler';
import { CameraRuntimeError, PhotoFile, useCameraDevice, useCameraFormat, useFrameProcessor, VideoFile } from 'react-native-vision-camera';
import { Camera } from 'react-native-vision-camera';
import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING } from './Constants';
import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING, SCREEN_HEIGHT, SCREEN_WIDTH } from './Constants';
import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated';
import { useEffect } from 'react';
import { useIsForeground } from './hooks/useIsForeground';
@@ -49,19 +49,23 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
    physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'],
  });

  const [targetFps, setTargetFps] = useState(60);

  const screenAspectRatio = SCREEN_HEIGHT / SCREEN_WIDTH;
  const format = useCameraFormat(device, [
    { fps: 60 }, //
    { fps: targetFps },
    { videoAspectRatio: screenAspectRatio },
    { videoResolution: 'max' },
    { photoAspectRatio: screenAspectRatio },
    { photoResolution: 'max' },
  ]);

  //#region Memos
  const [targetFps, setTargetFps] = useState(30);
  const fps = Math.min(format?.maxFps ?? 1, targetFps);

  const supportsFlash = device?.hasFlash ?? false;
  const supportsHdr = format?.supportsPhotoHDR;
  const supports60Fps = (format?.maxFps ?? 0) >= 60;
  const supports60Fps = useMemo(() => device?.formats.some((f) => f.maxFps >= 60), [device?.formats]);
  const canToggleNightMode = device?.supportsLowLightBoost ?? false;
  //#endregion

  //#region Animated Zoom
  // This just maps the zoom factor to a percentage value.
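For reference, this is roughly what the example page above does with the new hooks, condensed into a self-contained sketch; the screen-dimension handling is an assumption, since `SCREEN_HEIGHT`/`SCREEN_WIDTH` live in the example app's own `Constants` file:

```ts
import { useState } from 'react'
import { Dimensions } from 'react-native'
import { useCameraDevice, useCameraFormat } from 'react-native-vision-camera'

export function useCameraSetup() {
  const device = useCameraDevice('back', {
    physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'],
  })

  const [targetFps] = useState(60)

  // Prefer a format that matches the screen's aspect ratio, hits the target FPS,
  // and maximizes video/photo resolution via the new 'max' flag.
  const screenAspectRatio = Dimensions.get('window').height / Dimensions.get('window').width
  const format = useCameraFormat(device, [
    { fps: targetFps },
    { videoAspectRatio: screenAspectRatio },
    { videoResolution: 'max' },
    { photoAspectRatio: screenAspectRatio },
    { photoResolution: 'max' },
  ])

  // Clamp the requested FPS to what the chosen format actually supports.
  const fps = Math.min(format?.maxFps ?? 1, targetFps)

  return { device, format, fps }
}
```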
@@ -90,9 +90,9 @@ enum DeviceError: String {
    case .configureError:
      return "Failed to lock the device for configuration."
    case .noDevice:
      return "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device."
      return "No device was set! Use `useCameraDevice(..)` or `Camera.getAvailableCameraDevices()` to select a suitable Camera device."
    case .invalid:
      return "The given Camera device was invalid. Use `getAvailableCameraDevices()` to select a suitable Camera device."
      return "The given Camera device was invalid. Use `useCameraDevice(..)` or `Camera.getAvailableCameraDevices()` to select a suitable Camera device."
    case .flashUnavailable:
      return "The Camera Device does not have a flash unit! Make sure you select a device where `hasFlash`/`hasTorch` is true!"
    case .lowLightBoostNotSupported:
@@ -133,7 +133,7 @@ enum FormatError {
  var message: String {
    switch self {
    case .invalidFormat:
      return "The given format was invalid. Did you check if the current device supports the given format by using `getAvailableCameraDevices(...)`?"
      return "The given format was invalid. Did you check if the current device supports the given format in `device.formats`?"
    case let .invalidFps(fps):
      return "The given format cannot run at \(fps) FPS! Make sure your FPS is lower than `format.maxFps` but higher than `format.minFps`."
    case .invalidHdr:
@@ -19,7 +19,6 @@ RCT_EXTERN_METHOD(getMicrophonePermissionStatus : (RCTPromiseResolveBlock)resolv
RCT_EXTERN_METHOD(requestCameraPermission : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(requestMicrophonePermission : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject);

RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(getAvailableCameraDevices);
RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(installFrameProcessorBindings);

// Camera View Properties
@@ -37,18 +37,17 @@ type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
 *
 * Read the [VisionCamera documentation](https://react-native-vision-camera.com/) for more information.
 *
 * The `<Camera>` component's most important (and therefore _required_) properties are:
 * The `<Camera>` component's most important properties are:
 *
 * * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevice | useCameraDevice()} hook, or manually by using the {@linkcode CameraDevices.getAvailableCameraDevices CameraDevices.getAvailableCameraDevices()} function.
 * * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevice | useCameraDevice(..)} hook, or manually by using the {@linkcode CameraDevices.getAvailableCameraDevices CameraDevices.getAvailableCameraDevices()} function.
 * * {@linkcode CameraProps.isActive | isActive}: A boolean value that specifies whether the Camera should actively stream video frames or not. This can be compared to a Video component, where `isActive` specifies whether the video is paused or not. If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again.
 *
 * @example
 * ```tsx
 * function App() {
 *   const devices = useCameraDevices('wide-angle-camera')
 *   const device = devices.back
 *   const device = useCameraDevice('back')
 *
 *   if (device == null) return <LoadingView />
 *   if (device == null) return <NoCameraErrorView />
 *   return (
 *     <Camera
 *       style={StyleSheet.absoluteFill}
@@ -256,7 +255,7 @@ export class Camera extends React.PureComponent<CameraProps> {
  /**
   * Get a list of all available camera devices on the current phone.
   *
   * If you use Hooks, use the `useCameraDevices()` hook instead.
   * If you use Hooks, use the `useCameraDevices(..)` hook instead.
   *
   * * For Camera Devices attached to the phone, it is safe to assume that this will never change.
   * * For external Camera Devices (USB cameras, Mac continuity cameras, etc.) the available Camera Devices could change over time when the external Camera device gets plugged in or plugged out, so use {@link addCameraDevicesChangedListener | addCameraDevicesChangedListener(...)} to listen for such changes.
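A slightly fuller, runnable version of the JSDoc example above; `<NoCameraErrorView />` is a placeholder in the docs, so any fallback view works:

```tsx
import * as React from 'react'
import { StyleSheet, Text, View } from 'react-native'
import { Camera, useCameraDevice } from 'react-native-vision-camera'

export function App(): React.ReactElement {
  const device = useCameraDevice('back')

  // Render a fallback while no matching Camera Device is available.
  if (device == null) {
    return (
      <View style={StyleSheet.absoluteFill}>
        <Text>No Camera Device found!</Text>
      </View>
    )
  }

  return <Camera style={StyleSheet.absoluteFill} device={device} isActive={true} />
}
```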
@@ -4,9 +4,9 @@ import type { PixelFormat } from './PixelFormat';
/**
 * Represents the camera device position.
 *
 * * `"back"`: Indicates that the device is physically located on the back of the system hardware
 * * `"front"`: Indicates that the device is physically located on the front of the system hardware
 * * `"external"`: The camera device is an external camera, and has no fixed facing relative to the device's screen.
 * * `"back"`: Indicates that the device is physically located on the back of the phone
 * * `"front"`: Indicates that the device is physically located on the front of the phone
 * * `"external"`: The camera device is an external camera, and has no fixed facing relative to the phone. (e.g. USB or Continuity Cameras)
 */
export type CameraPosition = 'front' | 'back' | 'external';
@@ -46,7 +46,14 @@ export type AutoFocusSystem = 'contrast-detection' | 'phase-detection' | 'none';
export type VideoStabilizationMode = 'off' | 'standard' | 'cinematic' | 'cinematic-extended' | 'auto';

/**
 * A Camera Device's video format. Do not create instances of this type yourself, only use {@linkcode Camera.getAvailableCameraDevices | Camera.getAvailableCameraDevices()}.
 * A Camera Device's stream-configuration format.
 *
 * A format specifies:
 * - Video Resolution (`videoWidth`/`videoHeight`)
 * - Photo Resolution (`photoWidth`/`photoHeight`)
 * - Possible FPS ranges (`fps`)
 * - Video Stabilization Modes (`videoStabilizationModes`)
 * - Pixel Formats (`pixelFormats`)
 */
export interface CameraDeviceFormat {
  /**
@@ -134,7 +141,13 @@ export interface CameraDevice {
   */
  physicalDevices: PhysicalCameraDeviceType[];
  /**
   * Specifies the physical position of this camera. (back or front)
   * Specifies the physical position of this camera.
   * - `back`: The Camera Device is located on the back of the phone. These devices can be used for capturing what's in front of the user.
   * - `front`: The Camera Device is located on the front of the phone. These devices can be used for selfies or FaceTime.
   * - `external`: The Camera Device is an external device. These devices can be either:
   *   - USB Camera Devices (if they support the [USB Video Class (UVC) Specification](https://en.wikipedia.org/wiki/List_of_USB_video_class_devices))
   *   - [Continuity Camera Devices](https://support.apple.com/en-us/HT213244) (e.g. your iPhone's or Mac's Camera connected through WiFi/Continuity)
   *   - Bluetooth/WiFi Camera Devices (if they are supported in the platform-native Camera APIs; Camera2 and AVFoundation)
   */
  position: CameraPosition;
  /**
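Because formats and positions are now plain fields on the device object, capability checks are simple lookups. A small sketch using the types exported from the package root (see the `index.ts` change further down in this commit):

```ts
import type { CameraDevice } from 'react-native-vision-camera'

// Does this device have any format that can record at 60 FPS?
export function supports60Fps(device: CameraDevice): boolean {
  return device.formats.some((format) => format.maxFps >= 60)
}

// List only external cameras (USB/UVC or Continuity Camera devices).
export function externalCameras(devices: CameraDevice[]): CameraDevice[] {
  return devices.filter((device) => device.position === 'external')
}
```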
@@ -22,9 +22,9 @@ export interface CameraProps extends ViewProps {
   *
   * @example
   * ```tsx
   * const devices = useCameraDevices('wide-angle-camera')
   * const device = devices.back
   * const device = useCameraDevice('back')
   *
   * if (device == null) return <NoCameraErrorView />
   * return (
   *   <Camera
   *     device={device}
@@ -122,13 +122,13 @@ export interface CameraProps extends ViewProps {
  /**
   * Specify the frames per second this camera should use. Make sure the given `format` includes a frame rate range with the given `fps`.
   *
   * Requires `format` to be set.
   * Requires a `format` to be set that supports the given `fps`.
   */
  fps?: number;
  /**
   * Enables or disables HDR on this camera device. Make sure the given `format` supports HDR mode.
   *
   * Requires `format` to be set.
   * Requires a `format` to be set that supports `photoHDR`/`videoHDR`.
   */
  hdr?: boolean;
  /**
@@ -155,7 +155,7 @@ export interface CameraProps extends ViewProps {
  /**
   * Enables or disables low-light boost on this camera device. Make sure the given `format` supports low-light boost.
   *
   * Requires `format` to be set.
   * Requires a `format` to be set that supports `lowLightBoost`.
   */
  lowLightBoost?: boolean;
  /**
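In practice that means the format filter and the props should be derived from the same requirements. A sketch, passing the `format` prop alongside `fps`/`hdr` as the doc comments above require:

```tsx
import * as React from 'react'
import { StyleSheet } from 'react-native'
import { Camera, useCameraDevice, useCameraFormat } from 'react-native-vision-camera'

export function HdrCamera(): React.ReactElement | null {
  const device = useCameraDevice('back')
  // Ask for a format that supports 60 FPS and Photo/Video HDR.
  const format = useCameraFormat(device, [{ fps: 60 }, { photoHDR: true }, { videoHDR: true }])

  if (device == null) return null

  // Only enable the props the chosen format actually supports.
  const fps = format != null ? Math.min(format.maxFps, 60) : undefined
  const hdr = format != null ? format.supportsPhotoHDR || format.supportsVideoHDR : undefined

  return (
    <Camera
      style={StyleSheet.absoluteFill}
      device={device}
      format={format}
      fps={fps}
      hdr={hdr}
      isActive={true}
    />
  )
}
```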
@@ -2,11 +2,21 @@ import type { Orientation } from './Orientation';
import { PixelFormat } from './PixelFormat';

/**
 * A single frame, as seen by the camera.
 * A single frame, as seen by the camera. This is backed by a C++ HostObject wrapping the native GPU buffer.
 * At a 4k resolution, a Frame can be 1.5MB in size.
 *
 * @example
 * ```ts
 * const frameProcessor = useFrameProcessor((frame) => {
 *   'worklet'
 *   console.log(`Frame: ${frame.width}x${frame.height} (${frame.pixelFormat})`)
 * }, [])
 * ```
 */
export interface Frame {
  /**
   * Whether the underlying buffer is still valid or not. The buffer will be released after the frame processor returns, or `close()` is called.
   * Whether the underlying buffer is still valid or not.
   * A Frame is valid as long as your Frame Processor (or a `runAsync(..)` operation) is still running.
   */
  isValid: boolean;
  /**
@@ -37,7 +47,7 @@ export interface Frame {
   * Represents the orientation of the Frame.
   *
   * Some ML Models are trained for specific orientations, so they need to be taken into
   * consideration when running a frame processor. See also: `isMirrored`
   * consideration when running a frame processor. See also: {@linkcode isMirrored}
   */
  orientation: Orientation;
  /**
@@ -47,8 +57,21 @@ export interface Frame {

  /**
   * Get the underlying data of the Frame as a uint8 array buffer.
   * The format of the buffer depends on the Frame's {@linkcode pixelFormat}.
   *
   * Note that Frames are allocated on the GPU, so calling `toArrayBuffer()` will copy from the GPU to the CPU.
   *
   * @example
   * ```ts
   * const frameProcessor = useFrameProcessor((frame) => {
   *   'worklet'
   *
   *   if (frame.pixelFormat === 'rgb') {
   *     const data = frame.toArrayBuffer()
   *     console.log(`Pixel at 0,0: RGB(${data[0]}, ${data[1]}, ${data[2]})`)
   *   }
   * }, [])
   * ```
   */
  toArrayBuffer(): Uint8Array;
  /**
@@ -61,17 +84,20 @@ export interface Frame {
  toString(): string;
}

/** @internal */
export interface FrameInternal extends Frame {
  /**
   * Increment the Frame Buffer ref-count by one.
   *
   * This is a private API, do not use this.
   * @internal
   */
  incrementRefCount(): void;
  /**
   * Decrement the Frame Buffer ref-count by one.
   *
   * This is a private API, do not use this.
   * @internal
   */
  decrementRefCount(): void;
}
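A small illustration of the validity guarantee described above: as long as the work happens synchronously inside the Frame Processor, the GPU-backed buffer stays valid. This sketch only uses the `Frame` fields shown in this file:

```ts
import { useFrameProcessor } from 'react-native-vision-camera'

export function useFrameLogger() {
  return useFrameProcessor((frame) => {
    'worklet'
    // The Frame is valid for the synchronous duration of this Frame Processor.
    if (!frame.isValid) return
    console.log(`${frame.width}x${frame.height} ${frame.pixelFormat} (orientation: ${frame.orientation})`)
  }, [])
}
```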
@@ -25,7 +25,7 @@ export interface TakePhotoOptions {
   */
  enableAutoRedEyeReduction?: boolean;
  /**
   * Indicates whether still image stabilization will be employed when capturing the photo
   * Indicates whether still image stabilization will be enabled when capturing the photo
   *
   * @default false
   */
@@ -24,39 +24,57 @@ export interface DeviceFilter {
}

/**
 * Get the best matching Camera device that satisfies your requirements using a sorting filter.
 * @param devices All available Camera Devices this function will use for filtering. To get devices, use `Camera.getAvailableCameraDevices()`.
 * @param filter The filter you want to use. The device that matches your filter the closest will be returned.
 * @returns The device that matches your filter the closest.
 * Get the best matching Camera device that best satisfies your requirements using a sorting filter.
 * @param position The position of the Camera device relative to the phone.
 * @param filter The filter you want to use. The Camera device that matches your filter the closest will be returned
 * @returns The Camera device that matches your filter the closest, or `undefined` if no such Camera Device exists on the given {@linkcode position}.
 * @example
 * ```ts
 * const devices = Camera.getAvailableCameraDevices()
 * const device = getCameraDevice(devices, 'back', {
 *   physicalDevices: ['wide-angle-camera']
 * })
 * ```
 */
export function getCameraDevice(devices: CameraDevice[], position: CameraPosition, filter: DeviceFilter = {}): CameraDevice {
  const explicitlyWantsNonWideAngle = filter.physicalDevices != null && !filter.physicalDevices.includes('wide-angle-camera');

  const filtered = devices.filter((d) => d.position === position);
  const sortedDevices = filtered.sort((left, right) => {

  let bestDevice = filtered[0];
  if (bestDevice == null) throw new CameraRuntimeError('device/invalid-device', 'No Camera Device could be found!');

  // Compare each device using a point scoring system
  for (const device of devices) {
    let leftPoints = 0;
    let rightPoints = 0;

    // prefer higher hardware-level
    if (left.hardwareLevel === 'full') leftPoints += 4;
    if (right.hardwareLevel === 'full') rightPoints += 4;
    if (bestDevice.hardwareLevel === 'full') leftPoints += 4;
    if (device.hardwareLevel === 'full') rightPoints += 4;

    if (!explicitlyWantsNonWideAngle) {
      // prefer wide-angle-camera as a default
      if (bestDevice.physicalDevices.includes('wide-angle-camera')) leftPoints += 1;
      if (device.physicalDevices.includes('wide-angle-camera')) rightPoints += 1;
    }

    // compare devices. two possible scenarios:
    // 1. user wants all cameras ([ultra-wide, wide, tele]) to zoom. prefer those devices that have all 3 cameras.
    // 2. user wants only one ([wide]) for faster performance. prefer those devices that only have one camera, if they have more, we rank them lower.
    if (filter.physicalDevices != null) {
      for (const device of left.physicalDevices) {
        if (filter.physicalDevices.includes(device)) leftPoints += 1;
      for (const d of bestDevice.physicalDevices) {
        if (filter.physicalDevices.includes(d)) leftPoints += 1;
        else leftPoints -= 1;
      }
      for (const device of right.physicalDevices) {
        if (filter.physicalDevices.includes(device)) rightPoints += 1;
      for (const d of device.physicalDevices) {
        if (filter.physicalDevices.includes(d)) rightPoints += 1;
        else rightPoints -= 1;
      }
    }

    return leftPoints - rightPoints;
  });
    if (rightPoints > leftPoints) bestDevice = device;
  }

  const device = sortedDevices[0];
  if (device == null) throw new CameraRuntimeError('device/invalid-device', 'No Camera Device could be found!');
  return device;
  return bestDevice;
}
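The two scenarios from the comments above, written out as calls. This assumes `getCameraDevice` is re-exported from the package root; if it is not, the same filter can be passed to the `useCameraDevice(..)` hook instead:

```ts
import { Camera, getCameraDevice } from 'react-native-vision-camera'

const devices = Camera.getAvailableCameraDevices()

// Scenario 1: the user wants the full zoom range, so a device that combines
// ultra-wide + wide + telephoto lenses scores highest.
const zoomDevice = getCameraDevice(devices, 'back', {
  physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'],
})

// Scenario 2: the user only needs the wide-angle lens, so single-lens devices
// rank higher than multi-cams (extra lenses now subtract points).
const fastDevice = getCameraDevice(devices, 'back', {
  physicalDevices: ['wide-angle-camera'],
})
```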
@@ -12,12 +12,12 @@ export interface FormatFilter {
   * The target resolution of the video (and frame processor) output pipeline.
   * If no format supports the given resolution, the format closest to this value will be used.
   */
  videoResolution?: Size;
  videoResolution?: Size | 'max';
  /**
   * The target resolution of the photo output pipeline.
   * If no format supports the given resolution, the format closest to this value will be used.
   */
  photoResolution?: Size;
  photoResolution?: Size | 'max';
  /**
   * The target aspect ratio of the video (and preview) output, expressed as a factor: `width / height`.
   *
@@ -58,6 +58,14 @@ export interface FormatFilter {
   * If no format supports the target pixel format, the best other matching format will be used.
   */
  pixelFormat?: PixelFormat;
  /**
   * Whether you want to find a format that supports Photo HDR.
   */
  photoHDR?: boolean;
  /**
   * Whether you want to find a format that supports Video HDR.
   */
  videoHDR?: boolean;
}

type FilterWithPriority<T> = {
@@ -84,96 +92,121 @@ function filtersToFilterMap(filters: FormatFilter[]): FilterMap {

/**
 * Get the best matching Camera format for the given device that satisfies your requirements using a sorting filter. By default, formats are sorted by highest to lowest resolution.
 *
 * The {@linkcode filters | filters} are ranked by priority, from highest to lowest.
 * This means the first item you pass will have a higher priority than the second, and so on.
 *
 * @param device The Camera Device you're currently using
 * @param filters The filter you want to use. The format that matches your filter the closest will be returned. The filter is ranked by priority, descending.
 * @param filter The filter you want to use. The format that matches your filter the closest will be returned
 * @returns The format that matches your filter the closest.
 *
 * @example
 * ```ts
 * const format = getCameraFormat(device, [
 *   { videoResolution: { width: 3048, height: 2160 } },
 *   { fps: 60 }
 * ])
 * ```
 */
export function getCameraFormat(device: CameraDevice, filters: FormatFilter[]): CameraDeviceFormat {
  // Combine filters into a single filter map for constant-time lookup
  const filter = filtersToFilterMap(filters);

  // Sort list because we will pick first element
  // TODO: Use reduce instead of sort?
  const copy = [...device.formats];
  const sortedFormats = copy.sort((left, right) => {
  let bestFormat = device.formats[0];
  if (bestFormat == null)
    throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`);

  // Compare each format using a point scoring system
  for (const format of device.formats) {
    let leftPoints = 0;
    let rightPoints = 0;

    const leftVideoResolution = left.videoWidth * left.videoHeight;
    const rightVideoResolution = right.videoWidth * right.videoHeight;
    const leftVideoResolution = bestFormat.videoWidth * bestFormat.videoHeight;
    const rightVideoResolution = format.videoWidth * format.videoHeight;
    if (filter.videoResolution != null) {
      // Find video resolution closest to the filter (ignoring orientation)
      const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height;
      const leftDiff = Math.abs(leftVideoResolution - targetResolution);
      const rightDiff = Math.abs(rightVideoResolution - targetResolution);
      if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority;
    } else {
      // No filter is set, so just prefer higher resolutions
      if (leftVideoResolution > rightVideoResolution) leftPoints++;
      else if (rightVideoResolution > leftVideoResolution) rightPoints++;
      if (filter.videoResolution.target === 'max') {
        // We just want the maximum resolution
        if (leftVideoResolution > rightVideoResolution) leftPoints += filter.videoResolution.priority;
        if (rightVideoResolution > leftVideoResolution) rightPoints += filter.videoResolution.priority;
      } else {
        // Find video resolution closest to the filter (ignoring orientation)
        const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height;
        const leftDiff = Math.abs(leftVideoResolution - targetResolution);
        const rightDiff = Math.abs(rightVideoResolution - targetResolution);
        if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority;
        if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority;
      }
    }

    const leftPhotoResolution = left.photoWidth * left.photoHeight;
    const rightPhotoResolution = right.photoWidth * right.photoHeight;
    const leftPhotoResolution = bestFormat.photoWidth * bestFormat.photoHeight;
    const rightPhotoResolution = format.photoWidth * format.photoHeight;
    if (filter.photoResolution != null) {
      // Find closest photo resolution to the filter (ignoring orientation)
      const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height;
      const leftDiff = Math.abs(leftPhotoResolution - targetResolution);
      const rightDiff = Math.abs(rightPhotoResolution - targetResolution);
      if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority;
    } else {
      // No filter is set, so just prefer higher resolutions
      if (leftPhotoResolution > rightPhotoResolution) leftPoints++;
      else if (rightPhotoResolution > leftPhotoResolution) rightPoints++;
      if (filter.photoResolution.target === 'max') {
        // We just want the maximum resolution
        if (leftPhotoResolution > rightPhotoResolution) leftPoints += filter.photoResolution.priority;
        if (rightPhotoResolution > leftPhotoResolution) rightPoints += filter.photoResolution.priority;
      } else {
        // Find closest photo resolution to the filter (ignoring orientation)
        const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height;
        const leftDiff = Math.abs(leftPhotoResolution - targetResolution);
        const rightDiff = Math.abs(rightPhotoResolution - targetResolution);
        if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority;
        if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority;
      }
    }

    // Find closest aspect ratio (video)
    if (filter.videoAspectRatio != null) {
      const leftAspect = left.videoWidth / right.videoHeight;
      const rightAspect = right.videoWidth / right.videoHeight;
      const leftAspect = bestFormat.videoWidth / bestFormat.videoHeight;
      const rightAspect = format.videoWidth / format.videoHeight;
      const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target);
      const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target);
      if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority;
      if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority;
    }

    // Find closest aspect ratio (photo)
    if (filter.photoAspectRatio != null) {
      const leftAspect = left.photoWidth / right.photoHeight;
      const rightAspect = right.photoWidth / right.photoHeight;
      const leftAspect = bestFormat.photoWidth / bestFormat.photoHeight;
      const rightAspect = format.photoWidth / format.photoHeight;
      const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target);
      const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target);
      if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority;
      if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority;
    }

    // Find closest max FPS
    if (filter.fps != null) {
      const leftDiff = Math.abs(left.maxFps - filter.fps.target);
      const rightDiff = Math.abs(right.maxFps - filter.fps.target);
      if (leftDiff < rightDiff) leftPoints += filter.fps.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.fps.priority;
      if (bestFormat.maxFps >= filter.fps.target) leftPoints += filter.fps.priority;
      if (format.maxFps >= filter.fps.target) rightPoints += filter.fps.priority;
    }

    // Find video stabilization mode
    if (filter.videoStabilizationMode != null) {
      if (left.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++;
      if (right.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++;
      if (bestFormat.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++;
      if (format.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++;
    }

    // Find pixel format
    if (filter.pixelFormat != null) {
      if (left.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++;
      if (right.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++;
      if (bestFormat.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++;
      if (format.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++;
    }

    return rightPoints - leftPoints;
  });
    // Find Photo HDR formats
    if (filter.photoHDR != null) {
      if (bestFormat.supportsPhotoHDR === filter.photoHDR.target) leftPoints++;
      if (format.supportsPhotoHDR === filter.photoHDR.target) rightPoints++;
    }

  const format = sortedFormats[0];
  if (format == null)
    throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`);
  return format;
    // Find Video HDR formats
    if (filter.videoHDR != null) {
      if (bestFormat.supportsVideoHDR === filter.videoHDR.target) leftPoints++;
      if (format.supportsVideoHDR === filter.videoHDR.target) rightPoints++;
    }

    if (rightPoints > leftPoints) bestFormat = format;
  }

  return bestFormat;
}
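Since filter order is priority order, the sketch below prefers hitting 60 FPS over resolution, and only then maximizes resolution via the new `'max'` flag, mirroring the filters used in the example app earlier in this commit:

```ts
import { useCameraDevice, useCameraFormat } from 'react-native-vision-camera'

export function use60FpsFormat() {
  const device = useCameraDevice('back')

  // Filters are ranked by priority, highest first:
  // 1. try to find a format whose maxFps reaches 60,
  // 2. then pick the largest video resolution,
  // 3. then the largest photo resolution.
  return useCameraFormat(device, [
    { fps: 60 },
    { videoResolution: 'max' },
    { photoResolution: 'max' },
  ])
}
```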
@@ -7,11 +7,10 @@ import { useCameraDevices } from './useCameraDevices';
 * Get the best matching Camera device that best satisfies your requirements using a sorting filter.
 * @param position The position of the Camera device relative to the phone.
 * @param filter The filter you want to use. The Camera device that matches your filter the closest will be returned
 * @returns The Camera device that matches your filter the closest.
 * @returns The Camera device that matches your filter the closest, or `undefined` if no such Camera Device exists on the given {@linkcode position}.
 * @example
 * ```ts
 * const [position, setPosition] = useState<CameraPosition>('back')
 * const device = useCameraDevice(position, {
 * const device = useCameraDevice('back', {
 *   physicalDevices: ['wide-angle-camera']
 * })
 * ```
@@ -2,7 +2,7 @@ export * from './Camera';
export * from './CameraDevice';
export * from './CameraError';
export * from './CameraProps';
export { Frame } from './Frame';
export * from './Frame';
export * from './FrameProcessorPlugins';
export * from './Orientation';
export * from './PhotoFile';