feat: New JS API for useCameraDevice and useCameraFormat and much faster getAvailableCameraDevices() (#1784)

* Update podfile

* Update useCameraFormat.ts

* Update API

* Delete FormatFilter.md

* Format CameraViewManager.m ObjC style

* Make `getAvailableCameraDevices` synchronous/blocking

* Create some docs

* fix: Fix HardwareLevel types

* fix: Use new device/format API

* Use 60 FPS format as an example

* Replace `Camera.getAvailableCameraDevices` with new `CameraDevices` API/Module

* Fix Lint

* KTLint options

* Use continuation indent of 8

* Use 2 spaces for indent

* Update .editorconfig

* Format code

* Update .editorconfig

* Format more

* Update VideoStabilizationMode.kt

* fix: Expose `CameraDevicesManager` to ObjC

* Update CameraPage.tsx

* fix: `requiresMainQueueSetup() -> false`

* Always prefer higher resolution

* Update CameraDevicesManager.swift

* Update CameraPage.tsx

* Also filter pixelFormat

* fix: Add AVFoundation import
Marc Rousavy
2023-09-21 11:20:33 +02:00
committed by GitHub
parent 9eed89aac6
commit 977b859e46
61 changed files with 1110 additions and 815 deletions

View File

@@ -9,6 +9,8 @@ import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
import type { Point } from './Point';
import type { RecordVideoOptions, VideoFile } from './VideoFile';
import { VisionCameraProxy } from './FrameProcessorPlugins';
import { CameraDevices } from './CameraDevices';
import type { EmitterSubscription } from 'react-native';
//#region Types
export type CameraPermissionStatus = 'granted' | 'not-determined' | 'denied' | 'restricted';
@@ -37,7 +39,7 @@ type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
*
* The `<Camera>` component's most important (and therefore _required_) properties are:
*
* * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevices | useCameraDevices()} hook, or manually by using the {@linkcode Camera.getAvailableCameraDevices Camera.getAvailableCameraDevices()} function.
* * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevice | useCameraDevice()} hook, or manually by using the {@linkcode CameraDevices.getAvailableCameraDevices CameraDevices.getAvailableCameraDevices()} function.
* * {@linkcode CameraProps.isActive | isActive}: A boolean value that specifies whether the Camera should actively stream video frames or not. This can be compared to a Video component, where `isActive` specifies whether the video is paused or not. If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again.
*
* @example
@@ -116,12 +118,6 @@ export class Camera extends React.PureComponent<CameraProps> {
/**
* Start a new video recording.
*
* Records in the following formats:
* * **iOS**: QuickTime (`.mov`)
* * **Android**: MPEG4 (`.mp4`)
*
* @blocking This function is synchronized/blocking.
*
* @throws {@linkcode CameraCaptureError} When any kind of error occurred while starting the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
*
* @example
@@ -144,8 +140,8 @@ export class Camera extends React.PureComponent<CameraProps> {
if (error != null) return onRecordingError(error);
if (video != null) return onRecordingFinished(video);
};
// TODO: Use TurboModules to either make this a sync invokation, or make it async.
try {
// TODO: Use TurboModules to make this awaitable.
CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback);
} catch (e) {
throw tryParseNativeCameraError(e);
@@ -231,8 +227,8 @@ export class Camera extends React.PureComponent<CameraProps> {
/**
* Focus the camera to a specific point in the coordinate system.
* @param {Point} point The point to focus to. This should be relative to the Camera view's coordinate system,
* and expressed in Pixel on iOS and Points on Android.
* @param {Point} point The point to focus to. This should be relative
* to the Camera view's coordinate system and is expressed in points.
* * `(0, 0)` means **top left**.
* * `(CameraView.width, CameraView.height)` means **bottom right**.
*
@@ -257,28 +253,32 @@ export class Camera extends React.PureComponent<CameraProps> {
//#endregion
//#region Static Functions (NativeModule)
/**
* Get a list of all available camera devices on the current phone.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting all available camera devices. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
* If you use Hooks, use the `useCameraDevices()` hook instead.
*
* * For Camera Devices attached to the phone, it is safe to assume that this will never change.
* * For external Camera Devices (USB cameras, Mac continuity cameras, etc.) the available Camera Devices could change over time when the external Camera device gets plugged in or plugged out, so use {@link addCameraDevicesChangedListener | addCameraDevicesChangedListener(...)} to listen for such changes.
*
* @example
* ```ts
* const devices = await Camera.getAvailableCameraDevices()
* const filtered = devices.filter((d) => matchesMyExpectations(d))
* const sorted = devices.sort(sortDevicesByAmountOfCameras)
* return {
* back: sorted.find((d) => d.position === "back"),
* front: sorted.find((d) => d.position === "front")
* }
* const devices = Camera.getAvailableCameraDevices()
* const backCameras = devices.filter((d) => d.position === "back")
* const frontCameras = devices.filter((d) => d.position === "front")
* ```
*/
public static async getAvailableCameraDevices(): Promise<CameraDevice[]> {
try {
return await CameraModule.getAvailableCameraDevices();
} catch (e) {
throw tryParseNativeCameraError(e);
}
public static getAvailableCameraDevices(): CameraDevice[] {
return CameraDevices.getAvailableCameraDevices();
}
/**
* Adds a listener that gets called every time the Camera Devices change, for example
* when an external Camera Device (USB or continuity Camera) gets plugged in or plugged out.
*
* If you use Hooks, use the `useCameraDevices()` hook instead.
*/
public static addCameraDevicesChangedListener(listener: (newDevices: CameraDevice[]) => void): EmitterSubscription {
return CameraDevices.addCameraDevicesChangedListener(listener);
}
/**
* Gets the current Camera Permission Status. Check this before mounting the Camera to ensure

View File

@@ -1,48 +1,30 @@
import type { CameraPosition } from './CameraPosition';
import { Orientation } from './Orientation';
import type { PixelFormat } from './PixelFormat';
/**
* Represents the camera device position.
*
* * `"back"`: Indicates that the device is physically located on the back of the system hardware
* * `"front"`: Indicates that the device is physically located on the front of the system hardware
* * `"external"`: The camera device is an external camera, and has no fixed facing relative to the device's screen.
*/
export type CameraPosition = 'front' | 'back' | 'external';
/**
* Identifiers for a physical camera (one that actually exists on the back/front of the device)
*
* * `"ultra-wide-angle-camera"`: A built-in camera with a shorter focal length than that of a wide-angle camera. (focal length below 24mm)
* * `"wide-angle-camera"`: A built-in wide-angle camera. (focal length between 24mm and 35mm)
* * `"telephoto-camera"`: A built-in camera device with a longer focal length than a wide-angle camera. (focal length above 85mm)
*
* Some Camera devices consist of multiple physical devices. They can be interpreted as _logical devices_, for example:
*
* * `"ultra-wide-angle-camera"` + `"wide-angle-camera"` = **dual wide-angle camera**.
* * `"wide-angle-camera"` + `"telephoto-camera"` = **dual camera**.
* * `"ultra-wide-angle-camera"` + `"wide-angle-camera"` + `"telephoto-camera"` = **triple camera**.
*/
export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-camera' | 'telephoto-camera';
/**
* Identifiers for a logical camera (combinations of multiple physical cameras to create a single logical camera).
*
* * `"dual-camera"`: A combination of wide-angle and telephoto cameras that creates a capture device.
* * `"dual-wide-camera"`: A device that consists of two cameras of fixed focal length, one ultrawide angle and one wide angle.
* * `"triple-camera"`: A device that consists of three cameras of fixed focal length, one ultrawide angle, one wide angle, and one telephoto.
*/
export type LogicalCameraDeviceType = 'dual-camera' | 'dual-wide-camera' | 'triple-camera';
/**
* Parses an array of physical device types into a single {@linkcode PhysicalCameraDeviceType} or {@linkcode LogicalCameraDeviceType}, depending on what matches.
* @method
*/
export const parsePhysicalDeviceTypes = (
physicalDeviceTypes: PhysicalCameraDeviceType[],
): PhysicalCameraDeviceType | LogicalCameraDeviceType => {
if (physicalDeviceTypes.length === 1) {
// @ts-expect-error for very obvious reasons
return physicalDeviceTypes[0];
}
const hasWide = physicalDeviceTypes.includes('wide-angle-camera');
const hasUltra = physicalDeviceTypes.includes('ultra-wide-angle-camera');
const hasTele = physicalDeviceTypes.includes('telephoto-camera');
if (hasTele && hasWide && hasUltra) return 'triple-camera';
if (hasWide && hasUltra) return 'dual-wide-camera';
if (hasWide && hasTele) return 'dual-camera';
throw new Error(`Invalid physical device type combination! ${physicalDeviceTypes.join(' + ')}`);
};
/**
* Indicates a format's autofocus system.
*

View File

@@ -0,0 +1,25 @@
import { NativeModules, NativeEventEmitter } from 'react-native';
import { CameraDevice } from './CameraDevice';
const CameraDevicesManager = NativeModules.CameraDevices as {
getConstants: () => {
availableCameraDevices: CameraDevice[];
};
};
const constants = CameraDevicesManager.getConstants();
let devices = constants.availableCameraDevices;
const DEVICES_CHANGED_NAME = 'CameraDevicesChanged';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const eventEmitter = new NativeEventEmitter(CameraDevicesManager as any);
eventEmitter.addListener(DEVICES_CHANGED_NAME, (newDevices: CameraDevice[]) => {
devices = newDevices;
});
export const CameraDevices = {
getAvailableCameraDevices: () => devices,
addCameraDevicesChangedListener: (callback: (newDevices: CameraDevice[]) => void) => {
return eventEmitter.addListener(DEVICES_CHANGED_NAME, callback);
},
};
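Because the native `CameraDevices` module exposes the current device list as a constant and pushes updates through the `CameraDevicesChanged` event, reads on the JS side are synchronous. A minimal usage sketch of the static `Camera` API backed by this module (the `react-native-vision-camera` package-root import is assumed here):

```ts
import { Camera } from 'react-native-vision-camera'
import type { CameraDevice } from 'react-native-vision-camera'

// Synchronous read - no await required anymore
const devices: CameraDevice[] = Camera.getAvailableCameraDevices()
const backCameras = devices.filter((d) => d.position === 'back')

// React to external (USB/continuity) cameras being plugged in or out
const subscription = Camera.addCameraDevicesChangedListener((newDevices) => {
  console.log(`Camera devices changed, ${newDevices.length} now available`)
})

// Later, when the listener is no longer needed:
subscription.remove()
```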

View File

@@ -1,13 +0,0 @@
/**
* Represents the camera device position.
*
* * `"back"`: Indicates that the device is physically located on the back of the system hardware
* * `"front"`: Indicates that the device is physically located on the front of the system hardware
*
* #### iOS only
* * `"unspecified"`: Indicates that the device's position relative to the system hardware is unspecified
*
* #### Android only
* * `"external"`: The camera device is an external camera, and has no fixed facing relative to the device's screen. (Android only)
*/
export type CameraPosition = 'front' | 'back' | 'unspecified' | 'external';

View File

@@ -0,0 +1,43 @@
export interface Filter<T> {
/**
* The target value for this specific requirement
*/
target: T;
/**
* The priority of this requirement.
* Filters with higher priority can take precedence over filters with lower priority.
*
* For example, if we have two formats:
* ```json
* [
*   { videoWidth: 3840, videoHeight: 2160, maxFps: 30, ... },
*   { videoWidth: 1920, videoHeight: 1080, maxFps: 60, ... }
* ]
* ```
* And your filter looks like this:
* ```json
* {
*   fps: { target: 60, priority: 1 },
*   videoResolution: { target: { width: 4000, height: 2000 }, priority: 3 }
* }
* ```
* The 4K format will be chosen since the `videoResolution` filter has a higher priority (3) than the `fps` filter (1).
*
* To choose the 60 FPS format instead, use a higher priority for the `fps` filter:
* ```json
* {
*   fps: { target: 60, priority: 2 },
*   videoResolution: { target: { width: 4000, height: 2000 }, priority: 1 }
* }
* ```
*/
priority: number;
}
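For example, with the `FormatFilter` accepted by `getCameraFormat` (introduced later in this commit), a higher `priority` decides which requirement wins when no single format satisfies both. A sketch, assuming the helpers are imported from the package root:

```ts
import { Camera, getCameraFormat } from 'react-native-vision-camera'

const device = Camera.getAvailableCameraDevices()[0]
if (device != null) {
  // fps (priority 2) outweighs videoResolution (priority 1), so a 60 FPS
  // format is preferred even if it only records at 1080p instead of 4K.
  const format = getCameraFormat(device, {
    fps: { target: 60, priority: 2 },
    videoResolution: { target: { width: 3840, height: 2160 }, priority: 1 },
  })
  console.log(format.maxFps, format.videoWidth, format.videoHeight)
}
```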

View File

@@ -0,0 +1,62 @@
import { CameraDevice, CameraPosition, PhysicalCameraDeviceType } from '../CameraDevice';
import { CameraRuntimeError } from '../CameraError';
export interface DeviceFilter {
/**
* The desired physical devices your camera device should have.
*
* Many modern phones have multiple Camera devices on one side and can combine those physical camera devices into one logical camera device.
* For example, the iPhone 11 has two physical camera devices, the `ultra-wide-angle-camera` ("fish-eye") and the normal `wide-angle-camera`. You can either use one of those devices individually, or use a combined logical camera device which can smoothly switch over between the two physical cameras depending on the current `zoom` level.
* When the user is at 0.5x-1x zoom, the `ultra-wide-angle-camera` can be used to offer a fish-eye zoom-out effect, and anything above 1x will smoothly switch over to the `wide-angle-camera`.
*
* **Note:** Devices with fewer physical devices (`['wide-angle-camera']`) are usually faster to start up than more complex
* devices (`['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera']`), but don't offer zoom switch-over capabilities.
*
* @example
* ```ts
* // This device is simpler, so it starts up faster.
* getCameraDevice({ physicalDevices: ['wide-angle-camera'] })
* // This device is more complex, so it starts up slower, but you can switch between devices on 0.5x, 1x and 2x zoom.
* getCameraDevice({ physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'] })
* ```
*/
physicalDevices?: PhysicalCameraDeviceType[];
}
/**
* Get the best matching Camera device that satisfies your requirements using a sorting filter.
* @param devices All available Camera Devices this function will use for filtering. To get devices, use `Camera.getAvailableCameraDevices()`.
* @param position The position of the Camera device relative to the phone.
* @param filter The filter you want to use. The device that matches your filter the closest will be returned.
* @returns The device that matches your filter the closest.
*/
export function getCameraDevice(devices: CameraDevice[], position: CameraPosition, filter: DeviceFilter = {}): CameraDevice {
const filtered = devices.filter((d) => d.position === position);
const sortedDevices = filtered.sort((left, right) => {
let leftPoints = 0;
let rightPoints = 0;
// prefer higher hardware-level
if (left.hardwareLevel === 'full') leftPoints += 4;
if (right.hardwareLevel === 'full') rightPoints += 4;
// compare devices. two possible scenarios:
// 1. user wants all cameras ([ultra-wide, wide, tele]) to zoom. prefer those devices that have all 3 cameras.
// 2. user wants only one ([wide]) for faster performance. prefer those devices that only have one camera, if they have more, we rank them lower.
if (filter.physicalDevices != null) {
for (const device of left.devices) {
if (filter.physicalDevices.includes(device)) leftPoints += 1;
else leftPoints -= 1;
}
for (const device of right.devices) {
if (filter.physicalDevices.includes(device)) rightPoints += 1;
else rightPoints -= 1;
}
}
return leftPoints - rightPoints;
});
const device = sortedDevices[0];
if (device == null) throw new CameraRuntimeError('device/invalid-device', 'No Camera Device could be found!');
return device;
}
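A usage sketch covering the two scenarios the comparator is written for (relative imports are used because this commit's `index.ts` does not appear to re-export `getCameraDevice`; adjust the paths to wherever the helper lives in your setup):

```ts
import { Camera } from '../Camera'
import { getCameraDevice } from '../devices/getCameraDevice'

const devices = Camera.getAvailableCameraDevices()

// Scenario 2: a simple single-lens back camera for the fastest startup
const fastDevice = getCameraDevice(devices, 'back', {
  physicalDevices: ['wide-angle-camera'],
})

// Scenario 1: a multi-cam device that can switch lenses while zooming
const zoomDevice = getCameraDevice(devices, 'back', {
  physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'],
})
```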

View File

@@ -0,0 +1,153 @@
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from '../CameraDevice';
import { CameraRuntimeError } from '../CameraError';
import { PixelFormat } from '../PixelFormat';
import { Filter } from './Filter';
interface Size {
width: number;
height: number;
}
export interface FormatFilter {
/**
* The target resolution of the video (and frame processor) output pipeline.
* If no format supports the given resolution, the format closest to this value will be used.
*/
videoResolution?: Filter<Size>;
/**
* The target resolution of the photo output pipeline.
* If no format supports the given resolution, the format closest to this value will be used.
*/
photoResolution?: Filter<Size>;
/**
* The target aspect ratio of the video (and preview) output, expressed as a factor: `width / height`.
*
* In most cases, you want this to be as close to the screen's aspect ratio as possible (usually ~9:16).
*
* @example
* ```ts
* const screen = Dimensions.get('screen')
* videoAspectRatio: { target: screen.width / screen.height, priority: 1 }
* ```
*/
videoAspectRatio?: Filter<number>;
/**
* The target aspect ratio of the photo output, expressed as a factor: `width / height`.
*
* In most cases, you want this to be the same as `videoAspectRatio`, which you often want
* to be as close to the screen's aspect ratio as possible (usually ~9:16)
*
* @example
* ```ts
* const screen = Dimensions.get('screen')
* photoAspectRatio: { target: screen.width / screen.height, priority: 1 }
* ```
*/
photoAspectRatio?: Filter<number>;
/**
* The target FPS you want to record video at.
* If the FPS requirements cannot be met, the format closest to this value will be used.
*/
fps?: Filter<number>;
/**
* The target video stabilization mode you want to use.
* If no format supports the target video stabilization mode, the best other matching format will be used.
*/
videoStabilizationMode?: Filter<VideoStabilizationMode>;
/**
* The target pixel format you want to use.
* If no format supports the target pixel format, the best other matching format will be used.
*/
pixelFormat?: Filter<PixelFormat>;
}
/**
* Get the best matching Camera format for the given device that satisfies your requirements using a sorting filter. By default, formats are sorted by highest to lowest resolution.
* @param device The Camera Device you're currently using
* @param filter The filter you want to use. The format that matches your filter the closest will be returned
* @returns The format that matches your filter the closest.
*/
export function getCameraFormat(device: CameraDevice, filter: FormatFilter): CameraDeviceFormat {
const copy = [...device.formats];
const sortedFormats = copy.sort((left, right) => {
let leftPoints = 0;
let rightPoints = 0;
const leftVideoResolution = left.videoWidth * left.videoHeight;
const rightVideoResolution = right.videoWidth * right.videoHeight;
if (filter.videoResolution != null) {
// Find video resolution closest to the filter (ignoring orientation)
const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height;
const leftDiff = Math.abs(leftVideoResolution - targetResolution);
const rightDiff = Math.abs(rightVideoResolution - targetResolution);
if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority;
else if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority;
} else {
// No filter is set, so just prefer higher resolutions
if (leftVideoResolution > rightVideoResolution) leftPoints++;
else if (rightVideoResolution > leftVideoResolution) rightPoints++;
}
const leftPhotoResolution = left.photoWidth * left.photoHeight;
const rightPhotoResolution = right.photoWidth * right.photoHeight;
if (filter.photoResolution != null) {
// Find closest photo resolution to the filter (ignoring orientation)
const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height;
const leftDiff = Math.abs(leftPhotoResolution - targetResolution);
const rightDiff = Math.abs(rightPhotoResolution - targetResolution);
if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority;
else if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority;
} else {
// No filter is set, so just prefer higher resolutions
if (leftPhotoResolution > rightPhotoResolution) leftPoints++;
else if (rightPhotoResolution > leftPhotoResolution) rightPoints++;
}
// Find closest aspect ratio (video)
if (filter.videoAspectRatio != null) {
const leftAspect = left.videoWidth / left.videoHeight;
const rightAspect = right.videoWidth / right.videoHeight;
const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target);
const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target);
if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority;
else if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority;
}
// Find closest aspect ratio (photo)
if (filter.photoAspectRatio != null) {
const leftAspect = left.photoWidth / left.photoHeight;
const rightAspect = right.photoWidth / right.photoHeight;
const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target);
const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target);
if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority;
else if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority;
}
// Find closest max FPS
if (filter.fps != null) {
const leftDiff = Math.abs(left.maxFps - filter.fps.target);
const rightDiff = Math.abs(right.maxFps - filter.fps.target);
if (leftDiff < rightDiff) leftPoints += filter.fps.priority;
else if (rightDiff < leftDiff) rightPoints += filter.fps.priority;
}
// Find video stabilization mode
if (filter.videoStabilizationMode != null) {
if (left.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++;
if (right.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++;
}
// Find pixel format
if (filter.pixelFormat != null) {
if (left.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++;
if (right.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++;
}
return rightPoints - leftPoints;
});
const format = sortedFormats[0];
if (format == null)
throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`);
return format;
}
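As in the `FormatFilter` docs above, the target aspect ratio would usually be derived from the screen. A rough app-side sketch (package-root imports assumed):

```ts
import { Dimensions } from 'react-native'
import { Camera, getCameraFormat } from 'react-native-vision-camera'

const screen = Dimensions.get('screen')
const device = Camera.getAvailableCameraDevices().find((d) => d.position === 'back')

if (device != null) {
  const format = getCameraFormat(device, {
    videoAspectRatio: { target: screen.width / screen.height, priority: 2 },
    videoResolution: { target: { width: 1920, height: 1080 }, priority: 1 },
    fps: { target: 30, priority: 1 },
  })
  console.log(`Picked ${format.videoWidth}x${format.videoHeight} @ ${format.maxFps} FPS`)
}
```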

View File

@@ -0,0 +1,29 @@
import { useMemo } from 'react';
import { CameraDevice, CameraPosition } from '../CameraDevice';
import { getCameraDevice, DeviceFilter } from '../devices/getCameraDevice';
import { useCameraDevices } from './useCameraDevices';
/**
* Get the Camera device that best satisfies your requirements using a sorting filter.
* @param position The position of the Camera device relative to the phone.
* @param filter The filter you want to use. The Camera device that matches your filter the closest will be returned
* @returns The Camera device that matches your filter the closest.
* @example
* ```ts
* const [position, setPosition] = useState<CameraPosition>('back')
* const device = useCameraDevice(position, {
* physicalDevices: ['wide-angle-camera']
* })
* ```
*/
export function useCameraDevice(position: CameraPosition, filter?: DeviceFilter): CameraDevice | undefined {
const devices = useCameraDevices();
const device = useMemo(
() => getCameraDevice(devices, position, filter),
// eslint-disable-next-line react-hooks/exhaustive-deps
[devices, position, JSON.stringify(filter)],
);
return device;
}

View File

@@ -1,78 +1,23 @@
import { useEffect, useState } from 'react';
import type { CameraPosition } from '../CameraPosition';
import { sortDevices } from '../utils/FormatFilter';
import { Camera } from '../Camera';
import { CameraDevice, LogicalCameraDeviceType, parsePhysicalDeviceTypes, PhysicalCameraDeviceType } from '../CameraDevice';
export type CameraDevices = {
[key in CameraPosition]: CameraDevice | undefined;
};
const DefaultCameraDevices: CameraDevices = {
back: undefined,
external: undefined,
front: undefined,
unspecified: undefined,
};
import type { CameraDevice } from '../CameraDevice';
import { CameraDevices } from '../CameraDevices';
/**
* Gets the best available {@linkcode CameraDevice}. Devices with more cameras are preferred.
* Get all available Camera Devices this phone has.
*
* @returns The best matching {@linkcode CameraDevice}.
* @throws {@linkcode CameraRuntimeError} if no device was found.
* @example
* ```tsx
* const device = useCameraDevice()
* // ...
* return <Camera device={device} />
* ```
* Camera Devices attached to this phone (`back` or `front`) are always available,
* while `external` devices might be plugged in or out at any point,
* so the result of this function might update over time.
*/
export function useCameraDevices(): CameraDevices;
/**
* Gets a {@linkcode CameraDevice} for the requested device type.
*
* @param {PhysicalCameraDeviceType | LogicalCameraDeviceType} deviceType Specifies a device type which will be used as a device filter.
* @returns A {@linkcode CameraDevice} for the requested device type.
* @throws {@linkcode CameraRuntimeError} if no device was found.
* @example
* ```tsx
* const device = useCameraDevice('wide-angle-camera')
* // ...
* return <Camera device={device} />
* ```
*/
export function useCameraDevices(deviceType: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevices;
export function useCameraDevices(deviceType?: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevices {
const [cameraDevices, setCameraDevices] = useState<CameraDevices>(DefaultCameraDevices);
export function useCameraDevices(): CameraDevice[] {
const [devices, setDevices] = useState(() => CameraDevices.getAvailableCameraDevices());
useEffect(() => {
let isMounted = true;
const listener = CameraDevices.addCameraDevicesChangedListener((newDevices) => {
setDevices(newDevices);
});
return () => listener.remove();
}, []);
const loadDevice = async (): Promise<void> => {
let devices = await Camera.getAvailableCameraDevices();
if (!isMounted) return;
devices = devices.sort(sortDevices);
if (deviceType != null) {
devices = devices.filter((d) => {
const parsedType = parsePhysicalDeviceTypes(d.devices);
return parsedType === deviceType;
});
}
setCameraDevices({
back: devices.find((d) => d.position === 'back'),
external: devices.find((d) => d.position === 'external'),
front: devices.find((d) => d.position === 'front'),
unspecified: devices.find((d) => d.position === 'unspecified'),
});
};
loadDevice();
return () => {
isMounted = false;
};
}, [deviceType]);
return cameraDevices;
return devices;
}
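The hook now simply mirrors the module's synchronous device list and re-renders when the change event fires. A minimal consumer might look like this (the component name is illustrative):

```tsx
import React from 'react'
import { Text } from 'react-native'
import { useCameraDevices } from 'react-native-vision-camera'

export function DeviceList(): React.ReactElement {
  const devices = useCameraDevices()
  const external = devices.filter((d) => d.position === 'external')
  return <Text>{`${devices.length} devices (${external.length} external)`}</Text>
}
```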

View File

@@ -1,16 +1,27 @@
import { useMemo } from 'react';
import type { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
import { sortFormats } from '../utils/FormatFilter';
import { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
import { FormatFilter, getCameraFormat } from '../devices/getCameraFormat';
/**
* Returns the best format for the given camera device.
*
* This function tries to choose a format with the highest possible photo-capture resolution and best matching aspect ratio.
*
* @param {CameraDevice} device The Camera Device
*
* @returns The best matching format for the given camera device, or `undefined` if the camera device is `undefined`.
* Get the best matching Camera format for the given device that satisfies your requirements using a sorting filter. By default, formats are sorted by highest to lowest resolution.
* @param device The Camera Device you're currently using
* @param filter The filter you want to use. The format that matches your filter the closest will be returned
* @returns The format that matches your filter the closest.
* @example
* ```ts
* const device = useCameraDevice(...)
* const format = useCameraFormat(device, {
* videoResolution: { target: { width: 3840, height: 2160 }, priority: 2 },
* fps: { target: 60, priority: 1 }
* })
* ```
*/
export function useCameraFormat(device?: CameraDevice): CameraDeviceFormat | undefined {
return useMemo(() => device?.formats.sort(sortFormats)[0], [device?.formats]);
export function useCameraFormat(device: CameraDevice | undefined, filter: FormatFilter): CameraDeviceFormat | undefined {
const format = useMemo(() => {
if (device == null) return undefined;
return getCameraFormat(device, filter);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [device, JSON.stringify(filter)]);
return format;
}
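Putting the two hooks together, roughly what the example app's `CameraPage` switches to in this commit (the exact `<Camera>` props used here are assumptions):

```tsx
import React from 'react'
import { StyleSheet } from 'react-native'
import { Camera, useCameraDevice, useCameraFormat } from 'react-native-vision-camera'

export function CameraScreen(): React.ReactElement | null {
  // Pick a simple back camera, then the format that best matches 1080p at 60 FPS
  const device = useCameraDevice('back', { physicalDevices: ['wide-angle-camera'] })
  const format = useCameraFormat(device, {
    videoResolution: { target: { width: 1920, height: 1080 }, priority: 2 },
    fps: { target: 60, priority: 1 },
  })

  if (device == null) return null
  return <Camera style={StyleSheet.absoluteFill} device={device} format={format} isActive={true} />
}
```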

View File

@@ -1,19 +1,16 @@
export * from './Camera';
export * from './CameraDevice';
export * from './CameraError';
export * from './CameraPosition';
export * from './CameraProps';
export { Frame } from './Frame';
export * from './FrameProcessorPlugins';
export * from './CameraProps';
export * from './PhotoFile';
export * from './PixelFormat';
export * from './Point';
export * from './TemporaryFile';
export * from './VideoFile';
export * from './hooks/useCameraDevices';
export * from './hooks/useCameraDevice';
export * from './hooks/useCameraFormat';
export * from './devices/getCameraFormat';
export * from './hooks/useFrameProcessor';
export * from './utils/FormatFilter';

View File

@@ -1,93 +0,0 @@
import { Dimensions } from 'react-native';
import type { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
/**
* Compares two devices by the following criteria:
* * `wide-angle-camera`s are ranked higher than others
* * Devices with more physical cameras are ranked higher than ones with less. (e.g. "Triple Camera" > "Wide-Angle Camera")
*
* > Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*
* @example
* ```ts
* const devices = camera.devices.sort(sortDevices)
* const bestDevice = devices[0]
* ```
* @method
*/
export const sortDevices = (left: CameraDevice, right: CameraDevice): number => {
let leftPoints = 0;
let rightPoints = 0;
const leftHasWideAngle = left.devices.includes('wide-angle-camera');
const rightHasWideAngle = right.devices.includes('wide-angle-camera');
if (leftHasWideAngle) leftPoints += 2;
if (rightHasWideAngle) rightPoints += 2;
if (left.isMultiCam) leftPoints += 2;
if (right.isMultiCam) rightPoints += 2;
if (left.hardwareLevel === 'full') leftPoints += 3;
if (right.hardwareLevel === 'full') rightPoints += 3;
if (left.hardwareLevel === 'limited') leftPoints += 1;
if (right.hardwareLevel === 'limited') rightPoints += 1;
if (left.hasFlash) leftPoints += 1;
if (right.hasFlash) rightPoints += 1;
const leftMaxResolution = left.formats.reduce(
(prev, curr) => Math.max(prev, curr.videoHeight * curr.videoWidth + curr.photoHeight * curr.photoWidth),
0,
);
const rightMaxResolution = right.formats.reduce(
(prev, curr) => Math.max(prev, curr.videoHeight * curr.videoWidth + curr.photoHeight * curr.photoWidth),
0,
);
if (leftMaxResolution > rightMaxResolution) leftPoints += 3;
if (rightMaxResolution > leftMaxResolution) rightPoints += 3;
// telephoto cameras often have very poor quality.
const leftHasTelephoto = left.devices.includes('telephoto-camera');
const rightHasTelephoto = right.devices.includes('telephoto-camera');
if (leftHasTelephoto) leftPoints -= 2;
if (rightHasTelephoto) rightPoints -= 2;
if (left.devices.length > right.devices.length) leftPoints += 1;
if (right.devices.length > left.devices.length) rightPoints += 1;
return rightPoints - leftPoints;
};
const SCREEN_SIZE = {
width: Dimensions.get('window').width,
height: Dimensions.get('window').height,
};
const SCREEN_ASPECT_RATIO = SCREEN_SIZE.width / SCREEN_SIZE.height;
/**
* Sort formats by resolution and aspect ratio difference (to the Screen size).
*
* > Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*/
export const sortFormats = (left: CameraDeviceFormat, right: CameraDeviceFormat): number => {
let leftPoints = 0,
rightPoints = 0;
// we downscale the points so much that we are in smaller number ranges for future calculations
// e.g. for 4k (4096), this adds 8 points.
leftPoints += Math.round(left.photoWidth / 500);
rightPoints += Math.round(right.photoWidth / 500);
// e.g. for 4k (4096), this adds 8 points.
leftPoints += Math.round(left.videoWidth / 500);
rightPoints += Math.round(right.videoWidth / 500);
// we downscale the points here as well, so if left has 16:9 and right has 21:9, this roughly
// adds 5 points. If the difference is smaller, e.g. 16:9 vs 17:9, this roughly adds a little
// bit over 1 point, just enough to overrule the FPS below.
const leftAspectRatioDiff = left.photoHeight / left.photoWidth - SCREEN_ASPECT_RATIO;
const rightAspectRatioDiff = right.photoHeight / right.photoWidth - SCREEN_ASPECT_RATIO;
leftPoints -= Math.abs(leftAspectRatioDiff) * 10;
rightPoints -= Math.abs(rightAspectRatioDiff) * 10;
return rightPoints - leftPoints;
};