Squash format-filter

Marc Rousavy 2021-02-20 23:20:28 +01:00
parent 99a640238e
commit 04fd5bb069
10 changed files with 203 additions and 223 deletions

View File

@ -1,3 +1,5 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-var-requires */
const path = require('path');
const blacklist = require('metro-config/src/defaults/blacklist');
const escape = require('escape-string-regexp');
@ -27,7 +29,7 @@ module.exports = {
transformer: {
getTransformOptions: async () => ({
transform: {
experimentalImportSupport: false,
experimentalImportSupport: true,
inlineRequires: true,
},
}),

View File

@ -6,7 +6,8 @@
"scripts": {
"android": "react-native run-android",
"ios": "react-native run-ios",
"start": "react-native start"
"start": "react-native start",
"pods": "cd ios && pod install"
},
"dependencies": {
"@react-native-community/blur": "^3.6.0",

View File

@ -10,9 +10,8 @@ import {
} from 'react-native-gesture-handler';
import { Navigation, NavigationFunctionComponent } from 'react-native-navigation';
import type { CameraDevice, CameraDeviceFormat, CameraProps, CameraRuntimeError, PhotoFile, VideoFile } from 'react-native-vision-camera';
import { Camera } from 'react-native-vision-camera';
import { Camera, frameRateIncluded, sortDevices, sortFormatsByResolution, filterFormatsByAspectRatio } from 'react-native-vision-camera';
import { useIsScreenFocused } from './hooks/useIsScreenFocused';
import { compareFormats, frameRateIncluded, formatWithClosestMatchingFps, compareDevices } from './FormatFilter';
import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING } from './Constants';
import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated';
import { useEffect } from 'react';
@ -52,7 +51,11 @@ export const App: NavigationFunctionComponent = ({ componentId }) => {
// camera format settings
const [devices, setDevices] = useState<CameraDevice[]>([]); // All available camera devices, sorted by "best device" (descending)
const device = useMemo<CameraDevice | undefined>(() => devices.find((d) => d.position === cameraPosition), [cameraPosition, devices]);
const formats = useMemo<CameraDeviceFormat[]>(() => device?.formats.sort(compareFormats) ?? [], [device?.formats]);
const formats = useMemo<CameraDeviceFormat[]>(() => {
if (device?.formats == null) return [];
const filtered = filterFormatsByAspectRatio(device.formats);
return filtered.sort(sortFormatsByResolution);
}, [device?.formats]);
//#region Memos
const [targetFps] = useSelector(FpsSelector);
@ -94,7 +97,8 @@ export const App: NavigationFunctionComponent = ({ componentId }) => {
result = result.filter((f) => f.supportsVideoHDR);
}
return formatWithClosestMatchingFps(result, fps);
// find the first format that includes the given FPS
return result.find((f) => f.frameRateRanges.some((r) => frameRateIncluded(r, fps)));
}, [formats, fps, enableHdr]);
//#region Animated Zoom
@ -180,7 +184,7 @@ export const App: NavigationFunctionComponent = ({ componentId }) => {
try {
const availableCameraDevices = await Camera.getAvailableCameraDevices();
console.log(`Devices: ${availableCameraDevices.map((d) => d.name).join(', ')}`);
const sortedDevices = availableCameraDevices.sort(compareDevices);
const sortedDevices = availableCameraDevices.sort(sortDevices);
console.debug(`Devices (sorted): ${sortedDevices.map((d) => d.name).join(', ')}`);
setDevices(sortedDevices);
} catch (e) {

View File

@ -1,207 +0,0 @@
import { SCREEN_HEIGHT, SCREEN_WIDTH, USE_ULTRAWIDE_IF_AVAILABLE } from './Constants';
import type { CameraDevice, CameraDeviceFormat, FrameRateRange } from 'react-native-vision-camera';
/**
* Compares two devices with the following criteria:
* * Cameras with wide-angle-cameras are 5x **better** than cameras without.
* * Cameras with ultra-wide-angle-cameras are 5x **worse** than cameras without.
* * Cameras with more physical devices are "better"
*
* @returns
* * `-1` if left is BETTER than right
* * `0` if left equals right
* * `1` if left is WORSE than right
*
* Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*/
export const compareDevices = (left: CameraDevice, right: CameraDevice): -1 | 0 | 1 => {
let leftPoints = 0;
const leftHasWideAngle = left.devices.includes('wide-angle-camera');
const rightHasWideAngle = right.devices.includes('wide-angle-camera');
if (leftHasWideAngle && !rightHasWideAngle) {
// left does have a wide-angle-camera, but right doesn't.
leftPoints += 5;
} else if (!leftHasWideAngle && rightHasWideAngle) {
// left doesn't have a wide-angle-camera, but right does.
leftPoints -= 5;
}
if (!USE_ULTRAWIDE_IF_AVAILABLE) {
const leftHasUltraWideAngle = left.devices.includes('ultra-wide-angle-camera');
const rightHasUltraWideAngle = right.devices.includes('ultra-wide-angle-camera');
if (leftHasUltraWideAngle && !rightHasUltraWideAngle) {
// left does have an ultra-wide-angle-camera, but right doesn't. Ultra-Wide cameras are bad because of their poor quality.
leftPoints -= 5;
} else if (!leftHasUltraWideAngle && rightHasUltraWideAngle) {
// left doesn't have an ultra-wide-angle-camera, but right does. Ultra-Wide cameras are bad because of their poor quality.
leftPoints += 5;
}
}
if (left.devices.length > right.devices.length) {
// left has more devices than right
leftPoints += 1;
} else if (left.devices.length < right.devices.length) {
// left has fewer devices than right
leftPoints -= 1;
}
if (leftPoints > 0) return -1;
if (leftPoints < 0) return 1;
return 0;
};
type Size = { width: number; height: number };
const CAMERA_VIEW_SIZE: Size = {
width: SCREEN_WIDTH,
height: SCREEN_HEIGHT,
};
const applyScaledMask = (
clippedElementDimensions: Size, // 3024 x 4032 | 2160x3840
maskDimensions: Size, // 375 x 623
): Size => {
const wScale = maskDimensions.width / clippedElementDimensions.width;
const hScale = maskDimensions.height / clippedElementDimensions.height;
if (wScale < hScale) {
return {
width: maskDimensions.width / hScale,
height: clippedElementDimensions.height,
};
} else {
return {
width: clippedElementDimensions.width,
height: maskDimensions.height / wScale,
};
}
};
/**
* Compares two Formats with the following comparators:
* * Photo Dimensions (higher is better) (weights x3)
* * Video Dimensions (higher is better) (weights x2)
* * Max FPS (higher is better) (weights x2)
* * HDR Support (true is better) (weights x2)
* * Max Zoom Factor (higher is better) (weights x1)
* * MaxISO (higher is better) (weights x1)
* * MinISO (lower is better) (weights x1)
*
* @returns
* * `-1` if left is BETTER than right
* * `0` if left equals right
* * `1` if left is WORSE than right
*
* Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" format.
*/
export const compareFormats = (left: CameraDeviceFormat, right: CameraDeviceFormat): -1 | 0 | 1 => {
// Point score of the left format. Higher is better.
let leftPoints = 0;
const leftPhotoPixels = left.photoHeight * left.photoWidth;
const rightPhotoPixels = right.photoHeight * right.photoWidth;
if (leftPhotoPixels > rightPhotoPixels) {
// left has greater photo dimensions
leftPoints += 3;
} else if (leftPhotoPixels < rightPhotoPixels) {
// left has smaller photo dimensions
leftPoints -= 3;
}
const leftCropped = applyScaledMask(
{ width: left.photoHeight, height: left.photoWidth }, // cameras are horizontal, we rotate to portrait
CAMERA_VIEW_SIZE,
);
const rightCropped = applyScaledMask(
{ width: right.photoHeight, height: right.photoWidth }, // cameras are horizontal, we rotate to portrait
CAMERA_VIEW_SIZE,
);
const leftOverflow = left.photoWidth * left.photoHeight - leftCropped.width * leftCropped.height;
const rightOverflow = right.photoWidth * right.photoHeight - rightCropped.width * rightCropped.height;
if (leftOverflow > rightOverflow) {
// left has a higher overflow, aka more pixels that aren't on-screen and therefore wasted. Maybe left is 4:3 and right is 16:9
leftPoints -= 4;
} else if (leftOverflow < rightOverflow) {
// right has a higher overflow, aka more pixels that aren't on-screen and therefore wasted. Maybe right is 4:3 and left is 16:9
leftPoints += 4;
}
if (left.videoHeight != null && left.videoWidth != null && right.videoHeight != null && right.videoWidth != null) {
const leftVideoPixels = left.videoWidth * left.videoHeight ?? 0;
const rightVideoPixels = right.videoWidth * right.videoHeight ?? 0;
if (leftVideoPixels > rightVideoPixels) {
// left has greater video dimensions
leftPoints += 2;
} else if (leftVideoPixels < rightVideoPixels) {
// left has smaller video dimensions
leftPoints -= 2;
}
}
const leftMaxFps = Math.max(...left.frameRateRanges.map((r) => r.maxFrameRate));
const rightMaxFps = Math.max(...right.frameRateRanges.map((r) => r.maxFrameRate));
if (leftMaxFps > rightMaxFps) {
// left has more fps
leftPoints += 2;
} else if (leftMaxFps < rightMaxFps) {
// left has less fps
leftPoints -= 2;
}
if (left.supportsVideoHDR && !right.supportsVideoHDR) {
// left does support video HDR, right doesn't
leftPoints += 1;
} else if (!left.supportsVideoHDR && right.supportsVideoHDR) {
// left doesn't support video HDR, right does
leftPoints -= 1;
}
if (left.supportsPhotoHDR && !right.supportsPhotoHDR) {
// left does support photo HDR, right doesn't
leftPoints += 1;
} else if (!left.supportsPhotoHDR && right.supportsPhotoHDR) {
// left doesn't support photo HDR, right does
leftPoints -= 1;
}
if (leftPoints > 0) return -1;
if (leftPoints < 0) return 1;
return 0;
};
/**
* Selects the smallest difference between a FrameRateRange's `maxFrameRate` and the given `fps`
*/
const smallestFpsDiff = (frameRateRanges: FrameRateRange[], fps: number): number => {
const bestFrameRateRange = frameRateRanges.reduce<FrameRateRange | undefined>((prev, curr) => {
if (prev == null) return curr;
const prevDiff = Math.abs(prev.maxFrameRate - fps);
const currDiff = Math.abs(curr.maxFrameRate - fps);
if (prevDiff < currDiff) return prev;
else return curr;
}, undefined);
const max = bestFrameRateRange?.maxFrameRate ?? 0;
return Math.abs(max - fps);
};
export const frameRateIncluded = (range: FrameRateRange, fps: number): boolean => fps >= range.minFrameRate && fps <= range.maxFrameRate;
const isFpsInFrameRateRange = (format: CameraDeviceFormat, fps: number): boolean => format.frameRateRanges.some((r) => frameRateIncluded(r, fps));
/**
* Selects the format with the closest frame rate ranges to the FPS
*/
export const formatWithClosestMatchingFps = (formats: CameraDeviceFormat[], fps: number): CameraDeviceFormat | undefined =>
formats.reduce<CameraDeviceFormat | undefined>((prev, curr) => {
if (prev == null) return curr;
// if range is 3-30 and FPS is 31, it doesn't match.
if (!isFpsInFrameRateRange(curr, fps)) return prev;
const prevFpsDiff = smallestFpsDiff(prev.frameRateRanges, fps);
const currFpsDiff = smallestFpsDiff(curr.frameRateRanges, fps);
if (currFpsDiff < prevFpsDiff) return curr;
else return prev;
}, undefined);

View File

@ -6,6 +6,6 @@ interface FormatSettings {
export const FormatSettingsAtom = atom<FormatSettings>({
default: {
fps: 50,
fps: 60,
},
});

View File

@ -20,7 +20,8 @@
"ios/**/*.cpp",
"ios/**/*.swift",
"react-native-vision-camera.podspec",
"*.md"
"README.md",
"docs"
],
"scripts": {
"test": "jest",

View File

@ -1,21 +1,64 @@
import { useEffect, useState } from 'react';
import { CameraRuntimeError } from 'src/CameraError';
import { sortDevices } from 'src/utils/FormatFilter';
import { Camera } from '../Camera';
import { CameraDevice, LogicalCameraDeviceType, parsePhysicalDeviceTypes, PhysicalCameraDeviceType } from '../CameraDevice';
export const useCameraDevice = (deviceType: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevice | undefined => {
/**
* Gets the best available `CameraDevice`. Devices with more cameras are preferred.
*
* @returns The best available `CameraDevice`.
* @throws `CameraRuntimeError` if no device was found.
* @example
* const device = useCameraDevice()
* // ...
* return <Camera device={device} />
*/
export function useCameraDevice(): CameraDevice;
/**
* Gets a `CameraDevice` for the requested device type.
*
* @returns A `CameraDevice` for the requested device type, or `undefined` if no matching device was found
*
* @example
* const device = useCameraDevice('wide-angle-camera')
* // ...
* return <Camera device={device} />
*/
export function useCameraDevice(deviceType: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevice | undefined;
export function useCameraDevice(deviceType?: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevice | undefined {
const [device, setDevice] = useState<CameraDevice>();
useEffect(() => {
let isMounted = true;
const loadDevice = async (): Promise<void> => {
const devices = await Camera.getAvailableCameraDevices();
const bestMatch = devices.find((d) => {
const parsedType = parsePhysicalDeviceTypes(d.devices);
return parsedType === deviceType;
});
setDevice(bestMatch);
if (!isMounted) return;
if (deviceType == null) {
// use any device
const sorted = devices.sort(sortDevices);
const bestMatch = sorted[0];
if (bestMatch == null) throw new CameraRuntimeError('device/no-device', 'No Camera device was found!');
setDevice(bestMatch);
} else {
// use specified device (type)
const bestMatch = devices.find((d) => {
const parsedType = parsePhysicalDeviceTypes(d.devices);
return parsedType === deviceType;
});
setDevice(bestMatch);
}
};
loadDevice();
return () => {
isMounted = false;
};
}, [deviceType]);
return device;
};
}
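
A minimal sketch of how the two overloads might be used together (the component and the fallback logic are illustrative assumptions, mirroring the `@example` blocks above):

import React from 'react';
import { Camera, useCameraDevice } from 'react-native-vision-camera';

export function CameraScreen(): JSX.Element {
  // no argument: best overall device, documented to throw if none is found
  const bestDevice = useCameraDevice();
  // with a device type: may return undefined if no such device exists
  const wideAngle = useCameraDevice('wide-angle-camera');
  // prefer the wide-angle device, fall back to the best overall device
  return <Camera device={wideAngle ?? bestDevice} />;
}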

View File

@ -0,0 +1,24 @@
import { useMemo } from 'react';
import type { CameraDevice, CameraDeviceFormat } from 'src/CameraDevice';
import { filterFormatsByAspectRatio, sortFormatsByResolution } from 'src/utils/FormatFilter';
import type { Size } from 'src/utils/FormatFilter';
/**
* Returns the best format for the given camera device.
*
* This function tries to choose a format with the highest possible photo-capture resolution and best matching aspect ratio.
*
* @param device The Camera Device
* @param cameraViewSize The Camera View's size. This can be an approximation and **must be memoized**! Default: `SCREEN_SIZE`
*
* @returns The best matching format for the given camera device, or `undefined` if the camera device is `undefined`.
*/
export function useCameraFormat(device?: CameraDevice, cameraViewSize?: Size): CameraDeviceFormat | undefined {
const formats = useMemo(() => {
if (device?.formats == null) return [];
const filtered = filterFormatsByAspectRatio(device.formats, cameraViewSize);
return filtered.sort(sortFormatsByResolution);
}, [device?.formats, cameraViewSize]);
return formats[0];
}
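
A minimal usage sketch, assuming the `Camera` component accepts the chosen format via a `format` prop (the component name and prop wiring are illustrative):

import React from 'react';
import { Dimensions } from 'react-native';
import { Camera, useCameraDevice, useCameraFormat } from 'react-native-vision-camera';

// module-level constant, so the size reference is stable ("memoized")
const VIEW_SIZE = { width: Dimensions.get('window').width, height: Dimensions.get('window').height };

export function FormatAwareCamera(): JSX.Element | null {
  const device = useCameraDevice('wide-angle-camera');
  const format = useCameraFormat(device, VIEW_SIZE);
  if (device == null) return null;
  return <Camera device={device} format={format} />;
}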

View File

@ -10,3 +10,6 @@ export * from './Point';
export * from './Snapshot';
export * from './TemporaryFile';
export * from './VideoFile';
export * from './hooks/useCameraDevice';
export * from './hooks/useCameraFormat';
export * from './utils/FormatFilter';

src/utils/FormatFilter.ts (new file, 109 lines)
View File

@ -0,0 +1,109 @@
import { Dimensions } from 'react-native';
import type { CameraDevice, CameraDeviceFormat, FrameRateRange } from 'react-native-vision-camera';
/**
* Compares two devices by the following criteria:
* * `wide-angle-camera`s are ranked higher than others
* Devices with more physical cameras are ranked higher than ones with fewer. (e.g. "Triple Camera" > "Wide-Angle Camera")
*
* > Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*
* @example
* const devices = camera.devices.sort(sortDevices)
* const bestDevice = devices[0]
*/
export const sortDevices = (left: CameraDevice, right: CameraDevice): number => {
let leftPoints = 0;
let rightPoints = 0;
const leftHasWideAngle = left.devices.includes('wide-angle-camera');
const rightHasWideAngle = right.devices.includes('wide-angle-camera');
if (leftHasWideAngle) leftPoints += 5;
if (rightHasWideAngle) rightPoints += 5;
if (left.devices.length > right.devices.length) leftPoints += 3;
if (right.devices.length > left.devices.length) rightPoints += 3;
return rightPoints - leftPoints;
};
export type Size = { width: number; height: number };
const SCREEN_SIZE: Size = {
width: Dimensions.get('window').width,
height: Dimensions.get('window').height,
};
const applyScaledMask = (
clippedElementDimensions: Size, // 12 x 12
maskDimensions: Size, // 6 x 12
): Size => {
const wScale = maskDimensions.width / clippedElementDimensions.width; // 0.5
const hScale = maskDimensions.height / clippedElementDimensions.height; // 1.0
if (wScale > hScale) {
return {
width: maskDimensions.width / hScale,
height: maskDimensions.height / hScale,
};
} else {
return {
width: maskDimensions.width / wScale,
height: maskDimensions.height / wScale,
};
}
};
const getFormatAspectRatioOverflow = (format: CameraDeviceFormat, size: Size): number => {
const downscaled = applyScaledMask(
size,
// cameras are landscape, so we intentionally rotate
{ width: format.photoHeight, height: format.photoWidth },
);
return downscaled.width * downscaled.height - size.width * size.height;
};
/**
* Filters Camera Device Formats by the best matching aspect ratio for the given `viewSize`.
*
* @returns A list of Camera Device Formats that match the given `viewSize`'s aspect ratio _as closely as possible_.
*
* @example
* const formats = useMemo(() => filterFormatsByAspectRatio(device.formats, CAMERA_VIEW_SIZE), [device.formats])
*/
export const filterFormatsByAspectRatio = (formats: CameraDeviceFormat[], viewSize = SCREEN_SIZE): CameraDeviceFormat[] => {
const minOverflow = formats.reduce((prev, curr) => {
const overflow = getFormatAspectRatioOverflow(curr, viewSize);
if (overflow < prev) return overflow;
else return prev;
}, Number.MAX_SAFE_INTEGER);
return formats.filter((f) => getFormatAspectRatioOverflow(f, viewSize) === minOverflow);
};
/**
* Sorts Camera Device Formats by highest photo-capture resolution, descending.
*
* @example
* const formats = useMemo(() => device.formats.sort(sortFormatsByResolution), [device.formats])
* const bestFormat = formats[0]
*/
export const sortFormatsByResolution = (left: CameraDeviceFormat, right: CameraDeviceFormat): number => {
let leftPoints = left.photoHeight * left.photoWidth;
let rightPoints = right.photoHeight * right.photoWidth;
if (left.videoHeight != null && left.videoWidth != null && right.videoHeight != null && right.videoWidth != null) {
leftPoints += left.videoWidth * left.videoHeight ?? 0;
rightPoints += right.videoWidth * right.videoHeight ?? 0;
}
// "returns a negative value if left is better than one"
return rightPoints - leftPoints;
};
/**
* Returns `true` if the given Frame Rate Range (`range`) contains the given frame rate (`fps`)
*
* @example
* // get all formats that support 60 FPS
* const formatsWithHighFps = useMemo(() => device.formats.filter((f) => f.frameRateRanges.some((r) => frameRateIncluded(r, 60))), [device.formats])
*/
export const frameRateIncluded = (range: FrameRateRange, fps: number): boolean => fps >= range.minFrameRate && fps <= range.maxFrameRate;
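
Taken together, these helpers compose into the same pipeline the example app uses; a minimal sketch (the FPS target passed in is illustrative):

import type { CameraDeviceFormat } from 'react-native-vision-camera';
import { Camera, filterFormatsByAspectRatio, frameRateIncluded, sortDevices, sortFormatsByResolution } from 'react-native-vision-camera';

async function pickFormat(targetFps: number): Promise<CameraDeviceFormat | undefined> {
  const devices = await Camera.getAvailableCameraDevices();
  // sortDevices is descending, so the first element is the "best" device
  const device = devices.sort(sortDevices)[0];
  if (device == null) return undefined;
  // keep only the formats whose aspect ratio best matches the screen, then sort by resolution
  const formats = filterFormatsByAspectRatio(device.formats).sort(sortFormatsByResolution);
  // pick the highest-resolution format that supports the target FPS
  return formats.find((f) => f.frameRateRanges.some((r) => frameRateIncluded(r, targetFps)));
}

// e.g. const format = await pickFormat(60);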