chore: Simplify format sorting/filtering (#140)

* Simplify format sorting/filtering

* Update useCameraFormat.ts

* Also check photo HDR

* Simplify double tap

* Remove snapshot

* Remove custom `useCameraDevice` hook

* Update Podfile.lock
Author: Marc Rousavy, 2021-05-14 11:52:28 +02:00 (committed via GitHub)
parent f839bc23ac
commit 310ad5fc4c
7 changed files with 37 additions and 200 deletions
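Rough sketch of how the pieces fit together after this change (the `useBestFormat` helper below is illustrative, not part of the commit): `sortFormats` replaces the old `filterFormatsByAspectRatio` + `sortFormatsByResolution` pair, and HDR capability now counts photo HDR as well as video HDR.

```ts
import { useMemo } from 'react';
import { frameRateIncluded, sortFormats, useCameraDevices } from 'react-native-vision-camera';
import type { CameraDeviceFormat } from 'react-native-vision-camera';

// Illustrative helper (not part of this commit): pick a format with the simplified API.
function useBestFormat(enableHdr: boolean, fps: number): CameraDeviceFormat | undefined {
  const devices = useCameraDevices();
  const device = devices.back;
  return useMemo(() => {
    if (device?.formats == null) return undefined;
    let formats = [...device.formats].sort(sortFormats);
    if (enableHdr) {
      // "Also check photo HDR": a format counts as HDR-capable if it supports video HDR or photo HDR
      formats = formats.filter((f) => f.supportsVideoHDR || f.supportsPhotoHDR);
    }
    // prefer a format that can record at the requested frame rate, otherwise fall back to the best one
    return formats.find((f) => f.frameRateRanges.some((r) => frameRateIncluded(r, fps))) ?? formats[0];
  }, [device?.formats, enableHdr, fps]);
}
```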

Podfile.lock

@@ -322,7 +322,7 @@ PODS:
- React
- RNVectorIcons (8.1.0):
- React-Core
- VisionCamera (1.0.10):
- VisionCamera (2.0.0):
- React
- React-callinvoker
- React-Core
@@ -490,7 +490,7 @@ SPEC CHECKSUMS:
RNReanimated: b8c8004b43446e3c2709fe64b2b41072f87428ad
RNStaticSafeAreaInsets: 6103cf09647fa427186d30f67b0f5163c1ae8252
RNVectorIcons: 31cebfcf94e8cf8686eb5303ae0357da64d7a5a4
VisionCamera: 65836f8bb8dda4e718c350e32c35a0b9f166b731
VisionCamera: 05d7d5d8df6513e61f6b6ad94d6c6befc73b780d
Yoga: 8c8436d4171c87504c648ae23b1d81242bdf3bbf
PODFILE CHECKSUM: 4b093c1d474775c2eac3268011e4b0b80929d3a2

CameraPage.tsx

@@ -1,16 +1,10 @@
import * as React from 'react';
import { useRef, useState, useMemo, useCallback } from 'react';
import { StyleSheet, Text, View } from 'react-native';
import {
PinchGestureHandler,
PinchGestureHandlerGestureEvent,
State,
TapGestureHandler,
TapGestureHandlerStateChangeEvent,
} from 'react-native-gesture-handler';
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler';
import { Navigation, NavigationFunctionComponent } from 'react-native-navigation';
import type { CameraDeviceFormat, CameraRuntimeError, PhotoFile, VideoFile } from 'react-native-vision-camera';
import { Camera, frameRateIncluded, sortFormatsByResolution, filterFormatsByAspectRatio } from 'react-native-vision-camera';
import { CameraDeviceFormat, CameraRuntimeError, PhotoFile, sortFormats, useCameraDevices, VideoFile } from 'react-native-vision-camera';
import { Camera, frameRateIncluded } from 'react-native-vision-camera';
import { useIsScreenFocussed } from './hooks/useIsScreenFocused';
import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING } from './Constants';
import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated';
@@ -21,7 +15,6 @@ import { CaptureButton } from './views/CaptureButton';
import { PressableOpacity } from 'react-native-pressable-opacity';
import MaterialIcon from 'react-native-vector-icons/MaterialCommunityIcons';
import IonIcon from 'react-native-vector-icons/Ionicons';
import { useCameraDevice } from './hooks/useCameraDevice';
const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera);
Reanimated.addWhitelistedNativeProps({
@@ -48,12 +41,11 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {
const [enableNightMode, setEnableNightMode] = useState(false);
// camera format settings
const devices = useCameraDevice();
const devices = useCameraDevices();
const device = devices[cameraPosition];
const formats = useMemo<CameraDeviceFormat[]>(() => {
if (device?.formats == null) return [];
const filtered = filterFormatsByAspectRatio(device.formats);
return filtered.sort(sortFormatsByResolution);
return device.formats.sort(sortFormats);
}, [device?.formats]);
//#region Memos
@@ -83,7 +75,7 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {
const supportsCameraFlipping = useMemo(() => devices.back != null && devices.front != null, [devices.back, devices.front]);
const supportsFlash = device?.hasFlash ?? false;
const supportsHdr = useMemo(() => formats.some((f) => f.supportsVideoHDR), [formats]);
const supportsHdr = useMemo(() => formats.some((f) => f.supportsVideoHDR || f.supportsPhotoHDR), [formats]);
const supports60Fps = useMemo(() => formats.some((f) => f.frameRateRanges.some((rate) => frameRateIncluded(rate, 60))), [formats]);
const canToggleNightMode = enableNightMode
? true // it's enabled so you have to be able to turn it off again
@@ -95,7 +87,7 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {
if (enableHdr) {
// We only filter by HDR-capable formats if HDR is enabled.
// Otherwise we ignore the `supportsVideoHDR`/`supportsPhotoHDR` properties and accept formats whether they support HDR or not.
result = result.filter((f) => f.supportsVideoHDR);
result = result.filter((f) => f.supportsVideoHDR || f.supportsPhotoHDR);
}
// find the first format that includes the given FPS
@@ -153,21 +145,11 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {
//#endregion
//#region Tap Gesture
const onDoubleTapGesture = useCallback(
({ nativeEvent: event }: TapGestureHandlerStateChangeEvent) => {
// TODO: (MARC) Allow switching camera (back <-> front) while recording and stitch videos together!
if (isPressingButton.value) return;
switch (event.state) {
case State.END:
// on double tap
onFlipCameraPressed();
break;
default:
break;
}
},
[isPressingButton, onFlipCameraPressed],
);
const onDoubleTap = useCallback(() => {
// TODO: (MARC) Allow switching camera (back <-> front) while recording and stitch videos together!
if (isPressingButton.value) return;
onFlipCameraPressed();
}, [isPressingButton, onFlipCameraPressed]);
//#endregion
//#region Effects
@@ -208,14 +190,12 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {
// _log(`Codes: ${JSON.stringify(codes)}`);
// }, []);
// TODO: Implement camera flipping (back <-> front) while recording and stitch the videos together
// TODO: iOS: Use custom video data stream output to manually process the data and write the MOV/MP4 for more customizability.
return (
<View style={styles.container}>
{device != null && (
<PinchGestureHandler onGestureEvent={onPinchGesture} enabled={isActive}>
<Reanimated.View style={StyleSheet.absoluteFill}>
<TapGestureHandler onHandlerStateChange={onDoubleTapGesture} numberOfTaps={2}>
<TapGestureHandler onEnded={onDoubleTap} numberOfTaps={2}>
<ReanimatedCamera
ref={camera}
style={StyleSheet.absoluteFill}

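The double-tap handler is simpler now as well: `TapGestureHandler`'s `onEnded` callback only fires once the gesture has actually ended, so the manual `State.END` check from the old `onHandlerStateChange` callback is no longer needed. A minimal sketch of the new shape (component and prop names are illustrative, not from the commit):

```ts
import React, { useCallback } from 'react';
import { View } from 'react-native';
import { TapGestureHandler } from 'react-native-gesture-handler';

// Illustrative component: flip the camera on double tap, without inspecting the gesture state machine.
export function DoubleTapToFlip({ onFlip }: { onFlip: () => void }): React.ReactElement {
  const onDoubleTap = useCallback(() => onFlip(), [onFlip]);
  return (
    <TapGestureHandler numberOfTaps={2} onEnded={onDoubleTap}>
      <View style={{ flex: 1 }} />
    </TapGestureHandler>
  );
}
```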
Constants.ts

@@ -15,12 +15,6 @@ export const SAFE_AREA_PADDING = {
paddingBottom: SAFE_BOTTOM + CONTENT_SPACING,
};
// whether to use takeSnapshot() instead of takePhoto() on Android
export const USE_SNAPSHOT_ON_ANDROID = false;
// whether to use ultra-wide-angle cameras if available, or explicitly disable them. I think ultra-wide-angle cams don't support 60FPS...
export const USE_ULTRAWIDE_IF_AVAILABLE = true;
// The maximum zoom _factor_ you should be able to zoom in
export const MAX_ZOOM_FACTOR = 20;

hooks/useCameraDevice.ts (file deleted)

@@ -1,45 +0,0 @@
import { useEffect, useMemo, useState } from 'react';
import { Camera, CameraDevice, sortDevices } from 'react-native-vision-camera';
/**
* A custom hook that's just like `useCameraDevices` from VisionCamera, but ignores `'telephoto-camera'` devices since those often have poor quality.
*/
export const useCameraDevice = (): {
front: CameraDevice | undefined;
back: CameraDevice | undefined;
} => {
const [backDevice, setBackDevice] = useState<CameraDevice>();
const [frontDevice, setFrontDevice] = useState<CameraDevice>();
useEffect(() => {
let isMounted = true;
const loadDevice = async (): Promise<void> => {
const devices = await Camera.getAvailableCameraDevices();
if (!isMounted) return;
// skip telephoto devices, use any other device
const filtered = devices.filter((d) => !d.devices.includes('telephoto-camera'));
const sorted = filtered.sort(sortDevices);
const back = sorted.find((d) => d.position === 'back');
const front = sorted.find((d) => d.position === 'front');
setBackDevice(back);
setFrontDevice(front);
console.log(`Devices: ${sorted.map((d) => d.name).join(', ')}`);
};
loadDevice();
return () => {
isMounted = false;
};
}, []);
return useMemo(
() => ({
back: backDevice,
front: frontDevice,
}),
[backDevice, frontDevice],
);
};

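With the custom hook gone, the example app relies on the hook that ships with VisionCamera (see CameraPage.tsx above); the shape is the same, minus the telephoto filtering. A minimal sketch (the `useBackCamera` wrapper is illustrative, not part of the commit):

```ts
import { useCameraDevices } from 'react-native-vision-camera';
import type { CameraDevice } from 'react-native-vision-camera';

// Same usage as in CameraPage.tsx above: the library picks the devices;
// `back`/`front` may be undefined while they are still loading.
function useBackCamera(): CameraDevice | undefined {
  const devices = useCameraDevices();
  return devices.back;
}
```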
views/CaptureButton.tsx

@@ -1,5 +1,5 @@
import React, { useCallback, useMemo, useRef } from 'react';
import { Platform, StyleSheet, View, ViewProps } from 'react-native';
import { StyleSheet, View, ViewProps } from 'react-native';
import {
PanGestureHandler,
PanGestureHandlerGestureEvent,
@@ -20,13 +20,11 @@ import Reanimated, {
withRepeat,
} from 'react-native-reanimated';
import type { Camera, PhotoFile, TakePhotoOptions, TakeSnapshotOptions, VideoFile } from 'react-native-vision-camera';
import { CAPTURE_BUTTON_SIZE, SCREEN_HEIGHT, SCREEN_WIDTH, USE_SNAPSHOT_ON_ANDROID } from './../Constants';
import { CAPTURE_BUTTON_SIZE, SCREEN_HEIGHT, SCREEN_WIDTH } from './../Constants';
const PAN_GESTURE_HANDLER_FAIL_X = [-SCREEN_WIDTH, SCREEN_WIDTH];
const PAN_GESTURE_HANDLER_ACTIVE_Y = [-2, 2];
const IS_ANDROID = Platform.OS === 'android';
const START_RECORDING_DELAY = 200;
const BORDER_WIDTH = CAPTURE_BUTTON_SIZE * 0.1;
@@ -73,14 +71,8 @@ const _CaptureButton: React.FC<Props> = ({
try {
if (camera.current == null) throw new Error('Camera ref is null!');
// If we're on Android and flash is disabled, we can use the "snapshot" method.
// this will take a snapshot of the current SurfaceView, which results in faster image
// capture rate at the cost of greatly reduced quality.
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
const photoMethod = USE_SNAPSHOT_ON_ANDROID && IS_ANDROID && takePhotoOptions.flash === 'off' ? 'snapshot' : 'photo';
console.log(`Taking ${photoMethod}...`);
const photo =
photoMethod === 'snapshot' ? await camera.current.takeSnapshot(takePhotoOptions) : await camera.current.takePhoto(takePhotoOptions);
console.log('Taking photo...');
const photo = await camera.current.takePhoto(takePhotoOptions);
onMediaCaptured(photo, 'photo');
} catch (e) {
console.error('Failed to take photo!', e);

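For context on what was dropped: `takeSnapshot()` captures the current preview surface (faster, but at greatly reduced quality, per the removed comment), while `takePhoto()` performs a full photo capture; the example now always uses the latter. A hedged side-by-side sketch, using only the two calls visible in this diff (the `capture` helper itself is hypothetical):

```ts
import type { Camera, PhotoFile, TakePhotoOptions, TakeSnapshotOptions } from 'react-native-vision-camera';

// Hypothetical helper for comparison only - the example app now always takes the takePhoto() path.
async function capture(camera: Camera, options: TakePhotoOptions & TakeSnapshotOptions, useSnapshot: boolean): Promise<PhotoFile> {
  if (useSnapshot) {
    // snapshot of the current preview surface: faster capture, lower quality
    return camera.takeSnapshot(options);
  }
  // full photo capture
  return camera.takePhoto(options);
}
```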
hooks/useCameraFormat.ts

@@ -1,7 +1,6 @@
import { useMemo } from 'react';
import type { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
import { filterFormatsByAspectRatio, sortFormatsByResolution } from '../utils/FormatFilter';
import type { Size } from '../utils/FormatFilter';
import { sortFormats } from '../utils/FormatFilter';
/**
* Returns the best format for the given camera device.
@@ -9,27 +8,9 @@ import type { Size } from '../utils/FormatFilter';
* This function tries to choose a format with the highest possible photo-capture resolution and best matching aspect ratio.
*
* @param {CameraDevice} device The Camera Device
* @param {Size} cameraViewSize The Camera View's size. This can be an approximation and **must be memoized**! Default: `SCREEN_SIZE`
*
* @returns The best matching format for the given camera device, or `undefined` if the camera device is `undefined`.
*/
export function useCameraFormat(device?: CameraDevice, cameraViewSize?: Size): CameraDeviceFormat | undefined {
const formats = useMemo(() => {
if (device?.formats == null) return [];
const filtered = filterFormatsByAspectRatio(device.formats, cameraViewSize);
const sorted = filtered.sort(sortFormatsByResolution);
const bestFormat = sorted[0];
if (bestFormat == null) return [];
const bestFormatResolution = bestFormat.photoHeight * bestFormat.photoWidth;
return sorted.filter((f) => {
// relative difference in resolution compared to the best format (e.g. 100x100 vs. 200x200 gives 0.75)
const resolutionDiff = (bestFormatResolution - f.photoHeight * f.photoWidth) / bestFormatResolution;
// formats whose resolution is more than 25% below the best format's are dropped - too much quality loss
return resolutionDiff <= 0.25;
});
}, [device?.formats, cameraViewSize]);
return formats[0];
export function useCameraFormat(device?: CameraDevice): CameraDeviceFormat | undefined {
return useMemo(() => device?.formats.sort(sortFormats)[0], [device?.formats]);
}

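A minimal usage sketch of the simplified hook (the `useBackCameraFormat` wrapper is illustrative and assumes `useCameraFormat` is re-exported from the package root like the other hooks):

```ts
import { useCameraDevices, useCameraFormat } from 'react-native-vision-camera';
import type { CameraDeviceFormat } from 'react-native-vision-camera';

// Illustrative: let the hook pick the best format for the back camera.
// Returns undefined while the device is still loading.
function useBackCameraFormat(): CameraDeviceFormat | undefined {
  const devices = useCameraDevices();
  return useCameraFormat(devices.back);
}
```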
utils/FormatFilter.ts

@@ -30,96 +30,31 @@ export const sortDevices = (left: CameraDevice, right: CameraDevice): number =>
return rightPoints - leftPoints;
};
/**
* Represents a Size in any unit.
*/
export type Size = {
/**
* Points in width.
*/
width: number;
/**
* Points in height.
*/
height: number;
};
const SCREEN_SIZE: Size = {
const SCREEN_SIZE = {
width: Dimensions.get('window').width,
height: Dimensions.get('window').height,
};
const applyScaledMask = (
clippedElementDimensions: Size, // 12 x 12
maskDimensions: Size, // 6 x 12
): Size => {
const wScale = maskDimensions.width / clippedElementDimensions.width; // 0.5
const hScale = maskDimensions.height / clippedElementDimensions.height; // 1.0
const scale = Math.min(wScale, hScale);
return {
width: maskDimensions.width / scale,
height: maskDimensions.height / scale,
};
};
const getFormatAspectRatioOverflow = (format: CameraDeviceFormat, size: Size): number => {
const downscaled = applyScaledMask(
size,
// cameras are landscape, so we intentionally rotate
{ width: format.photoHeight, height: format.photoWidth },
);
return downscaled.width * downscaled.height - size.width * size.height;
};
const SCREEN_ASPECT_RATIO = SCREEN_SIZE.width / SCREEN_SIZE.height;
/**
* Filters Camera Device Formats by the best matching aspect ratio for the given `viewSize`.
* Sort formats by resolution and aspect ratio difference (to the Screen size).
*
* @param {CameraDeviceFormat[]} formats A list of formats the current device has (see {@linkcode CameraDevice.formats})
* @param {Size} viewSize The size of the camera view which will be used to find the best aspect ratio. Defaults to the screen size.
* @returns A list of Camera Device Formats that match the given `viewSize`' aspect ratio _as close as possible_.
*
* @example
* ```ts
* const formats = useMemo(() => filterFormatsByAspectRatio(device.formats, CAMERA_VIEW_SIZE), [device.formats])
* ```
* @method
> Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" format.
*/
export const filterFormatsByAspectRatio = (formats: CameraDeviceFormat[], viewSize = SCREEN_SIZE): CameraDeviceFormat[] => {
const minOverflow = formats.reduce((prev, curr) => {
const overflow = getFormatAspectRatioOverflow(curr, viewSize);
if (overflow < prev) return overflow;
else return prev;
}, Number.MAX_SAFE_INTEGER);
export const sortFormats = (left: CameraDeviceFormat, right: CameraDeviceFormat): number => {
let leftPoints = 0,
rightPoints = 0;
return formats.filter((f) => {
// relative difference between this format's overflow and the minimum available overflow
const overflowDiff = (getFormatAspectRatioOverflow(f, viewSize) - minOverflow) / minOverflow;
// we accept up to 25%: if the overflow is more than 25% above the minimum available overflow, we drop the format
return overflowDiff < 0.25;
});
};
// we downscale the points so much that we are in smaller number ranges for future calculations
// e.g. for 4k (4096), this adds 4 points.
leftPoints += Math.round(left.photoWidth / 1000);
rightPoints += Math.round(right.photoWidth / 1000);
/**
* Sorts Camera Device Formats by highest photo-capture resolution, descending. Use this in a `.sort` function.
*
* @example
* ```ts
* const formats = useMemo(() => device.formats.sort(sortFormatsByResolution), [device.formats])
* const bestFormat = formats[0]
* ```
* @method
*/
export const sortFormatsByResolution = (left: CameraDeviceFormat, right: CameraDeviceFormat): number => {
let leftPoints = left.photoHeight * left.photoWidth;
let rightPoints = right.photoHeight * right.photoWidth;
const leftAspectRatioDiff = left.photoHeight / left.photoWidth - SCREEN_ASPECT_RATIO;
const rightAspectRatioDiff = right.photoHeight / right.photoWidth - SCREEN_ASPECT_RATIO;
leftPoints -= Math.abs(leftAspectRatioDiff) * 50;
rightPoints -= Math.abs(rightAspectRatioDiff) * 50;
if (left.videoHeight != null && left.videoWidth != null && right.videoHeight != null && right.videoWidth != null) {
leftPoints += left.videoWidth * left.videoHeight;
rightPoints += right.videoWidth * right.videoHeight;
}
// "returns a negative value if left is better than one"
return rightPoints - leftPoints;
};
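
To make the scoring concrete, a rough worked example under assumed numbers (a 390x844 pt screen and a format with a 4032x3024 photo and 3840x2160 video resolution; none of these values come from the commit):

```ts
// Illustrative arithmetic only - screen size and format dimensions are assumptions.
const SCREEN_ASPECT_RATIO = 390 / 844; // ~0.462

let points = 0;
points += Math.round(4032 / 1000); // photo width, downscaled: +4
points -= Math.abs(3024 / 4032 - SCREEN_ASPECT_RATIO) * 50; // aspect-ratio penalty: ~-14.4
points += 3840 * 2160; // video resolution: +8,294,400 (dominates the score when present)

// The comparator returns rightPoints - leftPoints, so the format with the
// higher score sorts first (descending) and ends up at index 0.
console.log(points); // ~8,294,389.6
```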