Create media captured page

This commit is contained in:
Marc Rousavy 2021-02-19 18:44:05 +01:00
parent 1d71aada11
commit c13c46f388
5 changed files with 410 additions and 0 deletions

View File

@ -4,6 +4,7 @@ import { gestureHandlerRootHOC } from 'react-native-gesture-handler';
import { App } from './src/App';
import { Settings } from './src/Settings';
import { Splash } from './src/Splash';
import { Media } from './src/Media';
Navigation.setDefaultOptions({
topBar: {
@ -13,6 +14,7 @@ Navigation.setDefaultOptions({
Navigation.registerComponent('Splash', () => gestureHandlerRootHOC(Splash), () => Splash);
Navigation.registerComponent('Home', () => gestureHandlerRootHOC(App), () => App);
Navigation.registerComponent('Media', () => gestureHandlerRootHOC(Media), () => Media);
Navigation.registerComponent('Settings', () => gestureHandlerRootHOC(Settings), () => Settings);
Navigation.events().registerAppLaunchedListener(() => {

example/src/FormatFilter.ts Normal file
View File

@ -0,0 +1,269 @@
import {
RESOLUTION_LIMIT,
SCREEN_HEIGHT,
SCREEN_WIDTH,
USE_ULTRAWIDE_IF_AVAILABLE,
} from "./Constants";
import type {
CameraDevice,
CameraDeviceFormat,
FrameRateRange,
} from "react-native-vision-camera";
/**
* Compares two devices with the following criteria:
* * Devices with a wide-angle camera are 5x **better** than devices without one.
* * Devices with an ultra-wide-angle camera are 5x **worse** than devices without one (unless `USE_ULTRAWIDE_IF_AVAILABLE` is enabled).
* * Devices with more physical cameras are better.
*
* @returns
* * `-1` if left is BETTER than right
* * `0` if left equals right
* * `1` if left is WORSE than right
*
* Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*/
export const compareDevices = (
left: CameraDevice,
right: CameraDevice
): -1 | 0 | 1 => {
let leftPoints = 0;
const leftHasWideAngle = left.devices.includes("wide-angle-camera");
const rightHasWideAngle = right.devices.includes("wide-angle-camera");
if (leftHasWideAngle && !rightHasWideAngle) {
// left does have a wide-angle-camera, but right doesn't.
leftPoints += 5;
} else if (!leftHasWideAngle && rightHasWideAngle) {
// left doesn't have a wide-angle-camera, but right does.
leftPoints -= 5;
}
if (!USE_ULTRAWIDE_IF_AVAILABLE) {
const leftHasUltraWideAngle = left.devices.includes(
"ultra-wide-angle-camera"
);
const rightHasUltraWideAngle = right.devices.includes(
"ultra-wide-angle-camera"
);
if (leftHasUltraWideAngle && !rightHasUltraWideAngle) {
// left does have an ultra-wide-angle-camera, but right doesn't. Ultra-Wide cameras are bad because of their poor quality.
leftPoints -= 5;
} else if (!leftHasUltraWideAngle && rightHasUltraWideAngle) {
// left doesn't have an ultra-wide-angle-camera, but right does. Ultra-Wide cameras are bad because of their poor quality.
leftPoints += 5;
}
}
if (left.devices.length > right.devices.length) {
// left has more devices than right
leftPoints += 1;
} else if (left.devices.length < right.devices.length) {
// left has fewer devices than right
leftPoints -= 1;
}
if (leftPoints > 0) return -1;
if (leftPoints < 0) return 1;
return 0;
};
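// Usage sketch (not part of this commit): given a list of CameraDevices obtained
// elsewhere (e.g. from the camera library), sorting with compareDevices puts the
// "best" device first. `pickBestDevice` is a hypothetical helper, shown only for
// illustration.
export const pickBestDevice = (
  devices: CameraDevice[]
): CameraDevice | undefined => [...devices].sort(compareDevices)[0];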
type Size = { width: number; height: number };
const CAMERA_VIEW_SIZE: Size = {
width: SCREEN_WIDTH,
height: SCREEN_HEIGHT,
};
const applyScaledMask = (
clippedElementDimensions: Size, // 3024 x 4032 | 2160x3840
maskDimensions: Size // 375 x 623
): Size => {
const wScale = maskDimensions.width / clippedElementDimensions.width;
const hScale = maskDimensions.height / clippedElementDimensions.height;
if (wScale < hScale) {
return {
width: maskDimensions.width / hScale,
height: clippedElementDimensions.height,
};
} else {
return {
width: clippedElementDimensions.width,
height: maskDimensions.height / wScale,
};
}
};
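// Worked example for applyScaledMask (numbers match the comments above, shown only
// for illustration): a 4032 x 3024 photo rotated to portrait gives
// clippedElementDimensions = { width: 3024, height: 4032 }, and the mask is the
// screen, { width: 375, height: 623 }.
// wScale = 375 / 3024 ≈ 0.124, hScale = 623 / 4032 ≈ 0.155, so wScale < hScale and
// the visible region is { width: 375 / 0.155 ≈ 2427, height: 4032 }: roughly
// 2427 x 4032 of the 3024 x 4032 pixels end up on-screen; the rest is cropped away
// and counted as "overflow" in compareFormats below.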
/**
* Compares two Formats with the following comparators:
* * Photo Dimensions (higher is better, unless above the `RESOLUTION_LIMIT`) (weights x3)
* * Cropping Overflow (pixels wasted off-screen, lower is better) (weights x4)
* * Video Dimensions (higher is better) (weights x2)
* * Max FPS (higher is better) (weights x2)
* * Video HDR Support (true is better) (weights x1)
* * Photo HDR Support (true is better) (weights x1)
*
* @returns
* * `-1` if left is BETTER than right
* * `0` if left equals right
* * `1` if left is WORSE than right
*
* Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" format.
*/
export const compareFormats = (
left: CameraDeviceFormat,
right: CameraDeviceFormat
): -1 | 0 | 1 => {
// Point score of the left format. Higher is better.
let leftPoints = 0;
const leftPhotoPixels = left.photoHeight * left.photoWidth;
const rightPhotoPixels = right.photoHeight * right.photoWidth;
if (leftPhotoPixels > rightPhotoPixels) {
// left has greater photo dimensions
const isLeftAbovePixelLimit =
RESOLUTION_LIMIT != null && leftPhotoPixels > RESOLUTION_LIMIT;
if (isLeftAbovePixelLimit) {
// left exceeds our pixel limit
leftPoints -= 3;
} else {
// left does not exceed our pixel limit
leftPoints += 3;
}
} else if (leftPhotoPixels < rightPhotoPixels) {
// left has smaller photo dimensions
const isRightAbovePixelLimit =
RESOLUTION_LIMIT != null && rightPhotoPixels > RESOLUTION_LIMIT;
if (isRightAbovePixelLimit) {
// right exceeds our pixel limit
leftPoints += 3;
} else {
// right does not exceed our pixel limit
leftPoints -= 3;
}
}
const leftCropped = applyScaledMask(
{ width: left.photoHeight, height: left.photoWidth }, // cameras are horizontal, we rotate to portrait
CAMERA_VIEW_SIZE
);
const rightCropped = applyScaledMask(
{ width: right.photoHeight, height: right.photoWidth }, // cameras are horizontal, we rotate to portrait
CAMERA_VIEW_SIZE
);
const leftOverflow =
left.photoWidth * left.photoHeight - leftCropped.width * leftCropped.height;
const rightOverflow =
right.photoWidth * right.photoHeight -
rightCropped.width * rightCropped.height;
if (leftOverflow > rightOverflow) {
// left has a higher overflow, aka more pixels that aren't on-screen and therefore wasted. Maybe left is 4:3 and right is 16:9
leftPoints -= 4;
} else if (leftOverflow < rightOverflow) {
// right has a higher overflow, aka more pixels that aren't on-screen and therefore wasted. Maybe right is 4:3 and left is 16:9
leftPoints += 4;
}
if (
left.videoHeight != null &&
left.videoWidth != null &&
right.videoHeight != null &&
right.videoWidth != null
) {
const leftVideoPixels = left.videoWidth * left.videoHeight;
const rightVideoPixels = right.videoWidth * right.videoHeight;
if (leftVideoPixels > rightVideoPixels) {
// left has greater video dimensions
leftPoints += 2;
} else if (leftVideoPixels < rightVideoPixels) {
// left has smaller video dimensions
leftPoints -= 2;
}
}
const leftMaxFps = Math.max(
...left.frameRateRanges.map((r) => r.maxFrameRate)
);
const rightMaxFps = Math.max(
...right.frameRateRanges.map((r) => r.maxFrameRate)
);
if (leftMaxFps > rightMaxFps) {
// left has more fps
leftPoints += 2;
} else if (leftMaxFps < rightMaxFps) {
// left has less fps
leftPoints -= 2;
}
if (left.supportsVideoHDR && !right.supportsVideoHDR) {
// left does support video HDR, right doesn't
leftPoints += 1;
} else if (!left.supportsVideoHDR && right.supportsVideoHDR) {
// left doesn't support video HDR, right does
leftPoints -= 1;
}
if (left.supportsPhotoHDR && !right.supportsPhotoHDR) {
// left does support photo HDR, right doesn't
leftPoints += 1;
} else if (!left.supportsPhotoHDR && right.supportsPhotoHDR) {
// left doesn't support photo HDR, right does
leftPoints -= 1;
}
if (leftPoints > 0) return -1;
if (leftPoints < 0) return 1;
return 0;
};
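// Usage sketch (not part of this commit): assuming the CameraDevice type exposes
// its available formats as `device.formats`, sorting them with compareFormats puts
// the preferred format first. `pickBestFormat` is a hypothetical helper, shown only
// for illustration.
export const pickBestFormat = (
  device: CameraDevice
): CameraDeviceFormat | undefined =>
  [...device.formats].sort(compareFormats)[0];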
/**
* Returns the smallest difference between any of the given FrameRateRanges' `maxFrameRate` and the given `fps`
*/
const smallestFpsDiff = (
frameRateRanges: FrameRateRange[],
fps: number
): number => {
const bestFrameRateRange = frameRateRanges.reduce<FrameRateRange | undefined>(
(prev, curr) => {
if (prev == null) return curr;
const prevDiff = Math.abs(prev.maxFrameRate - fps);
const currDiff = Math.abs(curr.maxFrameRate - fps);
if (prevDiff < currDiff) return prev;
else return curr;
},
undefined
);
const max = bestFrameRateRange?.maxFrameRate ?? 0;
return Math.abs(max - fps);
};
export const frameRateIncluded = (
range: FrameRateRange,
fps: number
): boolean => fps >= range.minFrameRate && fps <= range.maxFrameRate;
const isFpsInFrameRateRange = (
format: CameraDeviceFormat,
fps: number
): boolean => format.frameRateRanges.some((r) => frameRateIncluded(r, fps));
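// Example: for a range of { minFrameRate: 3, maxFrameRate: 30 },
// frameRateIncluded(range, 30) is true while frameRateIncluded(range, 31) is false,
// so a 31 FPS request would not match a 3-30 format.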
/**
* Selects the format whose frame rate ranges most closely match the given FPS
*/
export const formatWithClosestMatchingFps = (
formats: CameraDeviceFormat[],
fps: number
): CameraDeviceFormat | undefined =>
formats.reduce<CameraDeviceFormat | undefined>((prev, curr) => {
if (prev == null) return curr;
// if range is 3-30 and FPS is 31, it doesn't match.
if (!isFpsInFrameRateRange(curr, fps)) return prev;
const prevFpsDiff = smallestFpsDiff(prev.frameRateRanges, fps);
const currFpsDiff = smallestFpsDiff(curr.frameRateRanges, fps);
if (currFpsDiff < prevFpsDiff) return curr;
else return prev;
}, undefined);
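// Usage sketch (not part of this commit, helper name is hypothetical): one way to
// combine the helpers above is to rank the formats by overall quality first, then
// let the FPS matcher prefer a format whose frame rate ranges actually contain the
// target FPS.
export const pickFormat = (
  formats: CameraDeviceFormat[],
  fps: number
): CameraDeviceFormat | undefined =>
  formatWithClosestMatchingFps([...formats].sort(compareFormats), fps);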

example/src/Media.tsx Normal file
View File

@ -0,0 +1,43 @@
import React, { useCallback, useMemo } from 'react';
import { StyleSheet, View, Text, Image, Pressable } from 'react-native';
import { Navigation, NavigationFunctionComponent } from 'react-native-navigation';
import Video from 'react-native-video';
import { CONTENT_SPACING } from './Constants';
interface MediaProps {
path: string,
type: 'video' | 'photo'
}
export const Media: NavigationFunctionComponent<MediaProps> = ({ componentId, type, path }) => {
const onClosePressed = useCallback(() => {
Navigation.dismissModal(componentId);
}, [componentId]);
const source = useMemo(() => ({ uri: `file://${path}` }), [path])
return (
<View style={styles.container}>
{type === "photo" && (<Image source={source} style={StyleSheet.absoluteFill} />)}
{type === "video" && (<Video source={source} style={StyleSheet.absoluteFill} />)}
<Pressable style={styles.closeButton} onPress={onClosePressed}><Text>Close.</Text></Pressable>
</View>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
backgroundColor: 'white',
},
closeButton: {
position: 'absolute',
top: CONTENT_SPACING,
left: CONTENT_SPACING,
width: 40,
height: 40,
},
});
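// Usage sketch (not part of this commit): the capture screen could present this
// page as a react-native-navigation modal, passing the captured file's path and
// type via passProps. `showMedia` is a hypothetical helper, shown only for
// illustration.
export const showMedia = (path: string, type: 'video' | 'photo') =>
  Navigation.showModal({
    component: {
      name: 'Media',
      passProps: { path, type },
    },
  });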

View File

@ -0,0 +1,32 @@
import { useCallback, useRef, useState } from "react";
/**
* Same as `useState`, but swallows calls to `setState` if the value didn't change (uses the `===` comparator by default)
* @param initialValue The initial state
* @param comparator A custom comparator, useful if you want to only round numbers or use string locale for comparison. Make sure this function is memoized!
*/
export const useCachedState = <T>(
initialValue: T,
comparator?: (oldState: T, newState: T) => boolean
): [T, (newState: T) => void] => {
const [state, setState] = useState(initialValue);
const cachedState = useRef(initialValue);
const dispatchState = useCallback(
(newState: T) => {
const areEqual =
comparator == null
? cachedState.current === newState
: comparator(cachedState.current, newState);
if (areEqual) {
return;
} else {
cachedState.current = newState;
setState(newState);
}
},
[comparator]
);
return [state, dispatchState];
};
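// Usage sketch (not part of this commit): a module-scope (and therefore memoized)
// comparator that treats two numbers as equal once rounded, so updates from e.g.
// 1.2 to 1.4 are swallowed and don't trigger a re-render. In a hypothetical
// component:
//   const [zoom, setZoom] = useCachedState(1, roundedComparator);
export const roundedComparator = (oldState: number, newState: number): boolean =>
  Math.round(oldState) === Math.round(newState);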

View File

@ -0,0 +1,64 @@
import { useEffect, useMemo, useReducer } from "react";
import { Navigation } from "react-native-navigation";
type Action =
| {
action: "push";
componentId: string;
}
| {
action: "pop";
componentId: string;
};
const reducer = (stack: string[], action: Action): string[] => {
switch (action.action) {
case "push": {
stack.push(action.componentId);
break;
}
case "pop": {
const index = stack.indexOf(action.componentId);
if (index > -1) stack.splice(index, 1);
break;
}
}
return [...stack];
};
export const useIsScreenFocused = (componentId: string): boolean => {
const [componentStack, dispatch] = useReducer(reducer, [componentId]);
const isFocused = useMemo(
() => componentStack[componentStack.length - 1] === componentId,
[componentStack, componentId]
);
useEffect(() => {
const listener = Navigation.events().registerComponentDidAppearListener(
(event) => {
if (event.componentType !== "Component") return;
dispatch({
action: "push",
componentId: event.componentId,
});
}
);
return () => listener.remove();
}, []);
useEffect(() => {
const listener = Navigation.events().registerComponentDidDisappearListener(
(event) => {
if (event.componentType !== "Component") return;
dispatch({
action: "pop",
componentId: event.componentId,
});
}
);
return () => listener.remove();
}, []);
return isFocused;
};
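// Usage sketch (not part of this commit, component is hypothetical): a camera
// screen can derive its active state from the navigation focus, so the camera
// session pauses whenever another component is pushed on top:
//   const isFocused = useIsScreenFocused(componentId);
//   return <Camera isActive={isFocused} device={device} format={format} />;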