update a whole lotta stuff

Marc Rousavy 2021-02-19 19:06:28 +01:00
parent cf157cb299
commit 16e73fc910
9 changed files with 746 additions and 12 deletions


@ -1,4 +1,3 @@
module.exports = {
presets: ['module:metro-react-native-babel-preset'],
plugins: ['react-native-reanimated/plugin']
presets: ['module:metro-react-native-babel-preset']
};


@ -4,6 +4,7 @@ const pak = require('../package.json');
module.exports = {
presets: ['module:metro-react-native-babel-preset'],
plugins: [
'react-native-reanimated/plugin',
[
'module-resolver',
{


@ -345,6 +345,8 @@ PODS:
- Yoga
- RNStaticSafeAreaInsets (2.1.1):
- React
- RNVectorIcons (8.0.0):
- React-Core
- Yoga (1.14.0)
- YogaKit (1.18.1):
- Yoga (~> 1.14)
@ -403,6 +405,7 @@ DEPENDENCIES:
- RNGestureHandler (from `../node_modules/react-native-gesture-handler`)
- RNReanimated (from `../node_modules/react-native-reanimated`)
- RNStaticSafeAreaInsets (from `../node_modules/react-native-static-safe-area-insets`)
- RNVectorIcons (from `../node_modules/react-native-vector-icons`)
- Yoga (from `../node_modules/react-native/ReactCommon/yoga`)
SPEC REPOS:
@ -485,6 +488,8 @@ EXTERNAL SOURCES:
:path: "../node_modules/react-native-reanimated"
RNStaticSafeAreaInsets:
:path: "../node_modules/react-native-static-safe-area-insets"
RNVectorIcons:
:path: "../node_modules/react-native-vector-icons"
Yoga:
:path: "../node_modules/react-native/ReactCommon/yoga"
@ -532,6 +537,7 @@ SPEC CHECKSUMS:
RNGestureHandler: 5e58135436aacc1c5d29b75547d3d2b9430d052c
RNReanimated: ca4f28c765329144d68bdad126bf6b0b1afc7a5a
RNStaticSafeAreaInsets: 6103cf09647fa427186d30f67b0f5163c1ae8252
RNVectorIcons: f67a1abce2ec73e62fe4606e8110e95a832bc859
Yoga: 4bd86afe9883422a7c4028c00e34790f560923d6
YogaKit: f782866e155069a2cca2517aafea43200b01fd5a


@ -240,10 +240,42 @@
);
inputPaths = (
"${PODS_ROOT}/Target Support Files/Pods-VisionCameraExample/Pods-VisionCameraExample-resources.sh",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/AntDesign.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Entypo.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/EvilIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Feather.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome5_Brands.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome5_Regular.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/FontAwesome5_Solid.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Fontisto.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Foundation.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Ionicons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/MaterialCommunityIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/MaterialIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Octicons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/SimpleLineIcons.ttf",
"${PODS_ROOT}/../../node_modules/react-native-vector-icons/Fonts/Zocial.ttf",
"${PODS_CONFIGURATION_BUILD_DIR}/React-Core/AccessibilityResources.bundle",
);
name = "[CP] Copy Pods Resources";
outputPaths = (
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/AntDesign.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Entypo.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/EvilIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Feather.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome5_Brands.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome5_Regular.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/FontAwesome5_Solid.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Fontisto.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Foundation.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Ionicons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/MaterialCommunityIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/MaterialIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Octicons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/SimpleLineIcons.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/Zocial.ttf",
"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/AccessibilityResources.bundle",
);
runOnlyForDeploymentPostprocessing = 0;


@ -1501,6 +1501,16 @@
"@types/react": "*"
}
},
"@types/react-native-vector-icons": {
"version": "6.4.6",
"resolved": "https://registry.npmjs.org/@types/react-native-vector-icons/-/react-native-vector-icons-6.4.6.tgz",
"integrity": "sha512-lAyxNfMd5L1xZvXWsGcJmNegDf61TAp40uL6ashNNWj9W3IrDJO59L9+9inh0Y2MsEZpLTdxzVU8Unb4/0FQng==",
"dev": true,
"requires": {
"@types/react": "*",
"@types/react-native": "*"
}
},
"@types/react-native-video": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/@types/react-native-video/-/react-native-video-5.0.4.tgz",


@ -26,6 +26,7 @@
"@react-native-community/eslint-plugin": "^1.1.0",
"@types/react": "^17.0.2",
"@types/react-native": "^0.63.50",
"@types/react-native-vector-icons": "^6.4.6",
"@types/react-native-video": "^5.0.4",
"@typescript-eslint/eslint-plugin": "^4.15.1",
"@typescript-eslint/parser": "^4.15.1",


@ -1,12 +1,343 @@
import * as React from 'react';
import { useRef, useState, useMemo, useCallback } from 'react';
import { StyleSheet, View } from 'react-native';
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, State, TapGestureHandler, TapGestureHandlerStateChangeEvent } from 'react-native-gesture-handler';
import { Navigation, NavigationFunctionComponent } from 'react-native-navigation';
import type { CameraDevice, CameraDeviceFormat, CameraProps, CameraRuntimeError, PhotoFile, VideoFile } from 'react-native-vision-camera';
import { Camera } from 'react-native-vision-camera';
import { useIsScreenFocused } from './hooks/useIsScreenFocused';
import { compareFormats, frameRateIncluded, formatWithClosestMatchingFps, compareDevices } from './FormatFilter';
import { CAPTURE_BUTTON_SIZE, CONTENT_SPACING, HIGH_FPS, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING, SCREEN_WIDTH } from './Constants';
import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated';
import { useEffect } from 'react';
import { useIsForeground } from './hooks/useIsForeground';
import { StatusBarBlurBackground } from './views/StatusBarBlurBackground';
import { CaptureButton } from './views/CaptureButton';
import { PressableOpacity } from './views/PressableOpacity';
import MaterialIcon from 'react-native-vector-icons/MaterialCommunityIcons';
import IonIcon from 'react-native-vector-icons/Ionicons';
import { StyleSheet, View, Text } from 'react-native';
const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera);
Reanimated.addWhitelistedNativeProps({
zoom: true,
});
export default function App() {
const SCALE_FULL_ZOOM = 3;
const BUTTON_SIZE = 40;
const LOCAL_GALLERY_BUTTON_SIZE = 40;
export const App: NavigationFunctionComponent = ({ componentId }) => {
const camera = useRef<Camera>(null);
const [isCameraInitialized, setIsCameraInitialized] = useState(false);
const zoom = useSharedValue(0);
const isPressingButton = useSharedValue(false);
// check if camera page is active
const isFocussed = useIsScreenFocused(componentId);
const isForeground = useIsForeground();
const isActive = isFocussed && isForeground;
const [cameraPosition, setCameraPosition] = useState<"front" | "back">(
"back"
);
const [enableHdr, setEnableHdr] = useState(false);
const [flash, setFlash] = useState<"off" | "on">("off");
const [enableNightMode, setEnableNightMode] = useState(false);
// camera format settings
const [devices, setDevices] = useState<CameraDevice[]>([]); // All available camera devices, sorted by "best device" (descending)
const device = useMemo<CameraDevice | undefined>(
() => devices.find((d) => d.position === cameraPosition),
[cameraPosition, devices]
);
const formats = useMemo<CameraDeviceFormat[]>(
() => device?.formats.sort(compareFormats) ?? [],
[device?.formats]
);
//#region Memos
const fps = useMemo(() => {
if (enableNightMode && !device?.supportsLowLightBoost) {
// User has enabled Night Mode, but Night Mode is not natively supported, so we simulate it by lowering the frame rate.
return 30;
}
const supportsHdrAtHighFps = formats.some(
(f) =>
f.supportsVideoHDR &&
f.frameRateRanges.some((r) => frameRateIncluded(r, HIGH_FPS))
);
if (enableHdr && !supportsHdrAtHighFps) {
// User has enabled HDR, but HDR is not supported at HIGH_FPS.
return 30;
}
const supportsHighFps = formats.some((f) =>
f.frameRateRanges.some((r) => frameRateIncluded(r, HIGH_FPS))
);
if (!supportsHighFps) {
// HIGH_FPS is not supported by any format.
return 30;
}
// If nothing blocks us from using it, we default to HIGH_FPS.
return HIGH_FPS;
}, [device, enableHdr, enableNightMode, formats]);
const supportsCameraFlipping = useMemo(
() =>
devices.some((d) => d.position === "back") &&
devices.some((d) => d.position === "front"),
[devices]
);
const supportsFlash = device?.hasFlash ?? false;
const supportsHdr = useMemo(() => formats.some((f) => f.supportsVideoHDR), [
formats,
]);
const canToggleNightMode = enableNightMode
? true // it's enabled so you have to be able to turn it off again
: (device?.supportsLowLightBoost ?? false) || fps > 30; // either we have native support, or we can lower the FPS
//#endregion
const format = useMemo(() => {
let result = formats;
if (enableHdr) {
// We only filter by HDR-capable formats if HDR is enabled.
// Otherwise we ignore the `supportsVideoHDR` property and accept formats regardless of HDR support.
result = result.filter((f) => f.supportsVideoHDR);
}
return formatWithClosestMatchingFps(result, fps);
}, [formats, fps, enableHdr]);
//#region Animated Zoom
const formatMaxZoom = format?.maxZoom ?? 1;
const maxZoomFactor = Math.min(formatMaxZoom, MAX_ZOOM_FACTOR);
const neutralZoom = device?.neutralZoom ?? 0;
const neutralZoomScaled = (neutralZoom / maxZoomFactor) * formatMaxZoom;
const maxZoomScaled = (1 / formatMaxZoom) * maxZoomFactor;
const cameraAnimatedProps = useAnimatedProps<Partial<CameraProps>>(
() => ({
zoom: interpolate(
zoom.value,
[0, neutralZoomScaled, 1],
[0, neutralZoom, maxZoomScaled],
Extrapolate.CLAMP
),
}),
[maxZoomScaled, neutralZoom, neutralZoomScaled, zoom]
);
//#endregion
//#region Callbacks
const setIsPressingButton = useCallback(
(_isPressingButton: boolean) => {
isPressingButton.value = _isPressingButton;
},
[isPressingButton]
);
// Camera callbacks
const onError = useCallback((error: CameraRuntimeError) => {
console.error(error);
}, []);
const onInitialized = useCallback(() => {
console.log(`Camera initialized!`);
setIsCameraInitialized(true);
}, []);
const onMediaCaptured = useCallback(
async (media: PhotoFile | VideoFile, type: "photo" | "video") => {
console.log(`Media captured! ${JSON.stringify(media)}`);
await Navigation.showModal({
component: {
name: 'Media',
passProps: {
type: type,
path: media.path,
}
}
});
},
[]
);
const onFlipCameraPressed = useCallback(() => {
setCameraPosition((p) => (p === "back" ? "front" : "back"));
}, []);
const onHdrSwitchPressed = useCallback(() => {
setEnableHdr((h) => !h);
}, []);
const onFlashPressed = useCallback(() => {
setFlash((f) => (f === "off" ? "on" : "off"));
}, []);
const onNightModePressed = useCallback(() => {
setEnableNightMode((n) => !n);
}, []);
//#endregion
//#region Tap Gesture
const onDoubleTapGesture = useCallback(
({ nativeEvent: event }: TapGestureHandlerStateChangeEvent) => {
// TODO: (MARC) Allow switching camera (back <-> front) while recording and stitch videos together!
if (isPressingButton.value) return;
switch (event.state) {
case State.END:
// on double tap
onFlipCameraPressed();
break;
default:
break;
}
},
[isPressingButton, onFlipCameraPressed]
);
//#endregion
//#region Effects
useEffect(() => {
const loadDevices = async () => {
try {
const availableCameraDevices = await Camera.getAvailableCameraDevices();
console.log(`Devices: ${availableCameraDevices.map((d) => d.name).join(", ")}`);
const sortedDevices = availableCameraDevices.sort(compareDevices);
console.debug(`Devices (sorted): ${sortedDevices.map((d) => d.name).join(", ")}`);
setDevices(sortedDevices);
} catch (e) {
console.error(`Failed to get available devices!`, e);
}
};
loadDevices();
}, []);
useEffect(() => {
// Runs every time the neutralZoomScaled value changes (resets zoom when the device changes)
zoom.value = neutralZoomScaled;
}, [neutralZoomScaled, zoom]);
useEffect(() => {
// Runs every time the camera gets set to isActive = false (resets zoom when switching tabs)
if (!isActive) {
zoom.value = neutralZoomScaled;
}
}, [neutralZoomScaled, isActive, zoom]);
//#endregion
//#region Pinch to Zoom Gesture
// The gesture handler maps the linear pinch gesture (0 - 1) to an exponential curve, since a camera's zoom
// function does not appear linear to the user (e.g. zooming from 0.1 to 0.2 does not look like the same change as zooming from 0.8 to 0.9).
const onPinchGesture = useAnimatedGestureHandler<PinchGestureHandlerGestureEvent, { startZoom?: number }>({
onStart: (_, context) => {
context.startZoom = zoom.value;
},
onActive: (event, context) => {
// we're trying to map the scale gesture to a linear zoom here
const startZoom = context.startZoom ?? 0;
const scale = interpolate(
event.scale,
[1 - 1 / SCALE_FULL_ZOOM, 1, SCALE_FULL_ZOOM],
[-1, 0, 1],
Extrapolate.CLAMP
);
zoom.value = interpolate(
scale,
[-1, 0, 1],
[0, startZoom, 1],
Extrapolate.CLAMP
);
},
});
//#endregion
// TODO: Implement camera flipping (back <-> front) while recording and stitch the videos together
// TODO: iOS: Use custom video data stream output to manually process the data and write the MOV/MP4 for more customizability.
return (
<View style={styles.container}>
<Text>Hello World!</Text>
{device != null && (
<PinchGestureHandler onGestureEvent={onPinchGesture} enabled={isActive}>
<Reanimated.View style={StyleSheet.absoluteFill}>
<TapGestureHandler
onHandlerStateChange={onDoubleTapGesture}
numberOfTaps={2}>
<ReanimatedCamera
ref={camera}
style={StyleSheet.absoluteFill}
device={device}
format={format}
fps={fps}
hdr={enableHdr}
lowLightBoost={
device.supportsLowLightBoost && enableNightMode
}
isActive={isActive}
onInitialized={onInitialized}
onError={onError}
enableZoomGesture={false}
// TODO: Remove once https://github.com/software-mansion/react-native-reanimated/pull/1697 gets merged
// @ts-expect-error animatedProps should be Partial<P>
animatedProps={cameraAnimatedProps}
/>
</TapGestureHandler>
</Reanimated.View>
</PinchGestureHandler>
)}
<CaptureButton
style={styles.captureButton}
camera={camera}
onMediaCaptured={onMediaCaptured}
maximumVideoDuration={15}
cameraZoom={zoom}
flash={supportsFlash ? flash : "off"}
enabled={isCameraInitialized && isActive}
setIsPressingButton={setIsPressingButton}
/>
<StatusBarBlurBackground />
<View style={styles.rightButtonRow}>
{supportsCameraFlipping && (
<PressableOpacity
style={styles.button}
onPress={onFlipCameraPressed}
disabledOpacity={0.4}>
<IonIcon
name="camera-reverse"
color="white"
size={24}
/>
</PressableOpacity>
)}
{supportsFlash && (
<PressableOpacity
style={styles.button}
onPress={onFlashPressed}
disabledOpacity={0.4}>
<IonIcon
name={flash === "on" ? "flash" : "flash-off"}
color="white"
size={24}
/>
</PressableOpacity>
)}
{canToggleNightMode && (
<PressableOpacity
style={styles.button}
onPress={onNightModePressed}
disabledOpacity={0.4}>
<IonIcon
name={enableNightMode ? "moon" : "moon-outline"}
color="white"
size={24}
/>
</PressableOpacity>
)}
{supportsHdr && (
<PressableOpacity style={styles.button} onPress={onHdrSwitchPressed}>
<MaterialIcon
name={enableHdr ? "hdr" : "hdr-off"}
color="white"
size={24}
/>
</PressableOpacity>
)}
</View>
</View>
);
}
@ -14,13 +345,32 @@ export default function App() {
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
backgroundColor: 'white',
backgroundColor: "black",
},
box: {
width: 60,
height: 60,
marginVertical: 20,
captureButton: {
position: "absolute",
alignSelf: "center",
bottom: SAFE_AREA_PADDING.paddingBottom
},
openLocalGalleryButton: {
position: "absolute",
left: (SCREEN_WIDTH / 2 - CAPTURE_BUTTON_SIZE / 2) / 2,
width: LOCAL_GALLERY_BUTTON_SIZE,
height: LOCAL_GALLERY_BUTTON_SIZE,
marginBottom: CAPTURE_BUTTON_SIZE / 2 - LOCAL_GALLERY_BUTTON_SIZE / 2,
},
button: {
marginTop: CONTENT_SPACING,
width: BUTTON_SIZE,
height: BUTTON_SIZE,
borderRadius: BUTTON_SIZE / 2,
backgroundColor: "rgba(140, 140, 140, 0.3)",
justifyContent: "center",
alignItems: "center",
},
rightButtonRow: {
position: "absolute",
right: CONTENT_SPACING,
top: SAFE_AREA_PADDING.paddingTop
},
});


@ -34,3 +34,6 @@ export const SCREEN_HEIGHT = Platform.select<number>({
android: Dimensions.get("screen").height - StaticSafeAreaInsets.safeAreaInsetsBottom,
ios: Dimensions.get("window").height,
}) as number;
// Capture Button
export const CAPTURE_BUTTON_SIZE = 78;


@ -0,0 +1,332 @@
import React, { useCallback, useMemo, useRef } from "react";
import { Platform, StyleSheet, View, ViewProps } from "react-native";
import {
PanGestureHandler,
PanGestureHandlerGestureEvent,
State,
TapGestureHandler,
TapGestureHandlerStateChangeEvent,
} from "react-native-gesture-handler";
import Reanimated, {
cancelAnimation,
Easing,
Extrapolate,
interpolate,
useAnimatedStyle,
withSpring,
withTiming,
useAnimatedGestureHandler,
useSharedValue,
} from "react-native-reanimated";
import type { Camera, PhotoFile, TakePhotoOptions, TakeSnapshotOptions, VideoFile } from "react-native-vision-camera";
import { CAPTURE_BUTTON_SIZE, SCREEN_HEIGHT, SCREEN_WIDTH, USE_SNAPSHOT_ON_ANDROID } from "./../Constants";
const PAN_GESTURE_HANDLER_FAIL_X = [-SCREEN_WIDTH, SCREEN_WIDTH];
const PAN_GESTURE_HANDLER_ACTIVE_Y = [-2, 2];
const IS_ANDROID = Platform.OS === "android";
const START_RECORDING_DELAY = 200;
interface Props extends ViewProps {
camera: React.RefObject<Camera>;
onMediaCaptured: (
media: PhotoFile | VideoFile,
type: "photo" | "video"
) => void;
cameraZoom: Reanimated.SharedValue<number>;
flash: "off" | "on" | "auto";
enabled: boolean;
setIsPressingButton: (isPressingButton: boolean) => void;
}
const _CaptureButton: React.FC<Props> = ({
camera,
onMediaCaptured,
cameraZoom,
flash,
enabled,
setIsPressingButton,
style,
...props
}): React.ReactElement => {
const pressDownDate = useRef<Date | undefined>(undefined);
const isRecording = useRef(false);
const recordingProgress = useSharedValue(0);
const takePhotoOptions = useMemo<TakePhotoOptions & TakeSnapshotOptions>(
() => ({
photoCodec: "jpeg",
qualityPrioritization: "speed",
flash: flash,
quality: 90,
skipMetadata: true,
}),
[flash]
);
const isPressingButton = useSharedValue(false);
//#region Camera Capture
const takePhoto = useCallback(async () => {
try {
if (camera.current == null) throw new Error("Camera ref is null!");
// If we're on Android and flash is disabled, we can use the "snapshot" method.
// This takes a snapshot of the current SurfaceView, which results in a faster
// capture rate at the cost of greatly reduced quality.
const photoMethod =
USE_SNAPSHOT_ON_ANDROID &&
IS_ANDROID &&
takePhotoOptions.flash === "off"
? "snapshot"
: "photo";
console.log(`Taking ${photoMethod}...`);
const photo =
photoMethod === "snapshot"
? await camera.current.takeSnapshot(takePhotoOptions)
: await camera.current.takePhoto(takePhotoOptions);
onMediaCaptured(photo, "photo");
} catch (e) {
console.error('Failed to take photo!', e);
}
}, [camera, onMediaCaptured, takePhotoOptions]);
const onStoppedRecording = useCallback(() => {
isRecording.current = false;
cancelAnimation(recordingProgress);
console.log(`stopped recording video!`);
}, [recordingProgress]);
const stopRecording = useCallback(async () => {
try {
if (camera.current == null) throw new Error("Camera ref is null!");
console.log("calling stopRecording()...");
await camera.current.stopRecording();
console.log("called stopRecording()!");
} catch (e) {
console.error(`failed to stop recording!`, e);
}
}, [camera]);
const startRecording = useCallback(() => {
try {
if (camera.current == null) throw new Error("Camera ref is null!");
console.log(`calling startRecording()...`);
camera.current.startRecording({
flash: flash,
onRecordingError: (error) => {
console.error('Recording failed!', error);
onStoppedRecording();
},
onRecordingFinished: (video) => {
console.log(`Recording successfully finished! ${video.path}`);
onMediaCaptured(video, "video");
onStoppedRecording();
},
});
// TODO: wait until startRecording returns to actually find out if the recording has successfully started
console.log(`called startRecording()!`);
isRecording.current = true;
} catch (e) {
console.error(`failed to start recording!`, e, "camera");
}
}, [
camera,
flash,
onMediaCaptured,
onStoppedRecording,
recordingProgress,
stopRecording,
]);
//#endregion
//#region Tap handler
const tapHandler = useRef<TapGestureHandler>();
const onHandlerStateChanged = useCallback(
async ({ nativeEvent: event }: TapGestureHandlerStateChangeEvent) => {
// This is the gesture handler for the circular "shutter" button.
// Once the finger touches the button (State.BEGAN), we enter "capture mode" (disabled tab bar), set `pressDownDate`
// to the time of the press-down event and start a 200ms timeout. If `pressDownDate` hasn't changed after those
// 200ms, the user is still holding down the "shutter" button, so we start recording a video.
//
// Once the finger releases the button (State.END/FAILED/CANCELLED), we leave "capture mode" (enable tab bar) and
// check `pressDownDate`: if the press-down was less than 200ms ago, the user's intention was to take a photo, so we
// call takePhoto(). Otherwise the user has been holding the button down (and recording) this entire time, so we
// call stopRecording() to finish the video.
console.debug(`state: ${Object.keys(State)[event.state]}`);
switch (event.state) {
case State.BEGAN: {
// enter "recording mode"
recordingProgress.value = 0;
isPressingButton.value = true;
const now = new Date();
pressDownDate.current = now;
setTimeout(() => {
if (pressDownDate.current === now) {
// user is still pressing down after 200ms, so his intention is to create a video
startRecording();
}
}, START_RECORDING_DELAY);
setIsPressingButton(true);
return;
}
case State.END:
case State.FAILED:
case State.CANCELLED: {
// exit "recording mode"
try {
if (pressDownDate.current == null)
throw new Error("PressDownDate ref .current was null!");
const now = new Date();
const diff = now.getTime() - pressDownDate.current.getTime();
pressDownDate.current = undefined;
if (diff < START_RECORDING_DELAY) {
// user has released the button within 200ms, so his intention is to take a single picture.
await takePhoto();
} else {
// user has held the button for more than 200ms, so he has been recording this entire time.
await stopRecording();
}
} finally {
setTimeout(() => {
isPressingButton.value = false;
setIsPressingButton(false);
}, 500);
}
return;
}
default:
break;
}
},
[
isPressingButton,
recordingProgress,
setIsPressingButton,
startRecording,
stopRecording,
takePhoto,
]
);
//#endregion
//#region Pan handler
const panHandler = useRef<PanGestureHandler>();
const onPanGestureEvent = useAnimatedGestureHandler<
PanGestureHandlerGestureEvent,
{ offsetY?: number; startY?: number }
>({
onStart: (event, context) => {
context.startY = event.absoluteY;
const yForFullZoom = context.startY * 0.7;
const offsetYForFullZoom = context.startY - yForFullZoom;
// map the current [0 ... 1] zoom to a [0 ... offsetYForFullZoom] finger offset
context.offsetY = interpolate(
Math.sqrt(cameraZoom.value),
[0, 1],
[0, offsetYForFullZoom],
Extrapolate.CLAMP
);
},
onActive: (event, context) => {
const offset = context.offsetY ?? 0;
const startY = context.startY ?? SCREEN_HEIGHT;
const yForFullZoom = startY * 0.7;
const zoom = interpolate(
event.absoluteY - offset,
[yForFullZoom, startY],
[1, 0],
Extrapolate.CLAMP
);
cameraZoom.value = zoom ** 2;
},
});
//#endregion
const shadowStyle = useAnimatedStyle(
() => ({
transform: [
{
scale: withSpring(isPressingButton.value ? 1.1 : 1, {
mass: 0.5,
damping: 35,
stiffness: 300,
}),
},
],
}),
[isPressingButton]
);
const buttonStyle = useAnimatedStyle(
() => ({
opacity: withTiming(enabled ? 1 : 0.3, {
duration: 100,
easing: Easing.linear,
}),
transform: [
{
scale: withSpring(
enabled ? (isPressingButton.value ? 1 : 0.9) : 0.6,
{
stiffness: 500,
damping: 300,
}
),
},
],
}),
[enabled, isPressingButton]
);
return (
<TapGestureHandler
enabled={enabled}
ref={tapHandler}
onHandlerStateChange={onHandlerStateChanged}
shouldCancelWhenOutside={false}
maxDurationMs={99999999} // <-- this prevents the TapGestureHandler from going to State.FAILED when the user moves his finger outside of the child view (to zoom)
simultaneousHandlers={panHandler}>
<Reanimated.View {...props} style={[buttonStyle, style]}>
<PanGestureHandler
enabled={enabled}
ref={panHandler}
failOffsetX={PAN_GESTURE_HANDLER_FAIL_X}
activeOffsetY={PAN_GESTURE_HANDLER_ACTIVE_Y}
onGestureEvent={onPanGestureEvent}
simultaneousHandlers={tapHandler}>
<Reanimated.View style={styles.flex}>
<Reanimated.View style={[styles.shadow, shadowStyle]} />
<View style={styles.button} />
</Reanimated.View>
</PanGestureHandler>
</Reanimated.View>
</TapGestureHandler>
);
};
export const CaptureButton = React.memo(_CaptureButton);
const styles = StyleSheet.create({
flex: {
flex: 1,
},
shadow: {
position: "absolute",
width: CAPTURE_BUTTON_SIZE,
height: CAPTURE_BUTTON_SIZE,
borderRadius: CAPTURE_BUTTON_SIZE / 2,
borderWidth: 3,
borderColor: "rgba(225, 48, 108, 0.7)",
},
button: {
width: CAPTURE_BUTTON_SIZE,
height: CAPTURE_BUTTON_SIZE,
borderRadius: CAPTURE_BUTTON_SIZE / 2,
borderWidth: CAPTURE_BUTTON_SIZE * 0.1,
borderColor: "white",
},
});