// CaptureButton.tsx — circular shutter button: tap to take a photo, hold to record video, pan up to zoom.

import React, { useCallback, useMemo, useRef } from 'react';
import { StyleSheet, View, ViewProps } from 'react-native';
import {
  PanGestureHandler,
  PanGestureHandlerGestureEvent,
  State,
  TapGestureHandler,
  TapGestureHandlerStateChangeEvent,
} from 'react-native-gesture-handler';
import Reanimated, {
  cancelAnimation,
  Easing,
  Extrapolate,
  interpolate,
  useAnimatedStyle,
  withSpring,
  withTiming,
  useAnimatedGestureHandler,
  useSharedValue,
  withRepeat,
} from 'react-native-reanimated';
import type { Camera, PhotoFile, TakePhotoOptions, TakeSnapshotOptions, VideoFile } from 'react-native-vision-camera';
import { CAPTURE_BUTTON_SIZE, SCREEN_HEIGHT, SCREEN_WIDTH } from './../Constants';
const PAN_GESTURE_HANDLER_FAIL_X = [-SCREEN_WIDTH, SCREEN_WIDTH];
const PAN_GESTURE_HANDLER_ACTIVE_Y = [-2, 2];
const START_RECORDING_DELAY = 200;
2021-02-19 19:51:59 +01:00
const BORDER_WIDTH = CAPTURE_BUTTON_SIZE * 0.1;
2021-02-19 19:06:28 +01:00
interface Props extends ViewProps {
camera: React.RefObject<Camera>;
2021-02-20 17:07:10 +01:00
onMediaCaptured: (media: PhotoFile | VideoFile, type: 'photo' | 'video') => void;
2021-02-19 19:06:28 +01:00
minZoom: number;
maxZoom: number;
2021-02-19 19:06:28 +01:00
cameraZoom: Reanimated.SharedValue<number>;
feat: Frame Processors for Android (#196) * Create android gradle build setup * Fix `prefab` config * Add `pickFirst **/*.so` to example build.gradle * fix REA path * cache gradle builds * Update validate-android.yml * Create Native Proxy * Copy REA header * implement ctor * Rename CameraViewModule -> FrameProcessorRuntimeManager * init FrameProcessorRuntimeManager * fix name * Update FrameProcessorRuntimeManager.h * format * Create AndroidErrorHandler.h * Initialize runtime and install JSI funcs * Update FrameProcessorRuntimeManager.cpp * Update CameraViewModule.kt * Make CameraView hybrid C++ class to find view & set frame processor * Update FrameProcessorRuntimeManager.cpp * pass function by rvalue * pass by const && * extract hermes and JSC REA * pass `FOR_HERMES` * correctly prepare JSC and Hermes * Update CMakeLists.txt * add missing hermes include * clean up imports * Create JImageProxy.h * pass ImageProxy to JNI as `jobject` * try use `JImageProxy` C++ wrapper type * Use `local_ref<JImageProxy>` * Create `JImageProxyHostObject` for JSI interop * debug call to frame processor * Unset frame processor * Fix CameraView native part not being registered * close image * use `jobject` instead of `JImageProxy` for now :( * fix hermes build error * Set enable FP callback * fix JNI call * Update CameraView.cpp * Get Format * Create plugin abstract * Make `FrameProcessorPlugin` a hybrid object * Register plugin CXX * Call `registerPlugin` * Catch * remove JSI * Create sample QR code plugin * register plugins * Fix missing JNI binding * Add `mHybridData` * prefix name with two underscores (`__`) * Update CameraPage.tsx * wrap `ImageProxy` in host object * Use `jobject` for HO box * Update JImageProxy.h * reinterpret jobject * Try using `JImageProxy` instead of `jobject` * Update JImageProxy.h * get bytes per row and plane count * Update CameraView.cpp * Return base * add some docs and JNI JSI conversion * indent * Convert JSI value to JNI jobject * using namespace 
facebook * Try using class * Use plain old Object[] * Try convert JNI -> JSI * fix decl * fix bool init * Correctly link folly * Update CMakeLists.txt * Convert Map to Object * Use folly for Map and Array * Return `alias_ref<jobject>` instead of raw `jobject` * fix JNI <-> JSI conversion * Update JSIJNIConversion.cpp * Log parameters * fix params index offset * add more test cases * Update FRAME_PROCESSORS_CREATE_OVERVIEW.mdx * fix types * Rename to example plugin * remove support for hashmap * Try use HashMap iterable fbjni binding * try using JReadableArray/JReadableMap * Fix list return values * Update JSIJNIConversion.cpp * Update JSIJNIConversion.cpp * (iOS) Rename ObjC QR Code Plugin to Example Plugin * Rename Swift plugin QR -> Example * Update ExamplePluginSwift.swift * Fix Map/Dictionary logging format * Update ExampleFrameProcessorPlugin.m * Reconfigure session if frame processor changed * Handle use-cases via `maxUseCasesCount` * Don't crash app on `configureSession` error * Document "use-cases" * Update DEVICES.mdx * fix merge * Make `const &` * iOS: Automatically enable `video` if a `frameProcessor` is set * Update CameraView.cpp * fix docs * Automatically fallback to snapshot capture if `supportsParallelVideoProcessing` is false. 
* Fix lookup * Update CameraView.kt * Implement `frameProcessorFps` * Finalize Frame Processor Plugin Hybrid * Update CameraViewModule.kt * Support `flash` on `takeSnapshot()` * Update docs * Add docs * Update CameraPage.tsx * Attribute NonNull * remove unused imports * Add Android docs for Frame Processors * Make JNI HashMap <-> JSI Object conversion faster directly access `toHashMap` instead of going through java * add todo * Always run `prepareJSC` and `prepareHermes` * switch jsc and hermes * Specify ndkVersion `21.4.7075529` * Update gradle.properties * Update gradle.properties * Create .aar * Correctly prepare android package * Update package.json * Update package.json * remove `prefab` build feature * split * Add docs for registering the FP plugin * Add step for dep * Update CaptureButton.tsx * Move to `reanimated-headers/` * Exclude reanimated-headers from cpplint * disable `build/include_order` rule * cpplint fixes * perf: Make `JSIJNIConversion` a `namespace` instead of `class` * Ignore runtime/references for `convert` funcs * Build Android .aar in CI * Run android build script only on `prepack` * Update package.json * Update package.json * Update build-android-npm-package.sh * Move to `yarn build` * Also install node_modules in example step * Update validate-android.yml * sort imports * fix torch * Run ImageAnalysis on `FrameProcessorThread` * Update Errors.kt * Add clean android script * Upgrade reanimated to 2.3.0-alpha.1 * Revert "Upgrade reanimated to 2.3.0-alpha.1" This reverts commit c1d3bed5e03728d0b5e335a359524ff4f56f5035. * :warning: TEMP FIX: hotfix reanimated build.gradle * Update CameraView+TakeSnapshot.kt * :warning: TEMP FIX: Disable ktlint action for now * Update clean.sh * Set max heap size to 4g * rebuild lockfiles * Update Podfile.lock * rename * Build lib .aar before example/
2021-06-27 12:37:54 +02:00
flash: 'off' | 'on';
2021-02-19 19:06:28 +01:00
enabled: boolean;
setIsPressingButton: (isPressingButton: boolean) => void;
}
const _CaptureButton: React.FC<Props> = ({
camera,
onMediaCaptured,
minZoom,
maxZoom,
2021-02-19 19:06:28 +01:00
cameraZoom,
flash,
enabled,
setIsPressingButton,
style,
...props
}): React.ReactElement => {
const pressDownDate = useRef<Date | undefined>(undefined);
const isRecording = useRef(false);
const recordingProgress = useSharedValue(0);
const takePhotoOptions = useMemo<TakePhotoOptions & TakeSnapshotOptions>(
() => ({
2021-02-20 17:07:10 +01:00
photoCodec: 'jpeg',
qualityPrioritization: 'speed',
2021-02-19 19:06:28 +01:00
flash: flash,
quality: 90,
skipMetadata: true,
}),
2021-02-20 17:07:10 +01:00
[flash],
2021-02-19 19:06:28 +01:00
);
const isPressingButton = useSharedValue(false);
//#region Camera Capture
const takePhoto = useCallback(async () => {
try {
2021-02-20 17:07:10 +01:00
if (camera.current == null) throw new Error('Camera ref is null!');
2021-02-19 19:06:28 +01:00
console.log('Taking photo...');
const photo = await camera.current.takePhoto(takePhotoOptions);
2021-02-20 17:07:10 +01:00
onMediaCaptured(photo, 'photo');
2021-02-19 19:06:28 +01:00
} catch (e) {
console.error('Failed to take photo!', e);
}
}, [camera, onMediaCaptured, takePhotoOptions]);
const onStoppedRecording = useCallback(() => {
isRecording.current = false;
cancelAnimation(recordingProgress);
2021-02-20 17:07:10 +01:00
console.log('stopped recording video!');
2021-02-19 19:06:28 +01:00
}, [recordingProgress]);
const stopRecording = useCallback(async () => {
try {
2021-02-20 17:07:10 +01:00
if (camera.current == null) throw new Error('Camera ref is null!');
2021-02-19 19:06:28 +01:00
2021-02-20 17:07:10 +01:00
console.log('calling stopRecording()...');
2021-02-19 19:06:28 +01:00
await camera.current.stopRecording();
2021-02-20 17:07:10 +01:00
console.log('called stopRecording()!');
2021-02-19 19:06:28 +01:00
} catch (e) {
2021-02-20 17:07:10 +01:00
console.error('failed to stop recording!', e);
2021-02-19 19:06:28 +01:00
}
}, [camera]);
const startRecording = useCallback(() => {
try {
2021-02-20 17:07:10 +01:00
if (camera.current == null) throw new Error('Camera ref is null!');
2021-02-19 19:06:28 +01:00
2021-02-20 17:07:10 +01:00
console.log('calling startRecording()...');
2021-02-19 19:06:28 +01:00
camera.current.startRecording({
flash: flash,
onRecordingError: (error) => {
console.error('Recording failed!', error);
onStoppedRecording();
},
onRecordingFinished: (video) => {
console.log(`Recording successfully finished! ${video.path}`);
2021-02-20 17:07:10 +01:00
onMediaCaptured(video, 'video');
2021-02-19 19:06:28 +01:00
onStoppedRecording();
},
});
// TODO: wait until startRecording returns to actually find out if the recording has successfully started
2021-02-20 17:07:10 +01:00
console.log('called startRecording()!');
2021-02-19 19:06:28 +01:00
isRecording.current = true;
} catch (e) {
2021-02-20 17:07:10 +01:00
console.error('failed to start recording!', e, 'camera');
2021-02-19 19:06:28 +01:00
}
2021-02-23 11:57:43 +01:00
}, [camera, flash, onMediaCaptured, onStoppedRecording]);
2021-02-19 19:06:28 +01:00
//#endregion
//#region Tap handler
const tapHandler = useRef<TapGestureHandler>();
const onHandlerStateChanged = useCallback(
async ({ nativeEvent: event }: TapGestureHandlerStateChangeEvent) => {
// This is the gesture handler for the circular "shutter" button.
// Once the finger touches the button (State.BEGAN), a photo is being taken and "capture mode" is entered. (disabled tab bar)
// Also, we set `pressDownDate` to the time of the press down event, and start a 200ms timeout. If the `pressDownDate` hasn't changed
// after the 200ms, the user is still holding down the "shutter" button. In that case, we start recording.
//
// Once the finger releases the button (State.END/FAILED/CANCELLED), we leave "capture mode" (enable tab bar) and check the `pressDownDate`,
// if `pressDownDate` was less than 200ms ago, we know that the intention of the user is to take a photo. We check the `takePhotoPromise` if
// there already is an ongoing (or already resolved) takePhoto() call (remember that we called takePhoto() when the user pressed down), and
// if yes, use that. If no, we just try calling takePhoto() again
console.debug(`state: ${Object.keys(State)[event.state]}`);
switch (event.state) {
case State.BEGAN: {
// enter "recording mode"
recordingProgress.value = 0;
isPressingButton.value = true;
const now = new Date();
pressDownDate.current = now;
setTimeout(() => {
if (pressDownDate.current === now) {
// user is still pressing down after 200ms, so his intention is to create a video
startRecording();
}
}, START_RECORDING_DELAY);
setIsPressingButton(true);
return;
}
case State.END:
case State.FAILED:
case State.CANCELLED: {
// exit "recording mode"
try {
2021-02-20 17:07:10 +01:00
if (pressDownDate.current == null) throw new Error('PressDownDate ref .current was null!');
2021-02-19 19:06:28 +01:00
const now = new Date();
const diff = now.getTime() - pressDownDate.current.getTime();
pressDownDate.current = undefined;
if (diff < START_RECORDING_DELAY) {
// user has released the button within 200ms, so his intention is to take a single picture.
await takePhoto();
} else {
// user has held the button for more than 200ms, so he has been recording this entire time.
await stopRecording();
}
} finally {
setTimeout(() => {
isPressingButton.value = false;
setIsPressingButton(false);
}, 500);
}
return;
}
default:
break;
}
},
2021-02-20 17:07:10 +01:00
[isPressingButton, recordingProgress, setIsPressingButton, startRecording, stopRecording, takePhoto],
2021-02-19 19:06:28 +01:00
);
//#endregion
//#region Pan handler
const panHandler = useRef<PanGestureHandler>();
2021-02-20 17:07:10 +01:00
const onPanGestureEvent = useAnimatedGestureHandler<PanGestureHandlerGestureEvent, { offsetY?: number; startY?: number }>({
2021-02-19 19:06:28 +01:00
onStart: (event, context) => {
context.startY = event.absoluteY;
const yForFullZoom = context.startY * 0.7;
const offsetYForFullZoom = context.startY - yForFullZoom;
// extrapolate [0 ... 1] zoom -> [0 ... Y_FOR_FULL_ZOOM] finger position
context.offsetY = interpolate(cameraZoom.value, [minZoom, maxZoom], [0, offsetYForFullZoom], Extrapolate.CLAMP);
2021-02-19 19:06:28 +01:00
},
onActive: (event, context) => {
const offset = context.offsetY ?? 0;
const startY = context.startY ?? SCREEN_HEIGHT;
const yForFullZoom = startY * 0.7;
cameraZoom.value = interpolate(event.absoluteY - offset, [yForFullZoom, startY], [maxZoom, minZoom], Extrapolate.CLAMP);
2021-02-19 19:06:28 +01:00
},
});
//#endregion
const shadowStyle = useAnimatedStyle(
() => ({
transform: [
{
2021-02-19 19:51:59 +01:00
scale: withSpring(isPressingButton.value ? 1 : 0, {
mass: 1,
2021-02-19 19:06:28 +01:00
damping: 35,
stiffness: 300,
}),
},
],
}),
2021-02-20 17:07:10 +01:00
[isPressingButton],
2021-02-19 19:06:28 +01:00
);
2021-02-20 17:07:10 +01:00
const buttonStyle = useAnimatedStyle(() => {
let scale: number;
if (enabled) {
if (isPressingButton.value) {
scale = withRepeat(
withSpring(1, {
2021-02-19 19:54:30 +01:00
stiffness: 100,
damping: 1000,
2021-02-20 17:07:10 +01:00
}),
-1,
true,
);
2021-02-19 19:54:30 +01:00
} else {
2021-02-20 17:07:10 +01:00
scale = withSpring(0.9, {
2021-02-19 19:54:30 +01:00
stiffness: 500,
damping: 300,
});
}
2021-02-20 17:07:10 +01:00
} else {
scale = withSpring(0.6, {
stiffness: 500,
damping: 300,
});
}
2021-02-19 19:54:30 +01:00
2021-02-20 17:07:10 +01:00
return {
opacity: withTiming(enabled ? 1 : 0.3, {
duration: 100,
easing: Easing.linear,
}),
transform: [
{
scale: scale,
},
],
};
}, [enabled, isPressingButton]);
2021-02-19 19:06:28 +01:00
return (
<TapGestureHandler
enabled={enabled}
ref={tapHandler}
onHandlerStateChange={onHandlerStateChanged}
shouldCancelWhenOutside={false}
maxDurationMs={99999999} // <-- this prevents the TapGestureHandler from going to State.FAILED when the user moves his finger outside of the child view (to zoom)
simultaneousHandlers={panHandler}>
<Reanimated.View {...props} style={[buttonStyle, style]}>
<PanGestureHandler
enabled={enabled}
ref={panHandler}
failOffsetX={PAN_GESTURE_HANDLER_FAIL_X}
activeOffsetY={PAN_GESTURE_HANDLER_ACTIVE_Y}
onGestureEvent={onPanGestureEvent}
simultaneousHandlers={tapHandler}>
<Reanimated.View style={styles.flex}>
<Reanimated.View style={[styles.shadow, shadowStyle]} />
<View style={styles.button} />
</Reanimated.View>
</PanGestureHandler>
</Reanimated.View>
</TapGestureHandler>
);
};
export const CaptureButton = React.memo(_CaptureButton);
const styles = StyleSheet.create({
flex: {
flex: 1,
},
shadow: {
2021-02-20 17:07:10 +01:00
position: 'absolute',
2021-02-19 19:06:28 +01:00
width: CAPTURE_BUTTON_SIZE,
height: CAPTURE_BUTTON_SIZE,
borderRadius: CAPTURE_BUTTON_SIZE / 2,
2021-02-20 17:07:10 +01:00
backgroundColor: '#e34077',
2021-02-19 19:06:28 +01:00
},
button: {
width: CAPTURE_BUTTON_SIZE,
height: CAPTURE_BUTTON_SIZE,
borderRadius: CAPTURE_BUTTON_SIZE / 2,
2021-02-19 19:51:59 +01:00
borderWidth: BORDER_WIDTH,
2021-02-20 17:07:10 +01:00
borderColor: 'white',
2021-02-19 19:06:28 +01:00
},
});