react-native-vision-camera/src/Camera.tsx
Marc Rousavy 87e6bb710e
feat: Frame Processors for Android (#196)
* Create android gradle build setup

* Fix `prefab` config

* Add `pickFirst **/*.so` to example build.gradle

* fix REA path

* cache gradle builds

* Update validate-android.yml

* Create Native Proxy

* Copy REA header

* implement ctor

* Rename CameraViewModule -> FrameProcessorRuntimeManager

* init FrameProcessorRuntimeManager

* fix name

* Update FrameProcessorRuntimeManager.h

* format

* Create AndroidErrorHandler.h

* Initialize runtime and install JSI funcs

* Update FrameProcessorRuntimeManager.cpp

* Update CameraViewModule.kt

* Make CameraView hybrid C++ class to find view & set frame processor

* Update FrameProcessorRuntimeManager.cpp

* pass function by rvalue

* pass by const &&

* extract hermes and JSC REA

* pass `FOR_HERMES`

* correctly prepare JSC and Hermes

* Update CMakeLists.txt

* add missing hermes include

* clean up imports

* Create JImageProxy.h

* pass ImageProxy to JNI as `jobject`

* try use `JImageProxy` C++ wrapper type

* Use `local_ref<JImageProxy>`

* Create `JImageProxyHostObject` for JSI interop

* debug call to frame processor

* Unset frame processor

* Fix CameraView native part not being registered

* close image

* use `jobject` instead of `JImageProxy` for now :(

* fix hermes build error

* Set enable FP callback

* fix JNI call

* Update CameraView.cpp

* Get Format

* Create plugin abstract

* Make `FrameProcessorPlugin` a hybrid object

* Register plugin CXX

* Call `registerPlugin`

* Catch

* remove JSI

* Create sample QR code plugin

* register plugins

* Fix missing JNI binding

* Add `mHybridData`

* prefix name with two underscores (`__`)

* Update CameraPage.tsx

* wrap `ImageProxy` in host object

* Use `jobject` for HO box

* Update JImageProxy.h

* reinterpret jobject

* Try using `JImageProxy` instead of `jobject`

* Update JImageProxy.h

* get bytes per row and plane count

* Update CameraView.cpp

* Return base

* add some docs and JNI JSI conversion

* indent

* Convert JSI value to JNI jobject

* using namespace facebook

* Try using class

* Use plain old Object[]

* Try convert JNI -> JSI

* fix decl

* fix bool init

* Correctly link folly

* Update CMakeLists.txt

* Convert Map to Object

* Use folly for Map and Array

* Return `alias_ref<jobject>` instead of raw `jobject`

* fix JNI <-> JSI conversion

* Update JSIJNIConversion.cpp

* Log parameters

* fix params index offset

* add more test cases

* Update FRAME_PROCESSORS_CREATE_OVERVIEW.mdx

* fix types

* Rename to example plugin

* remove support for hashmap

* Try use HashMap iterable fbjni binding

* try using JReadableArray/JReadableMap

* Fix list return values

* Update JSIJNIConversion.cpp

* Update JSIJNIConversion.cpp

* (iOS) Rename ObjC QR Code Plugin to Example Plugin

* Rename Swift plugin QR -> Example

* Update ExamplePluginSwift.swift

* Fix Map/Dictionary logging format

* Update ExampleFrameProcessorPlugin.m

* Reconfigure session if frame processor changed

* Handle use-cases via `maxUseCasesCount`

* Don't crash app on `configureSession` error

* Document "use-cases"

* Update DEVICES.mdx

* fix merge

* Make `const &`

* iOS: Automatically enable `video` if a `frameProcessor` is set

* Update CameraView.cpp

* fix docs

* Automatically fallback to snapshot capture if `supportsParallelVideoProcessing` is false.

* Fix lookup

* Update CameraView.kt

* Implement `frameProcessorFps`

* Finalize Frame Processor Plugin Hybrid

* Update CameraViewModule.kt

* Support `flash` on `takeSnapshot()`

* Update docs

* Add docs

* Update CameraPage.tsx

* Attribute NonNull

* remove unused imports

* Add Android docs for Frame Processors

* Make JNI HashMap <-> JSI Object conversion faster

directly access `toHashMap` instead of going through java

* add todo

* Always run `prepareJSC` and `prepareHermes`

* switch jsc and hermes

* Specify ndkVersion `21.4.7075529`

* Update gradle.properties

* Update gradle.properties

* Create .aar

* Correctly prepare android package

* Update package.json

* Update package.json

* remove `prefab` build feature

* split

* Add docs for registering the FP plugin

* Add step for dep

* Update CaptureButton.tsx

* Move to `reanimated-headers/`

* Exclude reanimated-headers from cpplint

* disable `build/include_order` rule

* cpplint fixes

* perf: Make `JSIJNIConversion` a `namespace` instead of `class`

* Ignore runtime/references for `convert` funcs

* Build Android .aar in CI

* Run android build script only on `prepack`

* Update package.json

* Update package.json

* Update build-android-npm-package.sh

* Move to `yarn build`

* Also install node_modules in example step

* Update validate-android.yml

* sort imports

* fix torch

* Run ImageAnalysis on `FrameProcessorThread`

* Update Errors.kt

* Add clean android script

* Upgrade reanimated to 2.3.0-alpha.1

* Revert "Upgrade reanimated to 2.3.0-alpha.1"

This reverts commit c1d3bed5e03728d0b5e335a359524ff4f56f5035.

* ⚠️ TEMP FIX: hotfix reanimated build.gradle

* Update CameraView+TakeSnapshot.kt

* ⚠️ TEMP FIX: Disable ktlint action for now

* Update clean.sh

* Set max heap size to 4g

* rebuild lockfiles

* Update Podfile.lock

* rename

* Build lib .aar before example/
2021-06-27 12:37:54 +02:00


import React from 'react';
import { requireNativeComponent, NativeModules, NativeSyntheticEvent, findNodeHandle, NativeMethods, Platform } from 'react-native';
import type { CameraDevice } from './CameraDevice';
import type { ErrorWithCause } from './CameraError';
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
import type { CameraProps } from './CameraProps';
import type { Frame } from './Frame';
import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
import type { Point } from './Point';
import type { TakeSnapshotOptions } from './Snapshot';
import type { RecordVideoOptions, VideoFile } from './VideoFile';
//#region Types
export type CameraPermissionStatus = 'authorized' | 'not-determined' | 'denied' | 'restricted';
export type CameraPermissionRequestResult = 'authorized' | 'denied';
interface OnErrorEvent {
code: string;
message: string;
cause?: ErrorWithCause;
}
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
cameraId: string;
onInitialized?: (event: NativeSyntheticEvent<void>) => void;
onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
};
type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
//#endregion
// NativeModules automatically resolves 'CameraView' to 'CameraViewModule'
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const CameraModule = NativeModules.CameraView;
if (CameraModule == null) console.error("Camera: Native Module 'CameraView' was null! Did you run pod install?");
//#region Camera Component
/**
* ### A powerful `<Camera>` component.
*
* Read the [VisionCamera documentation](https://mrousavy.github.io/react-native-vision-camera/) for more information.
*
* The `<Camera>` component's most important (and therefore _required_) properties are:
*
* * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevices | useCameraDevices()} hook, or manually by using the {@linkcode Camera.getAvailableCameraDevices Camera.getAvailableCameraDevices()} function.
* * {@linkcode CameraProps.isActive | isActive}: A boolean value that specifies whether the Camera should actively stream video frames or not. This can be compared to a Video component, where `isActive` specifies whether the video is paused or not. If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again.
*
* @example
* ```tsx
* function App() {
* const devices = useCameraDevices('wide-angle-camera')
* const device = devices.back
*
* if (device == null) return <LoadingView />
* return (
* <Camera
* style={StyleSheet.absoluteFill}
* device={device}
* isActive={true}
* />
* )
* }
* ```
*
* @component
*/
export class Camera extends React.PureComponent<CameraProps> {
/**
* @internal
*/
static displayName = 'Camera';
/**
* @internal
*/
displayName = Camera.displayName;
private lastFrameProcessor: ((frame: Frame) => void) | undefined;
private readonly ref: React.RefObject<RefType>;
/**
* @internal
*/
constructor(props: CameraProps) {
super(props);
this.onInitialized = this.onInitialized.bind(this);
this.onError = this.onError.bind(this);
this.ref = React.createRef<RefType>();
this.lastFrameProcessor = undefined;
}
private get handle(): number | null {
const nodeHandle = findNodeHandle(this.ref.current);
if (nodeHandle == null) console.error('Camera: findNodeHandle(ref) returned null! Does the Camera view exist in the native view tree?');
return nodeHandle;
}
//#region View-specific functions (UIViewManager)
/**
* Take a single photo and write its content to a temporary file.
*
* @throws {@linkcode CameraCaptureError} When any kind of error occurred while capturing the photo. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
* @example
* ```ts
* const photo = await camera.current.takePhoto({
* qualityPrioritization: 'quality',
* flash: 'on',
* enableAutoRedEyeReduction: true
* })
* ```
*/
public async takePhoto(options?: TakePhotoOptions): Promise<PhotoFile> {
try {
return await CameraModule.takePhoto(this.handle, options ?? {});
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Take a snapshot of the current preview view.
*
* This can be used as an alternative to {@linkcode Camera.takePhoto | takePhoto()} if speed is more important than quality
*
* @throws {@linkcode CameraCaptureError} When any kind of error occurred while taking a snapshot. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
*
* @platform Android
* @example
* ```ts
* const photo = await camera.current.takeSnapshot({
* quality: 85,
* skipMetadata: true
* })
* ```
*/
public async takeSnapshot(options?: TakeSnapshotOptions): Promise<PhotoFile> {
if (Platform.OS !== 'android')
throw new CameraCaptureError('capture/capture-type-not-supported', `'takeSnapshot()' is not available on ${Platform.OS}!`);
try {
return await CameraModule.takeSnapshot(this.handle, options ?? {});
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Start a new video recording.
*
* Records in the following formats:
* * **iOS**: QuickTime (`.mov`)
* * **Android**: MPEG4 (`.mp4`)
*
* @blocking This function is synchronized/blocking.
*
* @throws {@linkcode CameraCaptureError} When any kind of error occurred while starting the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
*
* @example
* ```ts
* camera.current.startRecording({
* onRecordingFinished: (video) => console.log(video),
* onRecordingError: (error) => console.error(error),
* })
* setTimeout(() => {
* camera.current.stopRecording()
* }, 5000)
* ```
*/
public startRecording(options: RecordVideoOptions): void {
const { onRecordingError, onRecordingFinished, ...passThroughOptions } = options;
if (typeof onRecordingError !== 'function' || typeof onRecordingFinished !== 'function')
throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!');
const onRecordCallback = (video?: VideoFile, error?: CameraCaptureError): void => {
if (error != null) return onRecordingError(error);
if (video != null) return onRecordingFinished(video);
};
// TODO: Use TurboModules to either make this a sync invocation, or make it async.
try {
CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback);
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Stop the current video recording.
*
* @throws {@linkcode CameraCaptureError} When any kind of error occurred while stopping the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
*
* @example
* ```ts
* await camera.current.startRecording()
* setTimeout(async () => {
* const video = await camera.current.stopRecording()
* }, 5000)
* ```
*/
public async stopRecording(): Promise<void> {
try {
return await CameraModule.stopRecording(this.handle);
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Focus the camera to a specific point in the coordinate system.
* @param {Point} point The point to focus on. This should be relative to the Camera view's coordinate system,
* and expressed in pixels on iOS and points on Android.
* * `(0, 0)` means **top left**.
* * `(CameraView.width, CameraView.height)` means **bottom right**.
*
* Make sure the value doesn't exceed the CameraView's dimensions.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while focusing. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
* @example
* ```ts
* await camera.current.focus({
* x: tapEvent.x,
* y: tapEvent.y
* })
* ```
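*
* If the point comes from a gesture handler, it can be worth clamping it to the view's measured size
* first, since values outside the view are not supported. A minimal sketch, assuming `viewWidth` and
* `viewHeight` were captured in the view's `onLayout` callback:
* ```ts
* await camera.current.focus({
*   x: Math.min(Math.max(tapEvent.x, 0), viewWidth),
*   y: Math.min(Math.max(tapEvent.y, 0), viewHeight)
* })
* ```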
*/
public async focus(point: Point): Promise<void> {
try {
return await CameraModule.focus(this.handle, point);
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
//#endregion
//#region Static Functions (NativeModule)
/**
* Get a list of all available camera devices on the current phone.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting all available camera devices. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
* @example
* ```ts
* const devices = await Camera.getAvailableCameraDevices()
* const filtered = devices.filter((d) => matchesMyExpectations(d))
* const sorted = devices.sort(sortDevicesByAmountOfCameras)
* return {
* back: sorted.find((d) => d.position === "back"),
* front: sorted.find((d) => d.position === "front")
* }
* ```
*/
public static async getAvailableCameraDevices(): Promise<CameraDevice[]> {
try {
return await CameraModule.getAvailableCameraDevices();
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Gets the current Camera Permission Status. Check this before mounting the Camera to ensure
* the user has permitted the app to use the camera.
*
* To actually prompt the user for camera permission, use {@linkcode Camera.requestCameraPermission | requestCameraPermission()}.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting the current permission status. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
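*
* A minimal sketch of gating the Camera on the result:
* @example
* ```ts
* const status = await Camera.getCameraPermissionStatus()
* if (status !== 'authorized') {
*   // not (yet) allowed to use the camera - prompt the user via requestCameraPermission()
* }
* ```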
*/
public static async getCameraPermissionStatus(): Promise<CameraPermissionStatus> {
try {
return await CameraModule.getCameraPermissionStatus();
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Gets the current Microphone-Recording Permission Status. Check this before mounting the Camera to ensure
* the user has permitted the app to use the microphone.
*
* To actually prompt the user for microphone permission, use {@linkcode Camera.requestMicrophonePermission | requestMicrophonePermission()}.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting the current permission status. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
*/
public static async getMicrophonePermissionStatus(): Promise<CameraPermissionStatus> {
try {
return await CameraModule.getMicrophonePermissionStatus();
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Shows a "request permission" alert to the user, and resolves with the new camera permission status.
*
* If the user has previously blocked the app from using the camera, the alert will not be shown
* and `"denied"` will be returned.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while requesting permission. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
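*
* A minimal sketch of handling the result (using React Native's `Linking.openSettings()` to send
* the user to the app settings when access was blocked):
* @example
* ```ts
* const permission = await Camera.requestCameraPermission()
* if (permission === 'denied') {
*   // the user denied (or previously blocked) camera access - asking again won't show the alert
*   await Linking.openSettings()
* }
* ```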
*/
public static async requestCameraPermission(): Promise<CameraPermissionRequestResult> {
try {
return await CameraModule.requestCameraPermission();
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
/**
* Shows a "request permission" alert to the user, and resolves with the new microphone permission status.
*
* If the user has previously blocked the app from using the microphone, the alert will not be shown
* and `"denied"` will be returned.
*
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while requesting permission. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
*/
public static async requestMicrophonePermission(): Promise<CameraPermissionRequestResult> {
try {
return await CameraModule.requestMicrophonePermission();
} catch (e) {
throw tryParseNativeCameraError(e);
}
}
//#endregion
//#region Events (Wrapped to maintain reference equality)
private onError(event: NativeSyntheticEvent<OnErrorEvent>): void {
if (this.props.onError != null) {
const error = event.nativeEvent;
const cause = isErrorWithCause(error.cause) ? error.cause : undefined;
this.props.onError(
// @ts-expect-error We're casting from unknown bridge types to TS unions; the native values are expected to match them
new CameraRuntimeError(error.code, error.message, cause),
);
}
}
private onInitialized(): void {
this.props.onInitialized?.();
}
//#endregion
/**
* @internal
*/
private assertFrameProcessorsEnabled(): void {
// @ts-expect-error JSI functions aren't typed
if (global.setFrameProcessor == null || global.unsetFrameProcessor == null)
throw new Error('Frame Processors are not enabled. Make sure you install react-native-reanimated 2.2.0 or above!');
}
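// Note: a frame processor is a react-native-reanimated worklet that receives `Frame`s.
// A hedged sketch of typical consumer code (assuming the `useFrameProcessor` hook exported by this
// library; prop names as defined in `CameraProps`):
//
//   const frameProcessor = useFrameProcessor((frame) => {
//     'worklet'
//     console.log(`Frame: ${frame.width}x${frame.height}`)
//   }, [])
//   return <Camera {...props} frameProcessor={frameProcessor} frameProcessorFps={1} />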
/**
* @internal
*/
componentWillUnmount(): void {
if (this.lastFrameProcessor != null || this.props.frameProcessor != null) {
this.assertFrameProcessorsEnabled();
// @ts-expect-error JSI functions aren't typed
global.unsetFrameProcessor(this.handle);
}
}
/**
* @internal
*/
componentDidUpdate(): void {
if (this.props.frameProcessor !== this.lastFrameProcessor) {
this.assertFrameProcessorsEnabled();
// frameProcessor argument changed. Update native to reflect the change.
if (this.props.frameProcessor != null) {
// 1. Spawn threaded JSI Runtime (if not already done)
// 2. Add video data output to Camera stream (if not already done)
// 3. Workletize the frameProcessor and prepare it for being called with frames
// @ts-expect-error JSI functions aren't typed
global.setFrameProcessor(this.handle, this.props.frameProcessor);
} else {
// 1. Destroy the threaded runtime
// 2. remove the frame processor
// 3. Remove the video data output
// @ts-expect-error JSI functions aren't typed
global.unsetFrameProcessor(this.handle);
}
this.lastFrameProcessor = this.props.frameProcessor;
}
}
/**
* @internal
*/
public render(): React.ReactNode {
// We remove the big `device` object from the props because we only need to pass `cameraId` to native.
const { device, video: enableVideo, frameProcessor, ...props } = this.props;
// On iOS, enabling a frameProcessor requires `video` to be `true`; on Android, it doesn't.
const video = Platform.OS === 'ios' ? frameProcessor != null || enableVideo : enableVideo;
return (
<NativeCameraView
{...props}
cameraId={device.id}
ref={this.ref}
onInitialized={this.onInitialized}
onError={this.onError}
video={video}
/>
);
}
}
//#endregion
// requireNativeComponent automatically resolves 'CameraView' to 'CameraViewManager'
const NativeCameraView = requireNativeComponent<NativeCameraViewProps>(
'CameraView',
// @ts-expect-error The type declarations for requireNativeComponent don't match this call signature
Camera,
);