b6a67d5ced
* Clean up Frame Processor
* Create FrameProcessorHolder
* Create FrameProcessorDelegate in ObjC++
* Move frame processor to FrameProcessorDelegate
* Decorate runtime, check for null
* Update FrameProcessorDelegate.mm
* Cleanup FrameProcessorBindings.mm
* Fix RuntimeDecorator.h import
* Update FrameProcessorDelegate.mm
* "React" -> "React Helper" to avoid confusion
* Rename folders again
* Fix podspec flattening a lot of headers, causing REA nameclash
* Fix header imports to avoid REA naming collision
* Lazily initialize jsi::Runtime on DispatchQueue
* Install frame processor bindings from Swift
* First try to call jsi::Function (frame processor) 👀
* Call viewForReactTag on RCT main thread
* Fix bridge accessing
* Add more logs
* Update CameraViewManager.swift
* Add more TODOs
* Re-indent .cpp files
* Fix RCTTurboModule import podspec
* Remove unnecessary include check for swift umbrella header
* Merge branch 'main' into frame-processors
* Docs: use static width for images (283)
* Create validate-cpp.yml
* Update a lot of packages to latest
* Set SWIFT_VERSION to 5.2 in podspec
* Create clean.sh
* Delete unused C++ files
* podspec: Remove CLANG_CXX_LANGUAGE_STANDARD and OTHER_CFLAGS
* Update pod lockfiles
* Regenerate lockfiles
* Remove IOSLogger
* Use NSLog
* Create FrameProcessorManager (inherits from REA RuntimeManager)
* Create reanimated::RuntimeManager shared_ptr
* Re-integrate pods
* Add react-native-reanimated >=2 peerDependency
* Add metro-config
* blacklist -> exclusionList
* Try to call worklet
* Fix jsi::Value* initializer
* Call ShareableValue::adapt (makeShareable) with React/JS Runtime
* Add null-checks
* Lift runtime manager creation out of delegate, into bindings
* Remove debug statement
* Make RuntimeManager unique_ptr
* Set _FRAME_PROCESSOR
* Extract convertJSIFunctionToFrameProcessorCallback
* Print frame
* Merge branch 'main' into frame-processors
* Reformat Swift code
* Install reanimated from npm again
* Re-integrate Pods
* Dependabot: Also scan example/ and docs/
* Update validate-cpp.yml
* Create FrameProcessorUtils
* Create Frame.h
* Abstract HostObject creation away
* Fix types
* Fix frame processor call
* Add todo
* Update lockfiles
* Add C++ contributing instructions
* Update CONTRIBUTING.md
* Add android/src/main/cpp to cpplint
* Update cpplint.sh
* Fix a few cpplint errors
* Fix globals
* Fix a few more cpplint errors
* Update App.tsx
* Update AndroidLogger.cpp
* Format
* Fix cpplint script (check-cpp)
* Try to simplify frame processor
* y
* Update FrameProcessorUtils.mm
* Update FrameProcessorBindings.mm
* Update CameraView.swift
* Update CameraViewManager.m
* Restructure everything
* fix
* Fix `@objc` export (make public)
* Refactor installFrameProcessorBindings into FrameProcessorRuntimeManager
* Add swift RCTBridge.runOnJS helper
* Fix run(onJS)
* Add pragma once
* Add `&self` to lambda
* Update FrameProcessorRuntimeManager.mm
* reorder imports
* Fix imports
* forward declare
* Rename extension
* Destroy buffer after execution
* Add FrameProcessorPluginRegistry base
* Merge branch 'main' into frame-processors
* Add frameProcessor to types
* Update Camera.tsx
* Fix rebase merge
* Remove movieOutput
* Use `useFrameProcessor`
* Fix bad merge
* Add additional ESLint rules
* Update lockfiles
* Update CameraViewManager.m
* Add support for V8 runtime
* Add frame processor plugins API
* Print plugin invoke
* Fix React Utils in podspec
* Fix runOnJS swift name
* Remove invalid redecl of `captureSession`
* Use REA 2.1.0 which includes all my big PRs 🎉
* Update validate-cpp.yml
* Update Podfile.lock
* Remove Flipper
* Fix dereferencing
* Capture `self` by value. Fucking hell, what a dumb mistake.
* Override a few HostObject functions
* Expose isReady, width, height, bytesPerRow and planesCount
* use hook again
* Expose property names
* FrameProcessor -> Frame
* Update CameraView+RecordVideo.swift
* Add Swift support for Frame Processors Plugins
* Add macros for plugin installation
* Add ObjC frame processor plugin
* Correctly install frame processor plugins
* Don't require custom name for macro
* Check if plugin already exists
* Implement QR Code Frame Processor Plugin in Swift
* Adjust ObjC style frame processor macro
* optimize
* Add `frameProcessorFrameDropRate`
* Fix types
* Only log once
* Log if it executes slowly
* Implement `frameProcessorFps`
* Implement manual encoded video recordings
* Use recommended video settings
* Add fileType types
* Ignore if input is not ready for media data
* Add completion handler
* Add audio buffer sampling
* Init only for video frame
* use AVAssetWriterInputPixelBufferAdaptor
* Remove AVAssetWriterInputPixelBufferAdaptor
* Rotate VideoWriter
* Always assume portrait orientation
* Update RecordingSession.swift
* Use a separate Queue for Audio
* Format Swift
* Update CameraView+RecordVideo.swift
* Use `videoQueue` instead of `cameraQueue`
* Move example plugins to example app
* Fix hardcoded name in plugin macro
* QRFrame... -> QRCodeFrame...
* Update FrameProcessorPlugin.h
* Add example frame processors to JS base
* Update QRCodeFrameProcessorPluginSwift.m
* Add docs to create FP Plugins
* Update FRAME_PROCESSORS_CREATE.mdx
* Update FRAME_PROCESSORS_CREATE.mdx
* Use `AVAssetWriterInputPixelBufferAdaptor` for efficient pixel buffer recycling
* Add customizable `pixelFormat`
* Use native format if available
* Update project.pbxproj
* Set video width and height as source-pixel-buffer attributes
* Catch
* Update App.tsx
* Don't explicitly set video dimensions, let CVPixelBufferPool handle it
* Add a few logs
* Cleanup
* Update CameraView+RecordVideo.swift
* Eagerly initialize asset writer to fix stutter at first frame
* Use `cameraQueue` DispatchQueue to not block CaptureDataOutputDelegate
* Fix duration calculation
* cleanup
* Cleanup
* Swiftformat
* Return available video codecs
* Only show frame drop notification for video output
* Remove photo and video codec functionality (it was too much complexity and probably never used anyways)
* Revert all android related changes for now
* Cleanup
* Remove unused header
* Update AVAssetWriter.Status+descriptor.swift
* Only call Frame Processor for Video Frames
* Fix `if`
* Add support for Frame Processor plugin parameters/arguments
* Fix arg support
* Move to JSIUtils.mm
* Update JSIUtils.h
* Update FRAME_PROCESSORS_CREATE.mdx
* Update FRAME_PROCESSORS_CREATE.mdx
* Upgrade packages for docs/
* fix docs
* Rename
* highlight lines
* docs
* community plugins
* Update FRAME_PROCESSOR_CREATE_FINAL.mdx
* Update FRAME_PROCESSOR_PLUGIN_LIST.mdx
* Update FRAME_PROCESSOR_PLUGIN_LIST.mdx
* Update dependencies (1/2)
* Update dependencies (2/2)
* Update Gemfile.lock
* add FP docs
* Update README.md
* Make `lastFrameProcessor` private
* add `frameProcessor` docs
* fix docs
* adjust docs
* Update DEVICES.mdx
* fix
* s
* Add logs demo
* add metro restart note
* Update FRAME_PROCESSOR_CREATE_PLUGIN_IOS.mdx
* Mirror video device
* Update AVCaptureVideoDataOutput+mirror.swift
* Create .swift-version
* Enable whole module optimization
* Fix recording mirrored video
* Swift format
* Clean dictionary on `markInvalid`
* Fix cleanup
* Add docs for disabling frame processors
* Update project.pbxproj
* Revert "Update project.pbxproj" (this reverts commit e67861e51b88b4888a6940e2d20388f3044211d0)
* Log frame drop reason
* Format
* add more samples
* Add clang-format
* also check .mm
* Revert "also check .mm" (this reverts commit 8b9d5e2c29866b05909530d104f6633d6c49eadd)
* Revert "Add clang-format" (this reverts commit 7643ac808e0fc34567ea1f814e73d84955381636)
* Use `kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange` as default
* Read matching video attributes from videoSettings
* Add TODO
* Swiftformat
* Conditionally disable frame processors
* Assert if trying to use frame processors when disabled
* Add frame-processors demo gif
* Allow disabling frame processors via `VISION_CAMERA_DISABLE_FRAME_PROCESSORS`
* Update FrameProcessorRuntimeManager.mm
* Update FRAME_PROCESSORS.mdx
* Update project.pbxproj
* Update FRAME_PROCESSORS_CREATE_OVERVIEW.mdx

import React from 'react';
import { requireNativeComponent, NativeModules, NativeSyntheticEvent, findNodeHandle, NativeMethods, Platform } from 'react-native';
import type { CameraDevice } from './CameraDevice';
import type { ErrorWithCause } from './CameraError';
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
import type { CameraProps } from './CameraProps';
import type { Frame } from './Frame';
import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
import type { Point } from './Point';
import type { TakeSnapshotOptions } from './Snapshot';
import type { RecordVideoOptions, VideoFile } from './VideoFile';

//#region Types
export type CameraPermissionStatus = 'authorized' | 'not-determined' | 'denied' | 'restricted';
export type CameraPermissionRequestResult = 'authorized' | 'denied';

interface OnErrorEvent {
  code: string;
  message: string;
  cause?: ErrorWithCause;
}
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
  cameraId: string;
  onInitialized?: (event: NativeSyntheticEvent<void>) => void;
  onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
};
type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
//#endregion

// NativeModules automatically resolves 'CameraView' to 'CameraViewModule'
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const CameraModule = NativeModules.CameraView;
if (CameraModule == null) console.error("Camera: Native Module 'CameraView' was null! Did you run pod install?");

interface CameraState {
  /**
   * The actual native ID for the camera device.
   */
  cameraId?: string;
}

//#region Camera Component
/**
 * ### A powerful `<Camera>` component.
 *
 * Read the [VisionCamera documentation](https://cuvent.github.io/react-native-vision-camera/) for more information.
 *
 * The `<Camera>` component's most important (and therefore _required_) properties are:
 *
 * * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevices | useCameraDevices()} hook, or manually by using the {@linkcode Camera.getAvailableCameraDevices Camera.getAvailableCameraDevices()} function.
 * * {@linkcode CameraProps.isActive | isActive}: A boolean value that specifies whether the Camera should actively stream video frames or not. This can be compared to a Video component, where `isActive` specifies whether the video is paused or not. If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again.
 *
 * @example
 * ```tsx
 * function App() {
 *   const devices = useCameraDevices('wide-angle-camera')
 *   const device = devices.back
 *
 *   if (device == null) return <LoadingView />
 *   return (
 *     <Camera
 *       style={StyleSheet.absoluteFill}
 *       device={device}
 *       isActive={true}
 *     />
 *   )
 * }
 * ```
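 *
 * A hedged sketch of attaching a Frame Processor. It assumes the `useFrameProcessor` hook exported by this library and the `frameProcessorFps` prop mentioned in the changelog; `scanQRCodes` is a hypothetical Frame Processor Plugin used only for illustration.
 * @example
 * ```tsx
 * function App() {
 *   const devices = useCameraDevices()
 *   const device = devices.back
 *
 *   // runs as a worklet on a separate thread, once per frame (or at `frameProcessorFps`)
 *   const frameProcessor = useFrameProcessor((frame) => {
 *     'worklet'
 *     const codes = scanQRCodes(frame) // hypothetical plugin call
 *     console.log(codes)
 *   }, [])
 *
 *   if (device == null) return <LoadingView />
 *   return (
 *     <Camera
 *       style={StyleSheet.absoluteFill}
 *       device={device}
 *       isActive={true}
 *       frameProcessor={frameProcessor}
 *       frameProcessorFps={1}
 *     />
 *   )
 * }
 * ```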
 *
 * @component
 */
export class Camera extends React.PureComponent<CameraProps, CameraState> {
  /**
   * @internal
   */
  static displayName = 'Camera';
  /**
   * @internal
   */
  displayName = Camera.displayName;
  private lastFrameProcessor: ((frame: Frame) => void) | undefined;

  private readonly ref: React.RefObject<RefType>;

  /**
   * @internal
   */
  constructor(props: CameraProps) {
    super(props);
    this.state = { cameraId: undefined };
    this.onInitialized = this.onInitialized.bind(this);
    this.onError = this.onError.bind(this);
    this.ref = React.createRef<RefType>();
    this.lastFrameProcessor = undefined;
  }

  private get handle(): number | null {
    const nodeHandle = findNodeHandle(this.ref.current);
    if (nodeHandle == null) console.error('Camera: findNodeHandle(ref) returned null! Does the Camera view exist in the native view tree?');

    return nodeHandle;
  }

  //#region View-specific functions (UIViewManager)
  /**
   * Take a single photo and write its content to a temporary file.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occurred while capturing the photo. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error.
   * @example
   * ```ts
   * const photo = await camera.current.takePhoto({
   *   qualityPrioritization: 'quality',
   *   flash: 'on',
   *   enableAutoRedEyeReduction: true
   * })
   * ```
   */
  public async takePhoto(options?: TakePhotoOptions): Promise<PhotoFile> {
    try {
      return await CameraModule.takePhoto(this.handle, options ?? {});
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }

  /**
   * Take a snapshot of the current preview view.
   *
   * This can be used as an alternative to {@linkcode Camera.takePhoto | takePhoto()} if speed is more important than quality.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occurred while taking a snapshot. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error.
   *
   * @platform Android
   * @example
   * ```ts
   * const photo = await camera.current.takeSnapshot({
   *   quality: 85,
   *   skipMetadata: true
   * })
   * ```
   */
  public async takeSnapshot(options?: TakeSnapshotOptions): Promise<PhotoFile> {
    if (Platform.OS !== 'android')
      throw new CameraCaptureError('capture/capture-type-not-supported', `'takeSnapshot()' is not available on ${Platform.OS}!`);

    try {
      return await CameraModule.takeSnapshot(this.handle, options ?? {});
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }

  /**
   * Start a new video recording.
   *
   * Records in the following formats:
   * * **iOS**: QuickTime (`.mov`)
   * * **Android**: MPEG4 (`.mp4`)
   *
   * @blocking This function is synchronized/blocking.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occurred while starting the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error.
   *
   * @example
   * ```ts
   * camera.current.startRecording({
   *   onRecordingFinished: (video) => console.log(video),
   *   onRecordingError: (error) => console.error(error),
   * })
   * setTimeout(() => {
   *   camera.current.stopRecording()
   * }, 5000)
   * ```
   */
  public startRecording(options: RecordVideoOptions): void {
    const { onRecordingError, onRecordingFinished, ...passThroughOptions } = options;
    if (typeof onRecordingError !== 'function' || typeof onRecordingFinished !== 'function')
      throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!');

    const onRecordCallback = (video?: VideoFile, error?: CameraCaptureError): void => {
      if (error != null) return onRecordingError(error);
      if (video != null) return onRecordingFinished(video);
    };
    // TODO: Use TurboModules to either make this a sync invocation, or make it async.
    try {
      CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback);
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  /**
   * Stop the current video recording.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occurred while stopping the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error.
   *
   * @example
   * ```ts
   * camera.current.startRecording({
   *   onRecordingFinished: (video) => console.log(video),
   *   onRecordingError: (error) => console.error(error),
   * })
   * setTimeout(async () => {
   *   await camera.current.stopRecording()
   * }, 5000)
   * ```
   */
  public async stopRecording(): Promise<void> {
    try {
      return await CameraModule.stopRecording(this.handle);
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }

  /**
   * Focus the camera to a specific point in the coordinate system.
   * @param {Point} point The point to focus to. This should be relative to the Camera view's coordinate system,
   * and expressed in Pixels on iOS and Points on Android.
   * * `(0, 0)` means **top left**.
   * * `(CameraView.width, CameraView.height)` means **bottom right**.
   *
   * Make sure the value doesn't exceed the CameraView's dimensions.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occurred while focusing. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error.
   * @example
   * ```ts
   * await camera.current.focus({
   *   x: tapEvent.x,
   *   y: tapEvent.y
   * })
   * ```
   */
  public async focus(point: Point): Promise<void> {
    try {
      return await CameraModule.focus(this.handle, point);
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  //#endregion

  //#region Static Functions (NativeModule)
  /**
   * Get a list of all available camera devices on the current phone.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting all available camera devices. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error.
   * @example
   * ```ts
   * const devices = await Camera.getAvailableCameraDevices()
   * const filtered = devices.filter((d) => matchesMyExpectations(d))
   * const sorted = devices.sort(sortDevicesByAmountOfCameras)
   * return {
   *   back: sorted.find((d) => d.position === "back"),
   *   front: sorted.find((d) => d.position === "front")
   * }
   * ```
   */
  public static async getAvailableCameraDevices(): Promise<CameraDevice[]> {
    try {
      return await CameraModule.getAvailableCameraDevices();
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  /**
   * Gets the current Camera Permission Status. Check this before mounting the Camera to ensure
   * the user has permitted the app to use the camera.
   *
   * To actually prompt the user for camera permission, use {@linkcode Camera.requestCameraPermission | requestCameraPermission()}.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting the current permission status. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error.
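   *
   * A minimal usage sketch (the branching logic is illustrative, not part of this API):
   * @example
   * ```ts
   * const status = await Camera.getCameraPermissionStatus()
   * if (status !== 'authorized') {
   *   // not yet granted, ask the user via requestCameraPermission()
   * }
   * ```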
   */
  public static async getCameraPermissionStatus(): Promise<CameraPermissionStatus> {
    try {
      return await CameraModule.getCameraPermissionStatus();
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  /**
   * Gets the current Microphone-Recording Permission Status. Check this before mounting the Camera to ensure
   * the user has permitted the app to use the microphone.
   *
   * To actually prompt the user for microphone permission, use {@linkcode Camera.requestMicrophonePermission | requestMicrophonePermission()}.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting the current permission status. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error.
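   *
   * A minimal usage sketch (the branching logic is illustrative, not part of this API):
   * @example
   * ```ts
   * const status = await Camera.getMicrophonePermissionStatus()
   * if (status !== 'authorized') {
   *   // not yet granted, ask the user via requestMicrophonePermission()
   * }
   * ```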
   */
  public static async getMicrophonePermissionStatus(): Promise<CameraPermissionStatus> {
    try {
      return await CameraModule.getMicrophonePermissionStatus();
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  /**
   * Shows a "request permission" alert to the user, and resolves with the new camera permission status.
   *
   * If the user has previously blocked the app from using the camera, the alert will not be shown
   * and `"denied"` will be returned.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occurred while requesting permission. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error.
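   *
   * A minimal usage sketch (the fallback handling is illustrative, not part of this API):
   * @example
   * ```ts
   * const permission = await Camera.requestCameraPermission()
   * if (permission === 'denied') {
   *   // the user rejected the request (or blocked it before), show a fallback UI
   * }
   * ```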
   */
  public static async requestCameraPermission(): Promise<CameraPermissionRequestResult> {
    try {
      return await CameraModule.requestCameraPermission();
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  /**
   * Shows a "request permission" alert to the user, and resolves with the new microphone permission status.
   *
   * If the user has previously blocked the app from using the microphone, the alert will not be shown
   * and `"denied"` will be returned.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occurred while requesting permission. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error.
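   *
   * A minimal usage sketch (the fallback handling is illustrative, not part of this API):
   * @example
   * ```ts
   * const permission = await Camera.requestMicrophonePermission()
   * if (permission === 'denied') {
   *   // the user rejected the request (or blocked it before), record without audio or show a hint
   * }
   * ```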
   */
  public static async requestMicrophonePermission(): Promise<CameraPermissionRequestResult> {
    try {
      return await CameraModule.requestMicrophonePermission();
    } catch (e) {
      throw tryParseNativeCameraError(e);
    }
  }
  //#endregion

  //#region Events (Wrapped to maintain reference equality)
  private onError(event: NativeSyntheticEvent<OnErrorEvent>): void {
    if (this.props.onError != null) {
      const error = event.nativeEvent;
      const cause = isErrorWithCause(error.cause) ? error.cause : undefined;
      this.props.onError(
        // @ts-expect-error We're casting from unknown bridge types to TS unions; this should work at runtime.
        new CameraRuntimeError(error.code, error.message, cause),
      );
    }
  }

  private onInitialized(): void {
    this.props.onInitialized?.();
  }
  //#endregion

  /**
   * @internal
   */
  static getDerivedStateFromProps(props: CameraProps, state: CameraState): CameraState | null {
    const newCameraId = props.device.id;
    if (state.cameraId !== newCameraId) return { ...state, cameraId: newCameraId };

    return null;
  }

  /**
   * @internal
   */
  private assertFrameProcessorsEnabled(): void {
    // @ts-expect-error JSI functions aren't typed
    if (global.setFrameProcessor == null || global.unsetFrameProcessor == null)
      throw new Error('Frame Processors are not enabled. Make sure you install react-native-reanimated 2.1.0 or above!');
  }

  /**
   * @internal
   */
  componentWillUnmount(): void {
    this.assertFrameProcessorsEnabled();
    // @ts-expect-error JSI functions aren't typed
    global.unsetFrameProcessor(this.handle);
  }

  /**
   * @internal
   */
  componentDidUpdate(): void {
    if (this.props.frameProcessor !== this.lastFrameProcessor) {
      this.assertFrameProcessorsEnabled();
      // The frameProcessor prop changed. Update the native side to reflect the change.
      if (this.props.frameProcessor != null) {
        // 1. Spawn threaded JSI Runtime (if not already done)
        // 2. Add video data output to Camera stream (if not already done)
        // 3. Workletize the frameProcessor and prepare it for being called with frames
        // @ts-expect-error JSI functions aren't typed
        global.setFrameProcessor(this.handle, this.props.frameProcessor);
      } else {
        // 1. Destroy the threaded runtime
        // 2. Remove the frame processor
        // 3. Remove the video data output
        // @ts-expect-error JSI functions aren't typed
        global.unsetFrameProcessor(this.handle);
      }
      this.lastFrameProcessor = this.props.frameProcessor;
    }
  }

  /**
   * @internal
   */
  public render(): React.ReactNode {
    if (this.state.cameraId == null) throw new Error('CameraID is null! Did you pass a valid `device`?');
    // We remove the big `device` object from the props because we only need to pass `cameraId` to native.
    const { device: _, frameProcessor: __, ...props } = this.props;

    return (
      <NativeCameraView
        {...props}
        cameraId={this.state.cameraId}
        ref={this.ref}
        onInitialized={this.onInitialized}
        onError={this.onError}
      />
    );
  }
}
//#endregion

// requireNativeComponent automatically resolves 'CameraView' to 'CameraViewManager'
const NativeCameraView = requireNativeComponent<NativeCameraViewProps>(
  'CameraView',
  // @ts-expect-error The type declarations for requireNativeComponent's second parameter appear to be incorrect.
  Camera,
);