chore: Remove semicolons (#1846)

* chore: Disable `semi` in Prettier

* chore: Format w/o semi

* Remove more `;`

* Lint example

* More ;
This commit is contained in:
Marc Rousavy
2023-09-26 11:39:17 +02:00
committed by GitHub
parent f7428f26a4
commit 14721d314f
69 changed files with 998 additions and 999 deletions

View File

@@ -1,34 +1,34 @@
import React from 'react';
import { requireNativeComponent, NativeSyntheticEvent, findNodeHandle, NativeMethods } from 'react-native';
import type { CameraDevice } from './CameraDevice';
import type { ErrorWithCause } from './CameraError';
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
import type { CameraProps, FrameProcessor } from './CameraProps';
import { CameraModule } from './NativeCameraModule';
import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
import type { Point } from './Point';
import type { RecordVideoOptions, VideoFile } from './VideoFile';
import { VisionCameraProxy } from './FrameProcessorPlugins';
import { CameraDevices } from './CameraDevices';
import type { EmitterSubscription } from 'react-native';
import React from 'react'
import { requireNativeComponent, NativeSyntheticEvent, findNodeHandle, NativeMethods } from 'react-native'
import type { CameraDevice } from './CameraDevice'
import type { ErrorWithCause } from './CameraError'
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError'
import type { CameraProps, FrameProcessor } from './CameraProps'
import { CameraModule } from './NativeCameraModule'
import type { PhotoFile, TakePhotoOptions } from './PhotoFile'
import type { Point } from './Point'
import type { RecordVideoOptions, VideoFile } from './VideoFile'
import { VisionCameraProxy } from './FrameProcessorPlugins'
import { CameraDevices } from './CameraDevices'
import type { EmitterSubscription } from 'react-native'
//#region Types
export type CameraPermissionStatus = 'granted' | 'not-determined' | 'denied' | 'restricted';
export type CameraPermissionRequestResult = 'granted' | 'denied';
export type CameraPermissionStatus = 'granted' | 'not-determined' | 'denied' | 'restricted'
export type CameraPermissionRequestResult = 'granted' | 'denied'
interface OnErrorEvent {
code: string;
message: string;
cause?: ErrorWithCause;
code: string
message: string
cause?: ErrorWithCause
}
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
cameraId: string;
enableFrameProcessor: boolean;
onInitialized?: (event: NativeSyntheticEvent<void>) => void;
onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
onViewReady: () => void;
};
type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
cameraId: string
enableFrameProcessor: boolean
onInitialized?: (event: NativeSyntheticEvent<void>) => void
onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void
onViewReady: () => void
}
type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>
//#endregion
//#region Camera Component
@@ -62,34 +62,34 @@ type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
*/
export class Camera extends React.PureComponent<CameraProps> {
/** @internal */
static displayName = 'Camera';
static displayName = 'Camera'
/** @internal */
displayName = Camera.displayName;
private lastFrameProcessor: FrameProcessor | undefined;
private isNativeViewMounted = false;
displayName = Camera.displayName
private lastFrameProcessor: FrameProcessor | undefined
private isNativeViewMounted = false
private readonly ref: React.RefObject<RefType>;
private readonly ref: React.RefObject<RefType>
/** @internal */
constructor(props: CameraProps) {
super(props);
this.onViewReady = this.onViewReady.bind(this);
this.onInitialized = this.onInitialized.bind(this);
this.onError = this.onError.bind(this);
this.ref = React.createRef<RefType>();
this.lastFrameProcessor = undefined;
super(props)
this.onViewReady = this.onViewReady.bind(this)
this.onInitialized = this.onInitialized.bind(this)
this.onError = this.onError.bind(this)
this.ref = React.createRef<RefType>()
this.lastFrameProcessor = undefined
}
private get handle(): number {
const nodeHandle = findNodeHandle(this.ref.current);
const nodeHandle = findNodeHandle(this.ref.current)
if (nodeHandle == null || nodeHandle === -1) {
throw new CameraRuntimeError(
'system/view-not-found',
"Could not get the Camera's native view tag! Does the Camera View exist in the native view-tree?",
);
)
}
return nodeHandle;
return nodeHandle
}
//#region View-specific functions (UIViewManager)
@@ -108,9 +108,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public async takePhoto(options?: TakePhotoOptions): Promise<PhotoFile> {
try {
return await CameraModule.takePhoto(this.handle, options ?? {});
return await CameraModule.takePhoto(this.handle, options ?? {})
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
@@ -131,19 +131,19 @@ export class Camera extends React.PureComponent<CameraProps> {
* ```
*/
public startRecording(options: RecordVideoOptions): void {
const { onRecordingError, onRecordingFinished, ...passThroughOptions } = options;
const { onRecordingError, onRecordingFinished, ...passThroughOptions } = options
if (typeof onRecordingError !== 'function' || typeof onRecordingFinished !== 'function')
throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!');
throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!')
const onRecordCallback = (video?: VideoFile, error?: CameraCaptureError): void => {
if (error != null) return onRecordingError(error);
if (video != null) return onRecordingFinished(video);
};
if (error != null) return onRecordingError(error)
if (video != null) return onRecordingFinished(video)
}
try {
// TODO: Use TurboModules to make this awaitable.
CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback);
CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback)
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
@@ -169,9 +169,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public async pauseRecording(): Promise<void> {
try {
return await CameraModule.pauseRecording(this.handle);
return await CameraModule.pauseRecording(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
@@ -197,9 +197,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public async resumeRecording(): Promise<void> {
try {
return await CameraModule.resumeRecording(this.handle);
return await CameraModule.resumeRecording(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
@@ -218,9 +218,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public async stopRecording(): Promise<void> {
try {
return await CameraModule.stopRecording(this.handle);
return await CameraModule.stopRecording(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
@@ -244,9 +244,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public async focus(point: Point): Promise<void> {
try {
return await CameraModule.focus(this.handle, point);
return await CameraModule.focus(this.handle, point)
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
//#endregion
@@ -268,7 +268,7 @@ export class Camera extends React.PureComponent<CameraProps> {
* ```
*/
public static getAvailableCameraDevices(): CameraDevice[] {
return CameraDevices.getAvailableCameraDevices();
return CameraDevices.getAvailableCameraDevices()
}
/**
 * Adds a listener that gets called every time the Camera Devices change, for example
@@ -277,7 +277,7 @@ export class Camera extends React.PureComponent<CameraProps> {
* If you use Hooks, use the `useCameraDevices()` hook instead.
*/
public static addCameraDevicesChangedListener(listener: (newDevices: CameraDevice[]) => void): EmitterSubscription {
return CameraDevices.addCameraDevicesChangedListener(listener);
return CameraDevices.addCameraDevicesChangedListener(listener)
}
/**
* Gets the current Camera Permission Status. Check this before mounting the Camera to ensure
@@ -289,9 +289,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public static async getCameraPermissionStatus(): Promise<CameraPermissionStatus> {
try {
return await CameraModule.getCameraPermissionStatus();
return await CameraModule.getCameraPermissionStatus()
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
/**
@@ -304,9 +304,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public static async getMicrophonePermissionStatus(): Promise<CameraPermissionStatus> {
try {
return await CameraModule.getMicrophonePermissionStatus();
return await CameraModule.getMicrophonePermissionStatus()
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
/**
@@ -319,9 +319,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public static async requestCameraPermission(): Promise<CameraPermissionRequestResult> {
try {
return await CameraModule.requestCameraPermission();
return await CameraModule.requestCameraPermission()
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
/**
@@ -334,9 +334,9 @@ export class Camera extends React.PureComponent<CameraProps> {
*/
public static async requestMicrophonePermission(): Promise<CameraPermissionRequestResult> {
try {
return await CameraModule.requestMicrophonePermission();
return await CameraModule.requestMicrophonePermission()
} catch (e) {
throw tryParseNativeCameraError(e);
throw tryParseNativeCameraError(e)
}
}
//#endregion
@@ -344,48 +344,48 @@ export class Camera extends React.PureComponent<CameraProps> {
//#region Events (Wrapped to maintain reference equality)
private onError(event: NativeSyntheticEvent<OnErrorEvent>): void {
if (this.props.onError != null) {
const error = event.nativeEvent;
const cause = isErrorWithCause(error.cause) ? error.cause : undefined;
const error = event.nativeEvent
const cause = isErrorWithCause(error.cause) ? error.cause : undefined
this.props.onError(
// @ts-expect-error We're casting from unknown bridge types to TS unions, I expect it to hopefully work
new CameraRuntimeError(error.code, error.message, cause),
);
)
}
}
private onInitialized(): void {
this.props.onInitialized?.();
this.props.onInitialized?.()
}
//#endregion
//#region Lifecycle
private setFrameProcessor(frameProcessor: FrameProcessor): void {
VisionCameraProxy.setFrameProcessor(this.handle, frameProcessor);
VisionCameraProxy.setFrameProcessor(this.handle, frameProcessor)
}
private unsetFrameProcessor(): void {
VisionCameraProxy.removeFrameProcessor(this.handle);
VisionCameraProxy.removeFrameProcessor(this.handle)
}
private onViewReady(): void {
this.isNativeViewMounted = true;
this.isNativeViewMounted = true
if (this.props.frameProcessor != null) {
// user passed a `frameProcessor` but we didn't set it yet because the native view was not mounted yet. set it now.
this.setFrameProcessor(this.props.frameProcessor);
this.lastFrameProcessor = this.props.frameProcessor;
this.setFrameProcessor(this.props.frameProcessor)
this.lastFrameProcessor = this.props.frameProcessor
}
}
/** @internal */
componentDidUpdate(): void {
if (!this.isNativeViewMounted) return;
const frameProcessor = this.props.frameProcessor;
if (!this.isNativeViewMounted) return
const frameProcessor = this.props.frameProcessor
if (frameProcessor !== this.lastFrameProcessor) {
// frameProcessor argument identity changed. Update native to reflect the change.
if (frameProcessor != null) this.setFrameProcessor(frameProcessor);
else this.unsetFrameProcessor();
if (frameProcessor != null) this.setFrameProcessor(frameProcessor)
else this.unsetFrameProcessor()
this.lastFrameProcessor = frameProcessor;
this.lastFrameProcessor = frameProcessor
}
}
//#endregion
@@ -393,16 +393,16 @@ export class Camera extends React.PureComponent<CameraProps> {
/** @internal */
public render(): React.ReactNode {
// We remove the big `device` object from the props because we only need to pass `cameraId` to native.
const { device, frameProcessor, ...props } = this.props;
const { device, frameProcessor, ...props } = this.props
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (device == null) {
throw new Error(
'Camera: `device` is null! Select a valid Camera device. See: https://mrousavy.com/react-native-vision-camera/docs/guides/devices',
);
)
}
const shouldEnableBufferCompression = props.video === true && frameProcessor == null;
const shouldEnableBufferCompression = props.video === true && frameProcessor == null
return (
<NativeCameraView
@@ -415,7 +415,7 @@ export class Camera extends React.PureComponent<CameraProps> {
enableFrameProcessor={frameProcessor != null}
enableBufferCompression={props.enableBufferCompression ?? shouldEnableBufferCompression}
/>
);
)
}
}
//#endregion
@@ -425,4 +425,4 @@ const NativeCameraView = requireNativeComponent<NativeCameraViewProps>(
'CameraView',
// @ts-expect-error because the type declarations are kinda wrong, no?
Camera,
);
)

View File

@@ -1,5 +1,5 @@
import { Orientation } from './Orientation';
import type { PixelFormat } from './PixelFormat';
import { Orientation } from './Orientation'
import type { PixelFormat } from './PixelFormat'
/**
* Represents the camera device position.
@@ -8,7 +8,7 @@ import type { PixelFormat } from './PixelFormat';
* * `"front"`: Indicates that the device is physically located on the front of the phone
* * `"external"`: The camera device is an external camera, and has no fixed facing relative to the phone. (e.g. USB or Continuity Cameras)
*/
export type CameraPosition = 'front' | 'back' | 'external';
export type CameraPosition = 'front' | 'back' | 'external'
/**
 * Identifiers for a physical camera (one that actually exists on the back/front of the device)
@@ -23,7 +23,7 @@ export type CameraPosition = 'front' | 'back' | 'external';
* * `"wide-angle-camera"` + `"telephoto-camera"` = **dual camera**.
* * `"ultra-wide-angle-camera"` + `"wide-angle-camera"` + `"telephoto-camera"` = **triple camera**.
*/
export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-camera' | 'telephoto-camera';
export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-camera' | 'telephoto-camera'
/**
* Indicates a format's autofocus system.
@@ -32,7 +32,7 @@ export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-c
* * `"contrast-detection"`: Indicates that autofocus is achieved by contrast detection. Contrast detection performs a focus scan to find the optimal position
* * `"phase-detection"`: Indicates that autofocus is achieved by phase detection. Phase detection has the ability to achieve focus in many cases without a focus scan. Phase detection autofocus is typically less visually intrusive than contrast detection autofocus
*/
export type AutoFocusSystem = 'contrast-detection' | 'phase-detection' | 'none';
export type AutoFocusSystem = 'contrast-detection' | 'phase-detection' | 'none'
/**
* Indicates a format's supported video stabilization mode. Enabling video stabilization may introduce additional latency into the video capture pipeline.
@@ -43,7 +43,7 @@ export type AutoFocusSystem = 'contrast-detection' | 'phase-detection' | 'none';
* * `"cinematic-extended"`: Extended software- and hardware-based stabilization that aggressively crops and transforms the video to apply a smooth cinematic stabilization.
* * `"auto"`: Indicates that the most appropriate video stabilization mode for the device and format should be chosen automatically
*/
export type VideoStabilizationMode = 'off' | 'standard' | 'cinematic' | 'cinematic-extended' | 'auto';
export type VideoStabilizationMode = 'off' | 'standard' | 'cinematic' | 'cinematic-extended' | 'auto'
/**
* A Camera Device's stream-configuration format.
@@ -59,68 +59,68 @@ export interface CameraDeviceFormat {
/**
* The height of the highest resolution a still image (photo) can be produced in
*/
photoHeight: number;
photoHeight: number
/**
* The width of the highest resolution a still image (photo) can be produced in
*/
photoWidth: number;
photoWidth: number
/**
 * The video resolution's height
*/
videoHeight: number;
videoHeight: number
/**
* The video resolution's width
*/
videoWidth: number;
videoWidth: number
/**
* Maximum supported ISO value
*/
maxISO: number;
maxISO: number
/**
* Minimum supported ISO value
*/
minISO: number;
minISO: number
/**
* The video field of view in degrees
*/
fieldOfView: number;
fieldOfView: number
/**
* The maximum zoom factor (e.g. `128`)
*/
maxZoom: number;
maxZoom: number
/**
* Specifies whether this format supports HDR mode for video capture
*/
supportsVideoHDR: boolean;
supportsVideoHDR: boolean
/**
* Specifies whether this format supports HDR mode for photo capture
*/
supportsPhotoHDR: boolean;
supportsPhotoHDR: boolean
/**
* Specifies whether this format supports delivering depth data for photo or video capture.
*/
supportsDepthCapture: boolean;
supportsDepthCapture: boolean
/**
 * The minimum frame rate this Format needs to run at. High resolution formats often run at lower frame rates.
*/
minFps: number;
minFps: number
/**
* The maximum frame rate this Format is able to run at. High resolution formats often run at lower frame rates.
*/
maxFps: number;
maxFps: number
/**
* Specifies this format's auto focus system.
*/
autoFocusSystem: AutoFocusSystem;
autoFocusSystem: AutoFocusSystem
/**
* All supported video stabilization modes
*/
videoStabilizationModes: VideoStabilizationMode[];
videoStabilizationModes: VideoStabilizationMode[]
/**
* Specifies this format's supported pixel-formats.
* In most cases, this is `['native', 'yuv']`.
*/
pixelFormats: PixelFormat[];
pixelFormats: PixelFormat[]
}
/**
@@ -130,7 +130,7 @@ export interface CameraDevice {
/**
* The native ID of the camera device instance.
*/
id: string;
id: string
/**
* The physical devices this `CameraDevice` consists of.
*
@@ -139,7 +139,7 @@ export interface CameraDevice {
*
* You can check if the camera is a logical multi-camera by using the `isMultiCam` property.
*/
physicalDevices: PhysicalCameraDeviceType[];
physicalDevices: PhysicalCameraDeviceType[]
/**
* Specifies the physical position of this camera.
* - `back`: The Camera Device is located on the back of the phone. These devices can be used for capturing what's in front of the user.
@@ -149,19 +149,19 @@ export interface CameraDevice {
* - [Continuity Camera Devices](https://support.apple.com/en-us/HT213244) (e.g. your iPhone's or Mac's Camera connected through WiFi/Continuity)
* - Bluetooth/WiFi Camera Devices (if they are supported in the platform-native Camera APIs; Camera2 and AVFoundation)
*/
position: CameraPosition;
position: CameraPosition
/**
* A friendly localized name describing the camera.
*/
name: string;
name: string
/**
* Specifies whether this camera supports enabling flash for photo capture.
*/
hasFlash: boolean;
hasFlash: boolean
/**
* Specifies whether this camera supports continuously enabling the flash to act like a torch (flash with video capture)
*/
hasTorch: boolean;
hasTorch: boolean
/**
* A property indicating whether the device is a virtual multi-camera consisting of multiple combined physical cameras.
*
@@ -169,15 +169,15 @@ export interface CameraDevice {
* * The Dual Camera, which supports seamlessly switching between a wide and telephoto camera while zooming and generating depth data from the disparities between the different points of view of the physical cameras.
* * The TrueDepth Camera, which generates depth data from disparities between a YUV camera and an Infrared camera pointed in the same direction.
*/
isMultiCam: boolean;
isMultiCam: boolean
/**
* Minimum available zoom factor (e.g. `1`)
*/
minZoom: number;
minZoom: number
/**
* Maximum available zoom factor (e.g. `128`)
*/
maxZoom: number;
maxZoom: number
/**
* The zoom factor where the camera is "neutral".
*
@@ -193,36 +193,36 @@ export interface CameraDevice {
* zoom: zoom.value
* }))
*/
neutralZoom: number;
neutralZoom: number
/**
* All available formats for this camera device. Use this to find the best format for your use case and set it to the Camera's {@linkcode CameraProps.format | Camera's .format} property.
*
* See [the Camera Formats documentation](https://react-native-vision-camera.com/docs/guides/formats) for more information about Camera Formats.
*/
formats: CameraDeviceFormat[];
formats: CameraDeviceFormat[]
/**
* Whether this camera device supports low light boost.
*/
supportsLowLightBoost: boolean;
supportsLowLightBoost: boolean
/**
* Whether this camera supports taking photos in RAW format
*
* **! Work in Progress !**
*/
supportsRawCapture: boolean;
supportsRawCapture: boolean
/**
* Specifies whether this device supports focusing ({@linkcode Camera.focus | Camera.focus(...)})
*/
supportsFocus: boolean;
supportsFocus: boolean
/**
* The hardware level of the Camera.
* - On Android, some older devices are running at a `legacy` or `limited` level which means they are running in a backwards compatible mode.
* - On iOS, all devices are `full`.
*/
hardwareLevel: 'legacy' | 'limited' | 'full';
hardwareLevel: 'legacy' | 'limited' | 'full'
/**
* Represents the sensor's orientation relative to the phone.
* For most phones this will be landscape, as Camera sensors are usually always rotated by 90 degrees (i.e. width and height are flipped).
*/
sensorOrientation: Orientation;
sensorOrientation: Orientation
}

View File

@@ -1,27 +1,27 @@
import { NativeModules, NativeEventEmitter } from 'react-native';
import { CameraDevice } from './CameraDevice';
import { NativeModules, NativeEventEmitter } from 'react-native'
import { CameraDevice } from './CameraDevice'
const CameraDevicesManager = NativeModules.CameraDevices as {
getConstants: () => {
availableCameraDevices: CameraDevice[];
userPreferredCameraDevice: CameraDevice | undefined;
};
};
availableCameraDevices: CameraDevice[]
userPreferredCameraDevice: CameraDevice | undefined
}
}
const constants = CameraDevicesManager.getConstants();
let devices = constants.availableCameraDevices;
const constants = CameraDevicesManager.getConstants()
let devices = constants.availableCameraDevices
const DEVICES_CHANGED_NAME = 'CameraDevicesChanged';
const DEVICES_CHANGED_NAME = 'CameraDevicesChanged'
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const eventEmitter = new NativeEventEmitter(CameraDevicesManager as any);
const eventEmitter = new NativeEventEmitter(CameraDevicesManager as any)
eventEmitter.addListener(DEVICES_CHANGED_NAME, (newDevices: CameraDevice[]) => {
devices = newDevices;
});
devices = newDevices
})
export const CameraDevices = {
userPreferredCameraDevice: constants.userPreferredCameraDevice,
getAvailableCameraDevices: () => devices,
addCameraDevicesChangedListener: (callback: (newDevices: CameraDevice[]) => void) => {
return eventEmitter.addListener(DEVICES_CHANGED_NAME, callback);
return eventEmitter.addListener(DEVICES_CHANGED_NAME, callback)
},
};
}

View File

@@ -1,10 +1,10 @@
export type PermissionError = 'permission/microphone-permission-denied' | 'permission/camera-permission-denied';
export type PermissionError = 'permission/microphone-permission-denied' | 'permission/camera-permission-denied'
export type ParameterError =
| 'parameter/invalid-parameter'
| 'parameter/unsupported-os'
| 'parameter/unsupported-output'
| 'parameter/unsupported-input'
| 'parameter/invalid-combination';
| 'parameter/invalid-combination'
export type DeviceError =
| 'device/configuration-error'
| 'device/no-device'
@@ -14,21 +14,21 @@ export type DeviceError =
| 'device/pixel-format-not-supported'
| 'device/low-light-boost-not-supported'
| 'device/focus-not-supported'
| 'device/camera-not-available-on-simulator';
| 'device/camera-not-available-on-simulator'
export type FormatError =
| 'format/invalid-fps'
| 'format/invalid-hdr'
| 'format/incompatible-pixel-format-with-hdr-setting'
| 'format/invalid-low-light-boost'
| 'format/invalid-format'
| 'format/invalid-color-space';
| 'format/invalid-color-space'
export type SessionError =
| 'session/camera-not-ready'
| 'session/camera-cannot-be-opened'
| 'session/camera-has-been-disconnected'
| 'session/audio-session-setup-failed'
| 'session/audio-in-use-by-other-app'
| 'session/audio-session-failed-to-activate';
| 'session/audio-session-failed-to-activate'
export type CaptureError =
| 'capture/invalid-photo-format'
| 'capture/encoder-error'
@@ -50,13 +50,13 @@ export type CaptureError =
| 'capture/video-not-enabled'
| 'capture/photo-not-enabled'
| 'capture/aborted'
| 'capture/unknown';
| 'capture/unknown'
export type SystemError =
| 'system/camera-module-not-found'
| 'system/no-camera-manager'
| 'system/frame-processors-unavailable'
| 'system/view-not-found';
export type UnknownError = 'unknown/unknown';
| 'system/view-not-found'
export type UnknownError = 'unknown/unknown'
/**
* Represents a JSON-style error cause. This contains native `NSError`/`Throwable` information, and can have recursive {@linkcode ErrorWithCause.cause | .cause} properties until the ultimate cause has been found.
@@ -68,42 +68,42 @@ export interface ErrorWithCause {
* * iOS: `NSError.code`
* * Android: N/A
*/
code?: number;
code?: number
/**
* The native error's domain.
*
* * iOS: `NSError.domain`
* * Android: N/A
*/
domain?: string;
domain?: string
/**
* The native error description
*
* * iOS: `NSError.message`
* * Android: `Throwable.message`
*/
message: string;
message: string
/**
* Optional additional details
*
* * iOS: `NSError.userInfo`
* * Android: N/A
*/
details?: Record<string, unknown>;
details?: Record<string, unknown>
/**
* Optional Java stacktrace
*
* * iOS: N/A
* * Android: `Throwable.stacktrace.toString()`
*/
stacktrace?: string;
stacktrace?: string
/**
* Optional additional cause for nested errors
*
* * iOS: N/A
* * Android: `Throwable.cause`
*/
cause?: ErrorWithCause;
cause?: ErrorWithCause
}
type CameraErrorCode =
@@ -114,42 +114,42 @@ type CameraErrorCode =
| SessionError
| CaptureError
| SystemError
| UnknownError;
| UnknownError
/**
 * Represents any kind of error that occurred in the {@linkcode Camera} View Module.
*/
class CameraError<TCode extends CameraErrorCode> extends Error {
private readonly _code: TCode;
private readonly _message: string;
private readonly _cause?: ErrorWithCause;
private readonly _code: TCode
private readonly _message: string
private readonly _cause?: ErrorWithCause
public get code(): TCode {
return this._code;
return this._code
}
public get message(): string {
return this._message;
return this._message
}
public get cause(): Error | undefined {
const c = this._cause;
if (c == null) return undefined;
return new Error(`[${c.code}]: ${c.message}`);
const c = this._cause
if (c == null) return undefined
return new Error(`[${c.code}]: ${c.message}`)
}
/**
* @internal
*/
constructor(code: TCode, message: string, cause?: ErrorWithCause) {
super(`[${code}]: ${message}${cause != null ? ` (Cause: ${cause.message})` : ''}`);
super.name = code;
super.message = message;
this._code = code;
this._message = message;
this._cause = cause;
super(`[${code}]: ${message}${cause != null ? ` (Cause: ${cause.message})` : ''}`)
super.name = code
super.message = message
this._code = code
this._message = message
this._cause = cause
}
public toString(): string {
return `[${this.code}]: ${this.message}`;
return `[${this.code}]: ${this.message}`
}
}
@@ -182,7 +182,7 @@ export const isErrorWithCause = (error: unknown): error is ErrorWithCause =>
// @ts-expect-error error is still unknown
(typeof error.stacktrace === 'string' || error.stacktrace == null) &&
// @ts-expect-error error is still unknown
(isErrorWithCause(error.cause) || error.cause == null);
(isErrorWithCause(error.cause) || error.cause == null)
const isCameraErrorJson = (error: unknown): error is { code: string; message: string; cause?: ErrorWithCause } =>
typeof error === 'object' &&
@@ -192,7 +192,7 @@ const isCameraErrorJson = (error: unknown): error is { code: string; message: st
// @ts-expect-error error is still unknown
typeof error.message === 'string' &&
// @ts-expect-error error is still unknown
(typeof error.cause === 'object' || error.cause == null);
(typeof error.cause === 'object' || error.cause == null)
/**
* Tries to parse an error coming from native to a typed JS camera error.
@@ -203,16 +203,16 @@ const isCameraErrorJson = (error: unknown): error is { code: string; message: st
export const tryParseNativeCameraError = <T>(nativeError: T): (CameraRuntimeError | CameraCaptureError) | T => {
if (isCameraErrorJson(nativeError)) {
if (nativeError.code.startsWith('capture')) {
return new CameraCaptureError(nativeError.code as CaptureError, nativeError.message, nativeError.cause);
return new CameraCaptureError(nativeError.code as CaptureError, nativeError.message, nativeError.cause)
} else {
return new CameraRuntimeError(
// @ts-expect-error the code is string, we narrow it down to TS union.
nativeError.code,
nativeError.message,
nativeError.cause,
);
)
}
} else {
return nativeError;
return nativeError
}
};
}

View File

@@ -1,13 +1,13 @@
import type { ViewProps } from 'react-native';
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from './CameraDevice';
import type { CameraRuntimeError } from './CameraError';
import type { Frame } from './Frame';
import type { Orientation } from './Orientation';
import type { ViewProps } from 'react-native'
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from './CameraDevice'
import type { CameraRuntimeError } from './CameraError'
import type { Frame } from './Frame'
import type { Orientation } from './Orientation'
export type FrameProcessor = {
frameProcessor: (frame: Frame) => void;
type: 'frame-processor';
};
frameProcessor: (frame: Frame) => void
type: 'frame-processor'
}
// TODO: Replace `enableHighQualityPhotos: boolean` in favor of `priorization: 'photo' | 'video'`
// TODO: Use RCT_ENUM_PARSER for stuff like torch, videoStabilizationMode, and orientation
@@ -34,7 +34,7 @@ export interface CameraProps extends ViewProps {
* )
* ```
*/
device: CameraDevice;
device: CameraDevice
/**
* Whether the Camera should actively stream video frames, or not. See the [documentation about the `isActive` prop](https://react-native-vision-camera.com/docs/guides/lifecycle#the-isactive-prop) for more information.
*
@@ -42,23 +42,23 @@ export interface CameraProps extends ViewProps {
*
* > Note: If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again. In return, it will use less resources since the Camera will be completely destroyed when unmounted.
*/
isActive: boolean;
isActive: boolean
//#region Use-cases
/**
* Enables **photo capture** with the `takePhoto` function (see ["Taking Photos"](https://react-native-vision-camera.com/docs/guides/capturing#taking-photos))
*/
photo?: boolean;
photo?: boolean
/**
* Enables **video capture** with the `startRecording` function (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/capturing/#recording-videos))
*
* Note: If both the `photo` and `video` properties are enabled at the same time and the device is running at a `hardwareLevel` of `'legacy'` or `'limited'`, VisionCamera _might_ use a lower resolution for video capture due to hardware constraints.
*/
video?: boolean;
video?: boolean
/**
* Enables **audio capture** for video recordings (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/capturing/#recording-videos))
*/
audio?: boolean;
audio?: boolean
/**
* Specifies the pixel format for the video pipeline.
*
@@ -72,7 +72,7 @@ export interface CameraProps extends ViewProps {
*
* @default `native`
*/
pixelFormat?: 'native' | 'yuv' | 'rgb';
pixelFormat?: 'native' | 'yuv' | 'rgb'
//#endregion
//#region Common Props (torch, zoom)
@@ -83,7 +83,7 @@ export interface CameraProps extends ViewProps {
*
* @default "off"
*/
torch?: 'off' | 'on';
torch?: 'off' | 'on'
/**
* Specifies the zoom factor of the current camera, in "factor"/scale.
*
@@ -95,7 +95,7 @@ export interface CameraProps extends ViewProps {
*
* @default 1.0
*/
zoom?: number;
zoom?: number
/**
* Enables or disables the native pinch to zoom gesture.
*
@@ -103,14 +103,14 @@ export interface CameraProps extends ViewProps {
*
* @default false
*/
enableZoomGesture?: boolean;
enableZoomGesture?: boolean
//#endregion
//#region Format/Preset selection
/**
* Selects a given format. By default, the best matching format is chosen.
*/
format?: CameraDeviceFormat;
format?: CameraDeviceFormat
/**
* Specifies the Preview's resize mode.
* * `"cover"`: Keep aspect ratio and fill entire parent view (centered).
@@ -118,19 +118,19 @@ export interface CameraProps extends ViewProps {
*
* @default "cover"
*/
resizeMode?: 'cover' | 'contain';
resizeMode?: 'cover' | 'contain'
/**
* Specify the frames per second this camera should use. Make sure the given `format` includes a frame rate range with the given `fps`.
*
* Requires `format` to be set that supports the given `fps`.
*/
fps?: number;
fps?: number
/**
* Enables or disables HDR on this camera device. Make sure the given `format` supports HDR mode.
*
* Requires `format` to be set that supports `photoHDR`/`videoHDR`.
*/
hdr?: boolean;
hdr?: boolean
/**
* Enables or disables lossless buffer compression for the video stream.
* If you only use {@linkcode video} or a {@linkcode frameProcessor}, this
@@ -151,19 +151,19 @@ export interface CameraProps extends ViewProps {
* - true // if video={true} and frameProcessor={undefined}
* - false // otherwise
*/
enableBufferCompression?: boolean;
enableBufferCompression?: boolean
/**
* Enables or disables low-light boost on this camera device. Make sure the given `format` supports low-light boost.
*
* Requires a `format` to be set that supports `lowLightBoost`.
*/
lowLightBoost?: boolean;
lowLightBoost?: boolean
/**
* Specifies the video stabilization mode to use.
*
* Requires a `format` to be set that contains the given `videoStabilizationMode`.
*/
videoStabilizationMode?: VideoStabilizationMode;
videoStabilizationMode?: VideoStabilizationMode
//#endregion
/**
@@ -171,7 +171,7 @@ export interface CameraProps extends ViewProps {
*
* @default false
*/
enableDepthData?: boolean;
enableDepthData?: boolean
/**
* A boolean specifying whether the photo render pipeline is prepared for portrait effects matte delivery.
*
@@ -180,7 +180,7 @@ export interface CameraProps extends ViewProps {
* @platform iOS 12.0+
* @default false
*/
enablePortraitEffectsMatteDelivery?: boolean;
enablePortraitEffectsMatteDelivery?: boolean
/**
* Indicates whether the Camera should prepare the photo pipeline to provide maximum quality photos.
*
@@ -192,28 +192,28 @@ export interface CameraProps extends ViewProps {
*
* @default false
*/
enableHighQualityPhotos?: boolean;
enableHighQualityPhotos?: boolean
/**
* If `true`, show a debug view to display the FPS of the Camera session.
* This is useful for debugging your Frame Processor's speed.
*
* @default false
*/
enableFpsGraph?: boolean;
enableFpsGraph?: boolean
/**
* Represents the orientation of all Camera Outputs (Photo, Video, and Frame Processor). If this value is not set, the device orientation is used.
*/
orientation?: Orientation;
orientation?: Orientation
//#region Events
/**
* Called when any kind of runtime error occured.
*/
onError?: (error: CameraRuntimeError) => void;
onError?: (error: CameraRuntimeError) => void
/**
* Called when the camera was successfully initialized.
*/
onInitialized?: () => void;
onInitialized?: () => void
/**
* A worklet which will be called for every frame the Camera "sees".
*
@@ -230,6 +230,6 @@ export interface CameraProps extends ViewProps {
* return <Camera {...cameraProps} frameProcessor={frameProcessor} />
* ```
*/
frameProcessor?: FrameProcessor;
frameProcessor?: FrameProcessor
//#endregion
}

View File

@@ -1,5 +1,5 @@
import type { Orientation } from './Orientation';
import { PixelFormat } from './PixelFormat';
import type { Orientation } from './Orientation'
import { PixelFormat } from './PixelFormat'
/**
* A single frame, as seen by the camera. This is backed by a C++ HostObject wrapping the native GPU buffer.
@@ -18,42 +18,42 @@ export interface Frame {
* Whether the underlying buffer is still valid or not.
* A Frame is valid as long as your Frame Processor (or a `runAsync(..)` operation) is still running
*/
isValid: boolean;
isValid: boolean
/**
* Returns the width of the frame, in pixels.
*/
width: number;
width: number
/**
* Returns the height of the frame, in pixels.
*/
height: number;
height: number
/**
* Returns the amount of bytes per row.
*/
bytesPerRow: number;
bytesPerRow: number
/**
* Returns the number of planes this frame contains.
*/
planesCount: number;
planesCount: number
/**
* Returns whether the Frame is mirrored (selfie camera) or not.
*/
isMirrored: boolean;
isMirrored: boolean
/**
* Returns the timestamp of the Frame relative to the host sytem's clock.
*/
timestamp: number;
timestamp: number
/**
* Represents the orientation of the Frame.
*
* Some ML Models are trained for specific orientations, so they need to be taken into
* consideration when running a frame processor. See also: {@linkcode isMirrored}
*/
orientation: Orientation;
orientation: Orientation
/**
* Represents the pixel-format of the Frame.
*/
pixelFormat: PixelFormat;
pixelFormat: PixelFormat
/**
* Get the underlying data of the Frame as a uint8 array buffer.
@@ -73,7 +73,7 @@ export interface Frame {
* }, [])
* ```
*/
toArrayBuffer(): Uint8Array;
toArrayBuffer(): Uint8Array
/**
* Returns a string representation of the frame.
* @example
@@ -81,7 +81,7 @@ export interface Frame {
* console.log(frame.toString()) // -> "3840 x 2160 Frame"
* ```
*/
toString(): string;
toString(): string
}
/** @internal */
@@ -92,12 +92,12 @@ export interface FrameInternal extends Frame {
* This is a private API, do not use this.
* @internal
*/
incrementRefCount(): void;
incrementRefCount(): void
/**
* Increment the Frame Buffer ref-count by one.
*
* This is a private API, do not use this.
* @internal
*/
decrementRefCount(): void;
decrementRefCount(): void
}

View File

@@ -1,14 +1,14 @@
import type { Frame, FrameInternal } from './Frame';
import type { FrameProcessor } from './CameraProps';
import { CameraRuntimeError } from './CameraError';
import type { Frame, FrameInternal } from './Frame'
import type { FrameProcessor } from './CameraProps'
import { CameraRuntimeError } from './CameraError'
// only import typescript types
import type TWorklets from 'react-native-worklets-core';
import { CameraModule } from './NativeCameraModule';
import { assertJSIAvailable } from './JSIHelper';
import type TWorklets from 'react-native-worklets-core'
import { CameraModule } from './NativeCameraModule'
import { assertJSIAvailable } from './JSIHelper'
type BasicParameterType = string | number | boolean | undefined;
type ParameterType = BasicParameterType | BasicParameterType[] | Record<string, BasicParameterType | undefined>;
type BasicParameterType = string | number | boolean | undefined
type ParameterType = BasicParameterType | BasicParameterType[] | Record<string, BasicParameterType | undefined>
interface FrameProcessorPlugin {
/**
@@ -17,96 +17,97 @@ interface FrameProcessorPlugin {
* @param options (optional) Additional options. Options will be converted to a native dictionary
* @returns (optional) A value returned from the native Frame Processor Plugin (or undefined)
*/
call: (frame: Frame, options?: Record<string, ParameterType>) => ParameterType;
call: (frame: Frame, options?: Record<string, ParameterType>) => ParameterType
}
interface TVisionCameraProxy {
setFrameProcessor: (viewTag: number, frameProcessor: FrameProcessor) => void;
removeFrameProcessor: (viewTag: number) => void;
setFrameProcessor: (viewTag: number, frameProcessor: FrameProcessor) => void
removeFrameProcessor: (viewTag: number) => void
/**
* Creates a new instance of a Frame Processor Plugin.
* The Plugin has to be registered on the native side, otherwise this returns `undefined`
*/
getFrameProcessorPlugin: (name: string) => FrameProcessorPlugin | undefined;
getFrameProcessorPlugin: (name: string) => FrameProcessorPlugin | undefined
}
let hasWorklets = false;
let isAsyncContextBusy = { value: false };
let hasWorklets = false
let isAsyncContextBusy = { value: false }
let runOnAsyncContext = (_frame: Frame, _func: () => void): void => {
throw new CameraRuntimeError(
'system/frame-processors-unavailable',
'Frame Processors are not available, react-native-worklets-core is not installed!',
);
};
)
}
try {
assertJSIAvailable();
assertJSIAvailable()
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { Worklets } = require('react-native-worklets-core') as typeof TWorklets;
const { Worklets } = require('react-native-worklets-core') as typeof TWorklets
isAsyncContextBusy = Worklets.createSharedValue(false);
const asyncContext = Worklets.createContext('VisionCamera.async');
isAsyncContextBusy = Worklets.createSharedValue(false)
const asyncContext = Worklets.createContext('VisionCamera.async')
runOnAsyncContext = Worklets.createRunInContextFn((frame: Frame, func: () => void) => {
'worklet';
'worklet'
try {
// Call long-running function
func();
func()
} finally {
// Potentially delete Frame if we were the last ref
(frame as FrameInternal).decrementRefCount();
const internal = frame as FrameInternal
internal.decrementRefCount()
isAsyncContextBusy.value = false;
isAsyncContextBusy.value = false
}
}, asyncContext);
hasWorklets = true;
}, asyncContext)
hasWorklets = true
} catch (e) {
// Worklets are not installed, so Frame Processors are disabled.
}
let proxy: TVisionCameraProxy = {
getFrameProcessorPlugin: () => {
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!');
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!')
},
removeFrameProcessor: () => {
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!');
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!')
},
setFrameProcessor: () => {
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!');
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!')
},
};
}
if (hasWorklets) {
// Install native Frame Processor Runtime Manager
const result = CameraModule.installFrameProcessorBindings() as unknown;
const result = CameraModule.installFrameProcessorBindings() as unknown
if (result !== true)
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Failed to install Frame Processor JSI bindings!');
throw new CameraRuntimeError('system/frame-processors-unavailable', 'Failed to install Frame Processor JSI bindings!')
// @ts-expect-error global is untyped, it's a C++ host-object
proxy = global.VisionCameraProxy as TVisionCameraProxy;
proxy = global.VisionCameraProxy as TVisionCameraProxy
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (proxy == null) {
throw new CameraRuntimeError(
'system/frame-processors-unavailable',
'Failed to install VisionCameraProxy. Are Frame Processors properly enabled?',
);
)
}
}
export const VisionCameraProxy = proxy;
export const VisionCameraProxy = proxy
declare global {
// eslint-disable-next-line no-var
var __frameProcessorRunAtTargetFpsMap: Record<string, number | undefined> | undefined;
var __frameProcessorRunAtTargetFpsMap: Record<string, number | undefined> | undefined
}
function getLastFrameProcessorCall(frameProcessorFuncId: string): number {
'worklet';
return global.__frameProcessorRunAtTargetFpsMap?.[frameProcessorFuncId] ?? 0;
'worklet'
return global.__frameProcessorRunAtTargetFpsMap?.[frameProcessorFuncId] ?? 0
}
function setLastFrameProcessorCall(frameProcessorFuncId: string, value: number): void {
'worklet';
if (global.__frameProcessorRunAtTargetFpsMap == null) global.__frameProcessorRunAtTargetFpsMap = {};
global.__frameProcessorRunAtTargetFpsMap[frameProcessorFuncId] = value;
'worklet'
if (global.__frameProcessorRunAtTargetFpsMap == null) global.__frameProcessorRunAtTargetFpsMap = {}
global.__frameProcessorRunAtTargetFpsMap[frameProcessorFuncId] = value
}
/**
@@ -134,20 +135,20 @@ function setLastFrameProcessorCall(frameProcessorFuncId: string, value: number):
* ```
*/
export function runAtTargetFps<T>(fps: number, func: () => T): T | undefined {
'worklet';
'worklet'
// @ts-expect-error
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const funcId = func.__workletHash ?? '1';
const funcId = func.__workletHash ?? '1'
const targetIntervalMs = 1000 / fps; // <-- 60 FPS => 16,6667ms interval
const now = performance.now();
const diffToLastCall = now - getLastFrameProcessorCall(funcId);
const targetIntervalMs = 1000 / fps // <-- 60 FPS => 16,6667ms interval
const now = performance.now()
const diffToLastCall = now - getLastFrameProcessorCall(funcId)
if (diffToLastCall >= targetIntervalMs) {
setLastFrameProcessorCall(funcId, now);
setLastFrameProcessorCall(funcId, now)
// Last Frame Processor call is already so long ago that we want to make a new call
return func();
return func()
}
return undefined;
return undefined
}
/**
@@ -175,19 +176,20 @@ export function runAtTargetFps<T>(fps: number, func: () => T): T | undefined {
* ```
*/
export function runAsync(frame: Frame, func: () => void): void {
'worklet';
'worklet'
if (isAsyncContextBusy.value) {
// async context is currently busy, we cannot schedule new work in time.
// drop this frame/runAsync call.
return;
return
}
// Increment ref count by one
(frame as FrameInternal).incrementRefCount();
const internal = frame as FrameInternal
internal.incrementRefCount()
isAsyncContextBusy.value = true;
isAsyncContextBusy.value = true
// Call in separate background context
runOnAsyncContext(frame, func);
runOnAsyncContext(frame, func)
}

View File

@@ -1,4 +1,4 @@
import { CameraRuntimeError } from './CameraError';
import { CameraRuntimeError } from './CameraError'
export function assertJSIAvailable(): void {
// Check if we are running on-device (JSI)
@@ -7,6 +7,6 @@ export function assertJSIAvailable(): void {
throw new CameraRuntimeError(
'system/frame-processors-unavailable',
'Failed to initialize VisionCamera Frame Processors: React Native is not running on-device. Frame Processors can only be used when synchronous method invocations (JSI) are possible. If you are using a remote debugger (e.g. Chrome), switch to an on-device debugger (e.g. Flipper) instead.',
);
)
}
}

View File

@@ -1,42 +1,42 @@
import { NativeModules, Platform } from 'react-native';
import { CameraRuntimeError } from './CameraError';
import { NativeModules, Platform } from 'react-native'
import { CameraRuntimeError } from './CameraError'
const supportedPlatforms = ['ios', 'android', 'macos'];
const supportedPlatforms = ['ios', 'android', 'macos']
// NativeModules automatically resolves 'CameraView' to 'CameraViewModule'
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
export const CameraModule = NativeModules.CameraView;
export const CameraModule = NativeModules.CameraView
if (CameraModule == null) {
if (!supportedPlatforms.includes(Platform.OS)) {
throw new CameraRuntimeError(
'system/camera-module-not-found',
`Failed to initialize VisionCamera: VisionCamera currently does not work on ${Platform.OS}.`,
);
)
}
let message = 'Failed to initialize VisionCamera: The native Camera Module (`NativeModules.CameraView`) could not be found.';
message += '\n* Make sure react-native-vision-camera is correctly autolinked (run `npx react-native config` to verify)';
if (Platform.OS === 'ios' || Platform.OS === 'macos') message += '\n* Make sure you ran `pod install` in the ios/ directory.';
let message = 'Failed to initialize VisionCamera: The native Camera Module (`NativeModules.CameraView`) could not be found.'
message += '\n* Make sure react-native-vision-camera is correctly autolinked (run `npx react-native config` to verify)'
if (Platform.OS === 'ios' || Platform.OS === 'macos') message += '\n* Make sure you ran `pod install` in the ios/ directory.'
if (Platform.OS === 'android') message += '\n* Make sure gradle is synced.';
if (Platform.OS === 'android') message += '\n* Make sure gradle is synced.'
// check if Expo
// @ts-expect-error expo global JSI modules are not typed
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const ExpoConstants = global.expo?.modules?.ExponentConstants;
const ExpoConstants = global.expo?.modules?.ExponentConstants
if (ExpoConstants != null) {
if (ExpoConstants.appOwnership === 'expo') {
// We're running Expo Go
throw new CameraRuntimeError(
'system/camera-module-not-found',
`react-native-vision-camera is not supported in Expo Go! Use EAS/expo prebuild instead (\`expo run:${Platform.OS}\`). For more info, see https://docs.expo.dev/workflow/prebuild/.`,
);
)
} else {
// We're running Expo bare / standalone
message += '\n* Make sure you ran `expo prebuild`.';
message += '\n* Make sure you ran `expo prebuild`.'
}
}
message += '\n* Make sure you rebuilt the app.';
throw new CameraRuntimeError('system/camera-module-not-found', message);
message += '\n* Make sure you rebuilt the app.'
throw new CameraRuntimeError('system/camera-module-not-found', message)
}

View File

@@ -7,4 +7,4 @@
* - `portrait-upside-down`: **180°** (home-button at the top)
* - `landscape-right`: **270°** (home-button on the right)
*/
export type Orientation = 'portrait' | 'portrait-upside-down' | 'landscape-left' | 'landscape-right';
export type Orientation = 'portrait' | 'portrait-upside-down' | 'landscape-left' | 'landscape-right'

View File

@@ -1,5 +1,5 @@
import { Orientation } from './Orientation';
import type { TemporaryFile } from './TemporaryFile';
import { Orientation } from './Orientation'
import type { TemporaryFile } from './TemporaryFile'
export interface TakePhotoOptions {
/**
@@ -11,25 +11,25 @@ export interface TakePhotoOptions {
*
* @default "balanced"
*/
qualityPrioritization?: 'quality' | 'balanced' | 'speed';
qualityPrioritization?: 'quality' | 'balanced' | 'speed'
/**
* Whether the Flash should be enabled or disabled
*
* @default "auto"
*/
flash?: 'on' | 'off' | 'auto';
flash?: 'on' | 'off' | 'auto'
/**
* Specifies whether red-eye reduction should be applied automatically on flash captures.
*
* @default false
*/
enableAutoRedEyeReduction?: boolean;
enableAutoRedEyeReduction?: boolean
/**
* Indicates whether still image stabilization will be enabled when capturing the photo
*
* @default false
*/
enableAutoStabilization?: boolean;
enableAutoStabilization?: boolean
/**
* Specifies whether the photo output should use content aware distortion correction on this photo request.
* For example, the algorithm may not apply correction to faces in the center of a photo, but may apply it to faces near the photos edges.
@@ -37,13 +37,13 @@ export interface TakePhotoOptions {
* @platform iOS
* @default false
*/
enableAutoDistortionCorrection?: boolean;
enableAutoDistortionCorrection?: boolean
/**
* Whether to play the default shutter "click" sound when taking a picture or not.
*
* @default true
*/
enableShutterSound?: boolean;
enableShutterSound?: boolean
}
/**
@@ -55,26 +55,26 @@ export interface PhotoFile extends TemporaryFile {
/**
* The width of the photo, in pixels.
*/
width: number;
width: number
/**
* The height of the photo, in pixels.
*/
height: number;
height: number
/**
* Whether this photo is in RAW format or not.
*/
isRawPhoto: boolean;
isRawPhoto: boolean
/**
* Display orientation of the photo, relative to the Camera's sensor orientation.
*
* Note that Camera sensors are landscape, so e.g. "portrait" photos will have a value of "landscape-left", etc.
*/
orientation: Orientation;
orientation: Orientation
/**
* Whether this photo is mirrored (selfies) or not.
*/
isMirrored: boolean;
thumbnail?: Record<string, unknown>;
isMirrored: boolean
thumbnail?: Record<string, unknown>
/**
* Metadata information describing the captured image. (iOS only)
*
@@ -95,66 +95,66 @@ export interface PhotoFile extends TemporaryFile {
* * 7 = 270 degrees: image has been flipped back-to-front and is on its far side.
* * 8 = 270 degrees, mirrored: image is on its far side.
*/
Orientation: number;
Orientation: number
/**
* @platform iOS
*/
DPIHeight: number;
DPIHeight: number
/**
* @platform iOS
*/
DPIWidth: number;
DPIWidth: number
/**
* Represents any data Apple cameras write to the metadata
*
* @platform iOS
*/
'{MakerApple}'?: Record<string, unknown>;
'{MakerApple}'?: Record<string, unknown>
'{TIFF}': {
ResolutionUnit: number;
Software: string;
Make: string;
DateTime: string;
XResolution: number;
ResolutionUnit: number
Software: string
Make: string
DateTime: string
XResolution: number
/**
* @platform iOS
*/
HostComputer?: string;
Model: string;
YResolution: number;
};
HostComputer?: string
Model: string
YResolution: number
}
'{Exif}': {
DateTimeOriginal: string;
ExposureTime: number;
FNumber: number;
LensSpecification: number[];
ExposureBiasValue: number;
ColorSpace: number;
FocalLenIn35mmFilm: number;
BrightnessValue: number;
ExposureMode: number;
LensModel: string;
SceneType: number;
PixelXDimension: number;
ShutterSpeedValue: number;
SensingMethod: number;
SubjectArea: number[];
ApertureValue: number;
SubsecTimeDigitized: string;
FocalLength: number;
LensMake: string;
SubsecTimeOriginal: string;
OffsetTimeDigitized: string;
PixelYDimension: number;
ISOSpeedRatings: number[];
WhiteBalance: number;
DateTimeDigitized: string;
OffsetTimeOriginal: string;
ExifVersion: string;
OffsetTime: string;
Flash: number;
ExposureProgram: number;
MeteringMode: number;
};
};
DateTimeOriginal: string
ExposureTime: number
FNumber: number
LensSpecification: number[]
ExposureBiasValue: number
ColorSpace: number
FocalLenIn35mmFilm: number
BrightnessValue: number
ExposureMode: number
LensModel: string
SceneType: number
PixelXDimension: number
ShutterSpeedValue: number
SensingMethod: number
SubjectArea: number[]
ApertureValue: number
SubsecTimeDigitized: string
FocalLength: number
LensMake: string
SubsecTimeOriginal: string
OffsetTimeDigitized: string
PixelYDimension: number
ISOSpeedRatings: number[]
WhiteBalance: number
DateTimeDigitized: string
OffsetTimeOriginal: string
ExifVersion: string
OffsetTime: string
Flash: number
ExposureProgram: number
MeteringMode: number
}
}
}

View File

@@ -12,4 +12,4 @@
* - `native`: Frame is in the Camera's native Hardware Buffer format (PRIVATE). This is the most efficient Format.
* - `unknown`: Frame has unknown/unsupported pixel-format.
*/
export type PixelFormat = 'yuv' | 'rgb' | 'dng' | 'native' | 'unknown';
export type PixelFormat = 'yuv' | 'rgb' | 'dng' | 'native' | 'unknown'

View File

@@ -5,9 +5,9 @@ export interface Point {
/**
* The X coordinate of this Point. (double)
*/
x: number;
x: number
/**
* The Y coordinate of this Point. (double)
*/
y: number;
y: number
}

View File

@@ -9,5 +9,5 @@ export interface TemporaryFile {
*
* * **Note:** This file might get deleted once the app closes because it lives in the temp directory.
*/
path: string;
path: string
}

View File

@@ -1,29 +1,29 @@
import type { CameraCaptureError } from './CameraError';
import type { TemporaryFile } from './TemporaryFile';
import type { CameraCaptureError } from './CameraError'
import type { TemporaryFile } from './TemporaryFile'
export interface RecordVideoOptions {
/**
* Set the video flash mode. Natively, this just enables the torch while recording.
*/
flash?: 'on' | 'off' | 'auto';
flash?: 'on' | 'off' | 'auto'
/**
* Specifies the output file type to record videos into.
*/
fileType?: 'mov' | 'mp4';
fileType?: 'mov' | 'mp4'
/**
* Called when there was an unexpected runtime error while recording the video.
*/
onRecordingError: (error: CameraCaptureError) => void;
onRecordingError: (error: CameraCaptureError) => void
/**
* Called when the recording has been successfully saved to file.
*/
onRecordingFinished: (video: VideoFile) => void;
onRecordingFinished: (video: VideoFile) => void
/**
* The Video Codec to record in.
* - `h264`: Widely supported, but might be less efficient, especially with larger sizes or framerates.
* - `h265`: The HEVC (High-Efficient-Video-Codec) for higher efficient video recordings.
*/
videoCodec?: 'h264' | 'h265';
videoCodec?: 'h264' | 'h265'
}
/**
@@ -35,5 +35,5 @@ export interface VideoFile extends TemporaryFile {
/**
* Represents the duration of the video, in seconds.
*/
duration: number;
duration: number
}

View File

@@ -1,13 +1,13 @@
import { Dimensions } from 'react-native';
import { FormatFilter } from './getCameraFormat';
import { Dimensions } from 'react-native'
import { FormatFilter } from './getCameraFormat'
type TTemplates = {
[key: string]: FormatFilter[];
};
[key: string]: FormatFilter[]
}
const SnapchatResolution = { width: 1920, height: 1080 };
const InstagramResolution = { width: 3840, height: 2160 };
const ScreenAspectRatio = Dimensions.get('window').height / Dimensions.get('window').width;
const SnapchatResolution = { width: 1920, height: 1080 }
const InstagramResolution = { width: 3840, height: 2160 }
const ScreenAspectRatio = Dimensions.get('window').height / Dimensions.get('window').width
/**
* Predefined templates for use in `useCameraFormat`/`getCameraFormat`.
@@ -69,4 +69,4 @@ export const Templates: TTemplates = {
{ photoAspectRatio: ScreenAspectRatio },
{ photoResolution: InstagramResolution },
],
};
}

View File

@@ -1,4 +1,4 @@
import { CameraDevice, CameraPosition, PhysicalCameraDeviceType } from '../CameraDevice';
import { CameraDevice, CameraPosition, PhysicalCameraDeviceType } from '../CameraDevice'
export interface DeviceFilter {
/**
@@ -19,7 +19,7 @@ export interface DeviceFilter {
* getCameraDevice({ physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'] })
* ```
*/
physicalDevices?: PhysicalCameraDeviceType[];
physicalDevices?: PhysicalCameraDeviceType[]
}
/**
@@ -36,26 +36,26 @@ export interface DeviceFilter {
* ```
*/
export function getCameraDevice(devices: CameraDevice[], position: CameraPosition, filter: DeviceFilter = {}): CameraDevice | undefined {
const explicitlyWantsNonWideAngle = filter.physicalDevices != null && !filter.physicalDevices.includes('wide-angle-camera');
const explicitlyWantsNonWideAngle = filter.physicalDevices != null && !filter.physicalDevices.includes('wide-angle-camera')
const filtered = devices.filter((d) => d.position === position);
const filtered = devices.filter((d) => d.position === position)
let bestDevice = filtered[0];
if (bestDevice == null) return undefined;
let bestDevice = filtered[0]
if (bestDevice == null) return undefined
// Compare each device using a point scoring system
for (const device of devices) {
let leftPoints = 0;
let rightPoints = 0;
let leftPoints = 0
let rightPoints = 0
// prefer higher hardware-level
if (bestDevice.hardwareLevel === 'full') leftPoints += 4;
if (device.hardwareLevel === 'full') rightPoints += 4;
if (bestDevice.hardwareLevel === 'full') leftPoints += 4
if (device.hardwareLevel === 'full') rightPoints += 4
if (!explicitlyWantsNonWideAngle) {
// prefer wide-angle-camera as a default
if (bestDevice.physicalDevices.includes('wide-angle-camera')) leftPoints += 1;
if (device.physicalDevices.includes('wide-angle-camera')) rightPoints += 1;
if (bestDevice.physicalDevices.includes('wide-angle-camera')) leftPoints += 1
if (device.physicalDevices.includes('wide-angle-camera')) rightPoints += 1
}
// compare devices. two possible scenarios:
@@ -63,17 +63,17 @@ export function getCameraDevice(devices: CameraDevice[], position: CameraPositio
// 2. user wants only one ([wide]) for faster performance. prefer those devices that only have one camera, if they have more, we rank them lower.
if (filter.physicalDevices != null) {
for (const d of bestDevice.physicalDevices) {
if (filter.physicalDevices.includes(d)) leftPoints += 1;
else leftPoints -= 1;
if (filter.physicalDevices.includes(d)) leftPoints += 1
else leftPoints -= 1
}
for (const d of device.physicalDevices) {
if (filter.physicalDevices.includes(d)) rightPoints += 1;
else rightPoints -= 1;
if (filter.physicalDevices.includes(d)) rightPoints += 1
else rightPoints -= 1
}
}
if (rightPoints > leftPoints) bestDevice = device;
if (rightPoints > leftPoints) bestDevice = device
}
return bestDevice;
return bestDevice
}

View File

@@ -1,10 +1,10 @@
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from '../CameraDevice';
import { CameraRuntimeError } from '../CameraError';
import { PixelFormat } from '../PixelFormat';
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from '../CameraDevice'
import { CameraRuntimeError } from '../CameraError'
import { PixelFormat } from '../PixelFormat'
interface Size {
width: number;
height: number;
width: number
height: number
}
export interface FormatFilter {
@@ -12,12 +12,12 @@ export interface FormatFilter {
* The target resolution of the video (and frame processor) output pipeline.
* If no format supports the given resolution, the format closest to this value will be used.
*/
videoResolution?: Size | 'max';
videoResolution?: Size | 'max'
/**
* The target resolution of the photo output pipeline.
* If no format supports the given resolution, the format closest to this value will be used.
*/
photoResolution?: Size | 'max';
photoResolution?: Size | 'max'
/**
* The target aspect ratio of the video (and preview) output, expressed as a factor: `width / height`.
* (Note: Cameras are in landscape orientation)
@@ -30,7 +30,7 @@ export interface FormatFilter {
* targetVideoAspectRatio: screen.height / screen.width
* ```
*/
videoAspectRatio?: number;
videoAspectRatio?: number
/**
* The target aspect ratio of the photo output, expressed as a factor: `width / height`.
* (Note: Cameras are in landscape orientation)
@@ -44,39 +44,39 @@ export interface FormatFilter {
* targetPhotoAspectRatio: screen.height / screen.width
* ```
*/
photoAspectRatio?: number;
photoAspectRatio?: number
/**
* The target FPS you want to record video at.
* If the FPS requirements can not be met, the format closest to this value will be used.
*/
fps?: number;
fps?: number
/**
* The target video stabilization mode you want to use.
* If no format supports the target video stabilization mode, the best other matching format will be used.
*/
videoStabilizationMode?: VideoStabilizationMode;
videoStabilizationMode?: VideoStabilizationMode
/**
* The target pixel format you want to use.
* If no format supports the target pixel format, the best other matching format will be used.
*/
pixelFormat?: PixelFormat;
pixelFormat?: PixelFormat
/**
* Whether you want to find a format that supports Photo HDR.
*/
photoHDR?: boolean;
photoHDR?: boolean
/**
* Whether you want to find a format that supports Photo HDR.
*/
videoHDR?: boolean;
videoHDR?: boolean
}
type FilterWithPriority<T> = {
target: Exclude<T, null | undefined>;
priority: number;
};
target: Exclude<T, null | undefined>
priority: number
}
type FilterMap = {
[K in keyof FormatFilter]: FilterWithPriority<FormatFilter[K]>;
};
[K in keyof FormatFilter]: FilterWithPriority<FormatFilter[K]>
}
function filtersToFilterMap(filters: FormatFilter[]): FilterMap {
return filters.reduce<FilterMap>((map, curr, index) => {
for (const key in curr) {
@@ -86,10 +86,10 @@ function filtersToFilterMap(filters: FormatFilter[]): FilterMap {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
target: curr[key],
priority: filters.length - index,
};
}
}
return map;
}, {});
return map
}, {})
}
/**
@@ -112,103 +112,103 @@ function filtersToFilterMap(filters: FormatFilter[]): FilterMap {
*/
export function getCameraFormat(device: CameraDevice, filters: FormatFilter[]): CameraDeviceFormat {
// Combine filters into a single filter map for constant-time lookup
const filter = filtersToFilterMap(filters);
const filter = filtersToFilterMap(filters)
let bestFormat = device.formats[0];
let bestFormat = device.formats[0]
if (bestFormat == null)
throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`);
throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`)
// Compare each format using a point scoring system
for (const format of device.formats) {
let leftPoints = 0;
let rightPoints = 0;
let leftPoints = 0
let rightPoints = 0
const leftVideoResolution = bestFormat.videoWidth * bestFormat.videoHeight;
const rightVideoResolution = format.videoWidth * format.videoHeight;
const leftVideoResolution = bestFormat.videoWidth * bestFormat.videoHeight
const rightVideoResolution = format.videoWidth * format.videoHeight
if (filter.videoResolution != null) {
if (filter.videoResolution.target === 'max') {
// We just want the maximum resolution
if (leftVideoResolution > rightVideoResolution) leftPoints += filter.videoResolution.priority;
if (rightVideoResolution > leftVideoResolution) rightPoints += filter.videoResolution.priority;
if (leftVideoResolution > rightVideoResolution) leftPoints += filter.videoResolution.priority
if (rightVideoResolution > leftVideoResolution) rightPoints += filter.videoResolution.priority
} else {
// Find video resolution closest to the filter (ignoring orientation)
const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height;
const leftDiff = Math.abs(leftVideoResolution - targetResolution);
const rightDiff = Math.abs(rightVideoResolution - targetResolution);
if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority;
if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority;
const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height
const leftDiff = Math.abs(leftVideoResolution - targetResolution)
const rightDiff = Math.abs(rightVideoResolution - targetResolution)
if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority
if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority
}
}
const leftPhotoResolution = bestFormat.photoWidth * bestFormat.photoHeight;
const rightPhotoResolution = format.photoWidth * format.photoHeight;
const leftPhotoResolution = bestFormat.photoWidth * bestFormat.photoHeight
const rightPhotoResolution = format.photoWidth * format.photoHeight
if (filter.photoResolution != null) {
if (filter.photoResolution.target === 'max') {
// We just want the maximum resolution
if (leftPhotoResolution > rightPhotoResolution) leftPoints += filter.photoResolution.priority;
if (rightPhotoResolution > leftPhotoResolution) rightPoints += filter.photoResolution.priority;
if (leftPhotoResolution > rightPhotoResolution) leftPoints += filter.photoResolution.priority
if (rightPhotoResolution > leftPhotoResolution) rightPoints += filter.photoResolution.priority
} else {
// Find closest photo resolution to the filter (ignoring orientation)
const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height;
const leftDiff = Math.abs(leftPhotoResolution - targetResolution);
const rightDiff = Math.abs(rightPhotoResolution - targetResolution);
if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority;
if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority;
const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height
const leftDiff = Math.abs(leftPhotoResolution - targetResolution)
const rightDiff = Math.abs(rightPhotoResolution - targetResolution)
if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority
if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority
}
}
// Find closest aspect ratio (video)
if (filter.videoAspectRatio != null) {
const leftAspect = bestFormat.videoWidth / bestFormat.videoHeight;
const rightAspect = format.videoWidth / format.videoHeight;
const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target);
const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target);
if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority;
if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority;
const leftAspect = bestFormat.videoWidth / bestFormat.videoHeight
const rightAspect = format.videoWidth / format.videoHeight
const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target)
const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target)
if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority
if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority
}
// Find closest aspect ratio (photo)
if (filter.photoAspectRatio != null) {
const leftAspect = bestFormat.photoWidth / bestFormat.photoHeight;
const rightAspect = format.photoWidth / format.photoHeight;
const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target);
const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target);
if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority;
if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority;
const leftAspect = bestFormat.photoWidth / bestFormat.photoHeight
const rightAspect = format.photoWidth / format.photoHeight
const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target)
const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target)
if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority
if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority
}
// Find closest max FPS
if (filter.fps != null) {
if (bestFormat.maxFps >= filter.fps.target) leftPoints += filter.fps.priority;
if (format.maxFps >= filter.fps.target) rightPoints += filter.fps.priority;
if (bestFormat.maxFps >= filter.fps.target) leftPoints += filter.fps.priority
if (format.maxFps >= filter.fps.target) rightPoints += filter.fps.priority
}
// Find video stabilization mode
if (filter.videoStabilizationMode != null) {
if (bestFormat.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++;
if (format.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++;
if (bestFormat.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++
if (format.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++
}
// Find pixel format
if (filter.pixelFormat != null) {
if (bestFormat.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++;
if (format.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++;
if (bestFormat.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++
if (format.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++
}
// Find Photo HDR formats
if (filter.photoHDR != null) {
if (bestFormat.supportsPhotoHDR === filter.photoHDR.target) leftPoints++;
if (format.supportsPhotoHDR === filter.photoHDR.target) rightPoints++;
if (bestFormat.supportsPhotoHDR === filter.photoHDR.target) leftPoints++
if (format.supportsPhotoHDR === filter.photoHDR.target) rightPoints++
}
// Find Video HDR formats
if (filter.videoHDR != null) {
if (bestFormat.supportsVideoHDR === filter.videoHDR.target) leftPoints++;
if (format.supportsVideoHDR === filter.videoHDR.target) rightPoints++;
if (bestFormat.supportsVideoHDR === filter.videoHDR.target) leftPoints++
if (format.supportsVideoHDR === filter.videoHDR.target) rightPoints++
}
if (rightPoints > leftPoints) bestFormat = format;
if (rightPoints > leftPoints) bestFormat = format
}
return bestFormat;
return bestFormat
}

View File

@@ -1,23 +1,23 @@
import { ConfigPlugin, withGradleProperties } from '@expo/config-plugins';
import { ConfigPlugin, withGradleProperties } from '@expo/config-plugins'
/**
* Set the `VisionCamera_disableFrameProcessors` value in the static `gradle.properties` file.
* This is used to disable frame processors if you don't need it for android.
*/
export const withDisableFrameProcessorsAndroid: ConfigPlugin = (c) => {
const disableFrameProcessorsKey = 'VisionCamera_disableFrameProcessors';
const disableFrameProcessorsKey = 'VisionCamera_disableFrameProcessors'
return withGradleProperties(c, (config) => {
config.modResults = config.modResults.filter((item) => {
if (item.type === 'property' && item.key === disableFrameProcessorsKey) return false;
return true;
});
if (item.type === 'property' && item.key === disableFrameProcessorsKey) return false
return true
})
config.modResults.push({
type: 'property',
key: disableFrameProcessorsKey,
value: 'true',
});
})
return config;
});
};
return config
})
}

View File

@@ -1,4 +1,4 @@
import { ConfigPlugin, withPodfileProperties } from '@expo/config-plugins';
import { ConfigPlugin, withPodfileProperties } from '@expo/config-plugins'
/**
* Set the `disableFrameProcessors` inside of the XcodeProject.
@@ -7,7 +7,7 @@ import { ConfigPlugin, withPodfileProperties } from '@expo/config-plugins';
export const withDisableFrameProcessorsIOS: ConfigPlugin = (c) => {
return withPodfileProperties(c, (config) => {
// TODO: Implement Podfile writing
config.ios = config.ios;
return config;
});
};
config.ios = config.ios
return config
})
}

View File

@@ -1,37 +1,37 @@
import { withPlugins, AndroidConfig, ConfigPlugin, createRunOncePlugin } from '@expo/config-plugins';
import { withDisableFrameProcessorsAndroid } from './withDisableFrameProcessorsAndroid';
import { withDisableFrameProcessorsIOS } from './withDisableFrameProcessorsIOS';
import { withPlugins, AndroidConfig, ConfigPlugin, createRunOncePlugin } from '@expo/config-plugins'
import { withDisableFrameProcessorsAndroid } from './withDisableFrameProcessorsAndroid'
import { withDisableFrameProcessorsIOS } from './withDisableFrameProcessorsIOS'
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-unsafe-assignment
const pkg = require('../../../package.json');
const pkg = require('../../../package.json')
const CAMERA_USAGE = 'Allow $(PRODUCT_NAME) to access your camera';
const MICROPHONE_USAGE = 'Allow $(PRODUCT_NAME) to access your microphone';
const CAMERA_USAGE = 'Allow $(PRODUCT_NAME) to access your camera'
const MICROPHONE_USAGE = 'Allow $(PRODUCT_NAME) to access your microphone'
type Props = {
cameraPermissionText?: string;
enableMicrophonePermission?: boolean;
microphonePermissionText?: string;
disableFrameProcessors?: boolean;
};
cameraPermissionText?: string
enableMicrophonePermission?: boolean
microphonePermissionText?: string
disableFrameProcessors?: boolean
}
const withCamera: ConfigPlugin<Props> = (config, props = {}) => {
if (config.ios == null) config.ios = {};
if (config.ios.infoPlist == null) config.ios.infoPlist = {};
if (config.ios == null) config.ios = {}
if (config.ios.infoPlist == null) config.ios.infoPlist = {}
config.ios.infoPlist.NSCameraUsageDescription =
props.cameraPermissionText ?? (config.ios.infoPlist.NSCameraUsageDescription as string | undefined) ?? CAMERA_USAGE;
props.cameraPermissionText ?? (config.ios.infoPlist.NSCameraUsageDescription as string | undefined) ?? CAMERA_USAGE
if (props.enableMicrophonePermission) {
config.ios.infoPlist.NSMicrophoneUsageDescription =
props.microphonePermissionText ?? (config.ios.infoPlist.NSMicrophoneUsageDescription as string | undefined) ?? MICROPHONE_USAGE;
props.microphonePermissionText ?? (config.ios.infoPlist.NSMicrophoneUsageDescription as string | undefined) ?? MICROPHONE_USAGE
}
const androidPermissions = ['android.permission.CAMERA'];
if (props.enableMicrophonePermission) androidPermissions.push('android.permission.RECORD_AUDIO');
const androidPermissions = ['android.permission.CAMERA']
if (props.enableMicrophonePermission) androidPermissions.push('android.permission.RECORD_AUDIO')
if (props.disableFrameProcessors) {
config = withDisableFrameProcessorsAndroid(config);
config = withDisableFrameProcessorsIOS(config);
config = withDisableFrameProcessorsAndroid(config)
config = withDisableFrameProcessorsIOS(config)
}
return withPlugins(config, [[AndroidConfig.Permissions.withPermissions, androidPermissions]]);
};
return withPlugins(config, [[AndroidConfig.Permissions.withPermissions, androidPermissions]])
}
export default createRunOncePlugin(withCamera, pkg.name, pkg.version);
export default createRunOncePlugin(withCamera, pkg.name, pkg.version)

View File

@@ -1,7 +1,7 @@
import { useMemo } from 'react';
import { CameraDevice, CameraPosition } from '../CameraDevice';
import { getCameraDevice, DeviceFilter } from '../devices/getCameraDevice';
import { useCameraDevices } from './useCameraDevices';
import { useMemo } from 'react'
import { CameraDevice, CameraPosition } from '../CameraDevice'
import { getCameraDevice, DeviceFilter } from '../devices/getCameraDevice'
import { useCameraDevices } from './useCameraDevices'
/**
* Get the best matching Camera device that best satisfies your requirements using a sorting filter.
@@ -16,13 +16,13 @@ import { useCameraDevices } from './useCameraDevices';
* ```
*/
export function useCameraDevice(position: CameraPosition, filter?: DeviceFilter): CameraDevice | undefined {
const devices = useCameraDevices();
const devices = useCameraDevices()
const device = useMemo(
() => getCameraDevice(devices, position, filter),
// eslint-disable-next-line react-hooks/exhaustive-deps
[devices, position, JSON.stringify(filter)],
);
)
return device;
return device
}

View File

@@ -1,6 +1,6 @@
import { useEffect, useState } from 'react';
import type { CameraDevice } from '../CameraDevice';
import { CameraDevices } from '../CameraDevices';
import { useEffect, useState } from 'react'
import type { CameraDevice } from '../CameraDevice'
import { CameraDevices } from '../CameraDevices'
/**
* Get all available Camera Devices this phone has.
@@ -10,14 +10,14 @@ import { CameraDevices } from '../CameraDevices';
* so the result of this function might update over time.
*/
export function useCameraDevices(): CameraDevice[] {
const [devices, setDevices] = useState(() => CameraDevices.getAvailableCameraDevices());
const [devices, setDevices] = useState(() => CameraDevices.getAvailableCameraDevices())
useEffect(() => {
const listener = CameraDevices.addCameraDevicesChangedListener((newDevices) => {
setDevices(newDevices);
});
return () => listener.remove();
}, []);
setDevices(newDevices)
})
return () => listener.remove()
}, [])
return devices;
return devices
}

View File

@@ -1,6 +1,6 @@
import { useMemo } from 'react';
import { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
import { FormatFilter, getCameraFormat } from '../devices/getCameraFormat';
import { useMemo } from 'react'
import { CameraDevice, CameraDeviceFormat } from '../CameraDevice'
import { FormatFilter, getCameraFormat } from '../devices/getCameraFormat'
/**
* Get the best matching Camera format for the given device that satisfies your requirements using a sorting filter. By default, formats are sorted by highest to lowest resolution.
@@ -22,10 +22,10 @@ import { FormatFilter, getCameraFormat } from '../devices/getCameraFormat';
*/
export function useCameraFormat(device: CameraDevice | undefined, filters: FormatFilter[]): CameraDeviceFormat | undefined {
const format = useMemo(() => {
if (device == null) return undefined;
return getCameraFormat(device, filters);
if (device == null) return undefined
return getCameraFormat(device, filters)
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [device, JSON.stringify(filters)]);
}, [device, JSON.stringify(filters)])
return format;
return format
}

View File

@@ -1,17 +1,17 @@
import { useCallback, useEffect, useState } from 'react';
import { Camera } from '../Camera';
import { useCallback, useEffect, useState } from 'react'
import { Camera } from '../Camera'
interface PermissionState {
/**
* Whether the specified permission has explicitly been granted.
* By default, this will be `false`. To request permission, call `requestPermission()`.
*/
hasPermission: boolean;
hasPermission: boolean
/**
* Requests the specified permission from the user.
* @returns Whether the specified permission has now been granted, or not.
*/
requestPermission: () => Promise<boolean>;
requestPermission: () => Promise<boolean>
}
/**
@@ -31,23 +31,23 @@ interface PermissionState {
* ```
*/
export function useCameraPermission(): PermissionState {
const [hasPermission, setHasPermission] = useState(false);
const [hasPermission, setHasPermission] = useState(false)
const requestPermission = useCallback(async () => {
const result = await Camera.requestCameraPermission();
const hasPermissionNow = result === 'granted';
setHasPermission(hasPermissionNow);
return hasPermissionNow;
}, []);
const result = await Camera.requestCameraPermission()
const hasPermissionNow = result === 'granted'
setHasPermission(hasPermissionNow)
return hasPermissionNow
}, [])
useEffect(() => {
Camera.getCameraPermissionStatus().then((s) => setHasPermission(s === 'granted'));
}, []);
Camera.getCameraPermissionStatus().then((s) => setHasPermission(s === 'granted'))
}, [])
return {
hasPermission,
requestPermission,
};
}
}
/**
@@ -65,21 +65,21 @@ export function useCameraPermission(): PermissionState {
* ```
*/
export function useMicrophonePermission(): PermissionState {
const [hasPermission, setHasPermission] = useState(false);
const [hasPermission, setHasPermission] = useState(false)
const requestPermission = useCallback(async () => {
const result = await Camera.requestMicrophonePermission();
const hasPermissionNow = result === 'granted';
setHasPermission(hasPermissionNow);
return hasPermissionNow;
}, []);
const result = await Camera.requestMicrophonePermission()
const hasPermissionNow = result === 'granted'
setHasPermission(hasPermissionNow)
return hasPermissionNow
}, [])
useEffect(() => {
Camera.getMicrophonePermissionStatus().then((s) => setHasPermission(s === 'granted'));
}, []);
Camera.getMicrophonePermissionStatus().then((s) => setHasPermission(s === 'granted'))
}, [])
return {
hasPermission,
requestPermission,
};
}
}

View File

@@ -1,6 +1,6 @@
import { DependencyList, useMemo } from 'react';
import type { Frame, FrameInternal } from '../Frame';
import { FrameProcessor } from '../CameraProps';
import { DependencyList, useMemo } from 'react'
import type { Frame, FrameInternal } from '../Frame'
import { FrameProcessor } from '../CameraProps'
/**
* Create a new Frame Processor function which you can pass to the `<Camera>`.
@@ -13,19 +13,20 @@ import { FrameProcessor } from '../CameraProps';
export function createFrameProcessor(frameProcessor: FrameProcessor['frameProcessor'], type: FrameProcessor['type']): FrameProcessor {
return {
frameProcessor: (frame: Frame) => {
'worklet';
'worklet'
// Increment ref-count by one
(frame as FrameInternal).incrementRefCount();
const internal = frame as FrameInternal
internal.incrementRefCount()
try {
// Call sync frame processor
frameProcessor(frame);
frameProcessor(frame)
} finally {
// Potentially delete Frame if we were the last ref (no runAsync)
(frame as FrameInternal).decrementRefCount();
internal.decrementRefCount()
}
},
type: type,
};
}
}
/**
@@ -48,5 +49,5 @@ export function createFrameProcessor(frameProcessor: FrameProcessor['frameProces
*/
export function useFrameProcessor(frameProcessor: (frame: Frame) => void, dependencies: DependencyList): FrameProcessor {
// eslint-disable-next-line react-hooks/exhaustive-deps
return useMemo(() => createFrameProcessor(frameProcessor, 'frame-processor'), dependencies);
return useMemo(() => createFrameProcessor(frameProcessor, 'frame-processor'), dependencies)
}

View File

@@ -1,21 +1,21 @@
export * from './Camera';
export * from './CameraDevice';
export * from './CameraError';
export * from './CameraProps';
export * from './Frame';
export * from './FrameProcessorPlugins';
export * from './Orientation';
export * from './PhotoFile';
export * from './PixelFormat';
export * from './Point';
export * from './VideoFile';
export * from './Camera'
export * from './CameraDevice'
export * from './CameraError'
export * from './CameraProps'
export * from './Frame'
export * from './FrameProcessorPlugins'
export * from './Orientation'
export * from './PhotoFile'
export * from './PixelFormat'
export * from './Point'
export * from './VideoFile'
export * from './devices/getCameraFormat';
export * from './devices/getCameraDevice';
export * from './devices/Templates';
export * from './devices/getCameraFormat'
export * from './devices/getCameraDevice'
export * from './devices/Templates'
export * from './hooks/useCameraDevice';
export * from './hooks/useCameraDevices';
export * from './hooks/useCameraFormat';
export * from './hooks/useCameraPermission';
export * from './hooks/useFrameProcessor';
export * from './hooks/useCameraDevice'
export * from './hooks/useCameraDevices'
export * from './hooks/useCameraFormat'
export * from './hooks/useCameraPermission'
export * from './hooks/useFrameProcessor'