import type { ViewProps } from 'react-native';
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from './CameraDevice';
import type { CameraRuntimeError } from './CameraError';
import type { Frame } from './Frame';
import type { Orientation } from './Orientation';

export type FrameProcessor = {
  frameProcessor: (frame: Frame) => void;
  type: 'frame-processor';
};

// TODO: Replace `enableHighQualityPhotos: boolean` with `prioritization: 'photo' | 'video'`
// TODO: Use RCT_ENUM_PARSER for stuff like torch, videoStabilizationMode, and orientation
// TODO: Use Photo HostObject for stuff like depthData, portraitEffects, etc.
// TODO: Add RAW capture support
export interface CameraProps extends ViewProps {
  /**
   * The Camera Device to use.
   *
   * See the [Camera Devices](https://react-native-vision-camera.com/docs/guides/devices) section in the documentation for more information about Camera Devices.
   *
   * @example
   * ```tsx
   * const devices = useCameraDevices('wide-angle-camera')
   * const device = devices.back
   *
   * return (
   *   <Camera
   *     device={device}
   *     isActive={true}
   *     style={StyleSheet.absoluteFill}
   *   />
   * )
   * ```
   */
  device: CameraDevice;
  /**
   * Whether the Camera should actively stream video frames or not. See the [documentation about the `isActive` prop](https://react-native-vision-camera.com/docs/guides/lifecycle#the-isactive-prop) for more information.
   *
   * This can be compared to a Video component, where `isActive` specifies whether the video is paused or not.
   *
   * > Note: If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again. In return, it will use fewer resources, since the Camera will be completely destroyed when unmounted.
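   *
   * @example
   * A minimal sketch that pauses the Camera while the app is not in the foreground
   * (`useIsForeground` is a hypothetical helper hook built on React Native's `AppState`):
   * ```tsx
   * const isForeground = useIsForeground() // hypothetical AppState-based hook
   *
   * return <Camera {...cameraProps} isActive={isForeground} />
   * ```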
   */
  isActive: boolean;
  //#region Use-cases
  /**
   * Enables **photo capture** with the `takePhoto` function (see ["Taking Photos"](https://react-native-vision-camera.com/docs/guides/capturing#taking-photos))
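   *
   * @example
   * A minimal sketch, assuming `camera` is a React ref attached to the rendered `<Camera>`:
   * ```tsx
   * const camera = useRef<Camera>(null)
   *
   * const onTakePhotoPressed = async () => {
   *   // `takePhoto()` is only available when `photo={true}` is set
   *   const photo = await camera.current?.takePhoto()
   *   console.log(photo)
   * }
   *
   * return <Camera {...cameraProps} ref={camera} photo={true} />
   * ```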
2021-06-07 05:08:40 -06:00
* /
photo? : boolean ;
  /**
   * Enables **video capture** with the `startRecording` function (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/capturing/#recording-videos))
   *
   * Note: If both the `photo` and `video` properties are enabled at the same time and the device is running at a `hardwareLevel` of `'legacy'` or `'limited'`, VisionCamera _might_ use a lower resolution for video capture due to hardware constraints.
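   *
   * @example
   * A minimal sketch, assuming `camera` is a React ref attached to the rendered `<Camera>`
   * (the exact `startRecording` options depend on your VisionCamera version):
   * ```tsx
   * // e.g. inside a "start recording" button handler:
   * camera.current?.startRecording({
   *   onRecordingFinished: (video) => console.log(video),
   *   onRecordingError: (error) => console.error(error),
   * })
   * // ...and later, inside a "stop recording" button handler:
   * await camera.current?.stopRecording()
   * ```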
   */
  video?: boolean;
  /**
   * Enables **audio capture** for video recordings (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/capturing/#recording-videos))
   */
  audio?: boolean;
  /**
   * Specifies the pixel format for the video pipeline.
   *
   * Frames from a [Frame Processor](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors) will be streamed in the pixel format specified here.
   *
   * While `native` and `yuv` are the most efficient formats, some ML models (such as MLKit Barcode detection) require input Frames to be in RGB colorspace, otherwise they just output nonsense.
   *
   * - `native`: The hardware-native GPU buffer format. This is the most efficient format. (`PRIVATE` on Android, sometimes YUV on iOS)
   * - `yuv`: The YUV (Y'CbCr 4:2:0 or NV21, 8-bit) format, either video- or full-range, depending on hardware capabilities. This is the second most efficient format.
   * - `rgb`: The RGB (RGB, RGBA or ABGRA, 8-bit) format. This is the least efficient format and requires an explicit conversion.
   *
   * @default `native`
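   *
   * @example
   * A minimal sketch, forcing RGB frames for a Frame Processor whose ML model cannot consume YUV input:
   * ```tsx
   * return <Camera {...cameraProps} pixelFormat="rgb" frameProcessor={frameProcessor} />
   * ```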
   */
  pixelFormat?: 'native' | 'yuv' | 'rgb';
  //#endregion
  //#region Common Props (torch, zoom)
  /**
   * Set the current torch mode.
   *
   * Note: The torch is only available on `"back"` cameras, and isn't supported by every phone.
   *
   * @default "off"
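   *
   * @example
   * A minimal sketch, toggling the torch from component state:
   * ```tsx
   * const [isTorchOn, setIsTorchOn] = useState(false)
   *
   * return <Camera {...cameraProps} torch={isTorchOn ? 'on' : 'off'} />
   * ```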
   */
  torch?: 'off' | 'on';
  /**
   * Specifies the zoom factor of the current camera, in "factor"/scale.
   *
   * This value ranges from `minZoom` (e.g. `1`) to `maxZoom` (e.g. `128`). It is recommended to set this value
   * to the CameraDevice's `neutralZoom` by default and let the user zoom out to the fish-eye (ultra-wide) camera
   * on demand (if available).
   *
   * **Note:** Linearly increasing this value always appears logarithmic to the user.
   *
   * @default 1.0
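   *
   * @example
   * A minimal sketch, starting at the device's neutral zoom:
   * ```tsx
   * return <Camera {...cameraProps} device={device} zoom={device.neutralZoom} />
   * ```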
   */
  zoom?: number;
  /**
   * Enables or disables the native pinch to zoom gesture.
   *
   * If you want to implement a custom zoom gesture, see [the Zooming with Reanimated documentation](https://react-native-vision-camera.com/docs/guides/animated).
   *
   * @default false
   */
  enableZoomGesture?: boolean;
  //#endregion
  //#region Format/Preset selection
  /**
   * Selects a given format. By default, the best matching format is chosen.
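   *
   * @example
   * A minimal sketch picking a format that can run at 60 FPS (this assumes the format
   * objects expose a `maxFps` field; check your `CameraDeviceFormat` type for the exact shape):
   * ```tsx
   * const format = device.formats.find((f) => f.maxFps >= 60) // assumes a `maxFps` field
   *
   * return <Camera {...cameraProps} device={device} format={format} fps={60} />
   * ```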
   */
  format?: CameraDeviceFormat;
  /**
   * Specifies how you want the preview to fit the container it's in.
   */
  resizeMode?: 'cover' | 'contain';
  /**
   * Specifies the frames per second this camera should use. Make sure the given `format` includes a frame rate range that contains the given `fps`.
   *
   * Requires `format` to be set.
   */
  fps?: number;
  /**
   * Enables or disables HDR on this camera device. Make sure the given `format` supports HDR mode.
   *
   * Requires `format` to be set.
   */
  hdr?: boolean;
  /**
   * Enables or disables lossless buffer compression for the video stream.
   * If you only use {@linkcode video} or a {@linkcode frameProcessor}, this
   * can increase the efficiency and lower the memory usage of the Camera.
   *
   * If buffer compression is enabled, the video pipeline will try to use a
   * lossless-compressed pixel format instead of the normal one.
   *
   * If you use a {@linkcode frameProcessor}, you might need to change how pixels
   * are read inside your native frame processor function, as this is different
   * from the usual `yuv` or `rgb` layout.
   *
   * If buffer compression is not available but this property is enabled, the normal
   * pixel formats will be used and no error will be thrown.
   *
   * @platform iOS
   * @default
   * - true // if video={true} and frameProcessor={undefined}
   * - false // otherwise
   */
  enableBufferCompression?: boolean;
  /**
   * Enables or disables low-light boost on this camera device. Make sure the given `format` supports low-light boost.
   *
   * Requires `format` to be set.
   */
  lowLightBoost?: boolean;
  /**
   * Specifies the video stabilization mode to use.
   *
   * Requires a `format` to be set that contains the given `videoStabilizationMode`.
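   *
   * @example
   * A minimal sketch (this assumes the format objects expose a `videoStabilizationModes` array;
   * check your `CameraDeviceFormat` type for the exact shape):
   * ```tsx
   * // assumes a `videoStabilizationModes` field on the format
   * const format = device.formats.find((f) => f.videoStabilizationModes.includes('cinematic'))
   *
   * return <Camera {...cameraProps} format={format} videoStabilizationMode="cinematic" />
   * ```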
   */
  videoStabilizationMode?: VideoStabilizationMode;
  //#endregion
  /**
   * Also captures data from depth-perception sensors. (e.g. disparity maps)
   *
   * @default false
   */
  enableDepthData?: boolean;
  /**
   * A boolean specifying whether the photo render pipeline is prepared for portrait effects matte delivery.
   *
   * When enabling this, you must also set `enableDepthData` to `true`.
   *
   * @platform iOS 12.0+
   * @default false
   */
  enablePortraitEffectsMatteDelivery?: boolean;
  /**
   * Indicates whether the Camera should prepare the photo pipeline to provide maximum quality photos.
   *
   * This enables:
   * * High Resolution Capture ([`isHighResolutionCaptureEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/1648721-ishighresolutioncaptureenabled))
   * * Virtual Device fusion for greater detail ([`isVirtualDeviceConstituentPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3192189-isvirtualdeviceconstituentphotod))
   * * Dual Device fusion for greater detail ([`isDualCameraDualPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings/2873917-isdualcameradualphotodeliveryena))
   * * Sets the maximum quality prioritization to `.quality` ([`maxPhotoQualityPrioritization`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3182995-maxphotoqualityprioritization))
   *
   * @default false
   */
  enableHighQualityPhotos?: boolean;
  /**
   * If `true`, show a debug view to display the FPS of the Camera session.
   * This is useful for debugging your Frame Processor's speed.
   *
   * @default false
   */
  enableFpsGraph?: boolean;
  /**
   * Represents the orientation of all Camera Outputs (Photo, Video, and Frame Processor). If this value is not set, the device orientation is used.
   */
  orientation?: Orientation;
  //#region Events
  /**
   * Called when any kind of runtime error occurred.
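   *
   * @example
   * A minimal sketch logging the error (a `CameraRuntimeError` carries a `code` and a `message`):
   * ```tsx
   * const onError = useCallback((error: CameraRuntimeError) => {
   *   console.error(error.code, error.message)
   * }, [])
   *
   * return <Camera {...cameraProps} onError={onError} />
   * ```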
   */
  onError?: (error: CameraRuntimeError) => void;
  /**
   * Called when the camera was successfully initialized.
   */
  onInitialized?: () => void;
  /**
   * A worklet which will be called for every frame the Camera "sees".
   *
   * > See [the Frame Processors documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors) for more information
   *
   * @example
   * ```tsx
   * const frameProcessor = useFrameProcessor((frame) => {
   *   'worklet'
   *   const qrCodes = scanQRCodes(frame)
   *   console.log(`Detected QR Codes: ${qrCodes}`)
   * }, [])
   *
   * return <Camera {...cameraProps} frameProcessor={frameProcessor} />
   * ```
   */
  frameProcessor?: FrameProcessor;
  //#endregion
}