import type { ViewProps } from 'react-native'
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from './CameraDevice'
import type { CameraRuntimeError } from './CameraError'
import type { CodeScanner } from './CodeScanner'
import type { Frame } from './Frame'
import type { Orientation } from './Orientation'
export type FrameProcessor = {
  frameProcessor: (frame: Frame) => void
  type: 'frame-processor'
}
// TODO: Replace `enableHighQualityPhotos: boolean` in favor of `prioritization: 'photo' | 'video'`
// TODO: Use RCT_ENUM_PARSER for stuff like torch, videoStabilizationMode, and orientation
// TODO: Use Photo HostObject for stuff like depthData, portraitEffects, etc.
// TODO: Add RAW capture support
export interface CameraProps extends ViewProps {
  /**
   * The Camera Device to use.
   *
   * See the [Camera Devices](https://react-native-vision-camera.com/docs/guides/devices) section in the documentation for more information about Camera Devices.
   *
   * @example
   * ```tsx
   * const device = useCameraDevice('back')
   *
   * if (device == null) return <NoCameraErrorView />
   * return (
   *   <Camera
   *     device={device}
   *     isActive={true}
   *     style={StyleSheet.absoluteFill}
   *   />
   * )
   * ```
   */
  device: CameraDevice
  /**
   * Whether the Camera should actively stream video frames, or not. See the [documentation about the `isActive` prop](https://react-native-vision-camera.com/docs/guides/lifecycle#the-isactive-prop) for more information.
   *
   * This can be compared to a Video component, where `isActive` specifies whether the video is paused or not.
   *
   * > Note: If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again. In return, it will use less resources since the Camera will be completely destroyed when unmounted.
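   *
   * @example
   * A minimal sketch of pausing the Camera while the app is in the background (`useIsForeground` is a hypothetical hook built on React Native's `AppState` API):
   * ```tsx
   * const isForeground = useIsForeground()
   *
   * return <Camera {...props} isActive={isForeground} />
   * ```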
   */
  isActive: boolean
  //#region Use-cases
  /**
   * Enables **photo capture** with the `takePhoto` function (see ["Taking Photos"](https://react-native-vision-camera.com/docs/guides/taking-photos))
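   *
   * @example
   * A minimal sketch of taking a photo through a `Camera` ref once photo capture is enabled (`onTakePhoto` is a hypothetical handler, e.g. wired to a button press):
   * ```tsx
   * const camera = useRef<Camera>(null)
   *
   * const onTakePhoto = async () => {
   *   const photo = await camera.current?.takePhoto()
   *   console.log(`Photo saved to ${photo?.path}`)
   * }
   *
   * return <Camera {...props} ref={camera} photo={true} />
   * ```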
   */
  photo?: boolean
  /**
   * Enables **video capture** with the `startRecording` function (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/recording-videos))
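   *
   * @example
   * A minimal sketch of starting a recording through a `Camera` ref once video capture is enabled (`onStartRecording` is a hypothetical handler):
   * ```tsx
   * const camera = useRef<Camera>(null)
   *
   * const onStartRecording = () => {
   *   camera.current?.startRecording({
   *     onRecordingFinished: (video) => console.log(`Video saved to ${video.path}`),
   *     onRecordingError: (error) => console.error(error)
   *   })
   * }
   *
   * return <Camera {...props} ref={camera} video={true} audio={true} />
   * ```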
   */
  video?: boolean
  /**
   * Enables **audio capture** for video recordings (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/recording-videos))
   */
  audio?: boolean
  /**
   * Specifies the pixel format for the video pipeline.
   *
   * Make sure the given {@linkcode format} supports the given {@linkcode pixelFormat} (see {@linkcode CameraDeviceFormat.pixelFormats format.pixelFormats}).
   *
   * Affects:
   * * {@linkcode frameProcessor}: The format of Frames from a [Frame Processor](https://react-native-vision-camera.com/docs/guides/frame-processors).
   *   While `'native'` and `'yuv'` are the most efficient formats, some ML models (such as TensorFlow Face Detection Models) require input Frames to be in RGB colorspace, otherwise they just output nonsense.
   * * {@linkcode video}: The format of Frames streamed in the Video Pipeline. The format `'native'` is most efficient here.
   *
   * The following values are supported:
   *
   * - `native`: The hardware native GPU buffer format. This is the most efficient format. (`PRIVATE` on Android, sometimes YUV on iOS)
   * - `yuv`: The YUV (Y'CbCr 4:2:0 or NV21, 8-bit) format, either video- or full-range, depending on hardware capabilities. This is the second most efficient format.
   * - `rgb`: The RGB (RGB, RGBA or ABGRA, 8-bit) format. This is least efficient and requires explicit conversion.
   *
   * @default `native`
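   *
   * @example
   * A minimal sketch that requests RGB frames for an ML-based Frame Processor (`detectFaces` is a hypothetical plugin that expects RGB input):
   * ```tsx
   * const frameProcessor = useFrameProcessor((frame) => {
   *   'worklet'
   *   const faces = detectFaces(frame)
   *   console.log(`Detected ${faces.length} faces`)
   * }, [])
   *
   * return <Camera {...props} pixelFormat="rgb" frameProcessor={frameProcessor} />
   * ```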
   */
  pixelFormat?: 'native' | 'yuv' | 'rgb'
  //#endregion
  //#region Common Props (torch, zoom)
  /**
   * Set the current torch mode.
   *
   * Make sure the given {@linkcode device} has a torch (see {@linkcode CameraDevice.hasTorch device.hasTorch}).
   *
   * @default "off"
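   *
   * @example
   * A minimal sketch that only turns on the torch if the device actually has one (`isTorchOn` is a hypothetical piece of state, e.g. from `useState`):
   * ```tsx
   * const torch = device.hasTorch && isTorchOn ? 'on' : 'off'
   *
   * return <Camera {...props} device={device} torch={torch} />
   * ```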
   */
  torch?: 'off' | 'on'
  /**
   * Specifies the zoom factor of the current camera, as a scale factor.
   *
   * This value ranges from `minZoom` (e.g. `1`) to `maxZoom` (e.g. `128`). It is recommended to set this value
   * to the CameraDevice's `neutralZoom` by default, and let the user zoom out to the fish-eye (ultra-wide) camera
   * on demand (if available).
   *
   * **Note:** Linearly increasing this value always appears logarithmic to the user.
   *
   * @default 1.0
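   *
   * @example
   * A minimal sketch that starts at the device's neutral zoom (assuming `device` was obtained via `useCameraDevice`):
   * ```tsx
   * const device = useCameraDevice('back')
   * if (device == null) return <NoCameraErrorView />
   *
   * return <Camera {...props} device={device} zoom={device.neutralZoom} />
   * ```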
   */
  zoom?: number
  /**
   * Enables or disables the native pinch to zoom gesture.
   *
   * If you want to implement a custom zoom gesture, see [the Zooming with Reanimated documentation](https://react-native-vision-camera.com/docs/guides/zooming).
   *
   * @default false
   */
  enableZoomGesture?: boolean
  //#endregion
  //#region Format/Preset selection
  /**
   * Selects a given format. By default, the best matching format is chosen.
   *
   * The format defines the possible values for properties like:
   * - {@linkcode fps}: {@linkcode CameraDeviceFormat.minFps format.minFps}...{@linkcode CameraDeviceFormat.maxFps format.maxFps}
   * - {@linkcode hdr}: {@linkcode CameraDeviceFormat.supportsVideoHDR format.supportsVideoHDR}
   * - {@linkcode pixelFormat}: {@linkcode CameraDeviceFormat.pixelFormats format.pixelFormats}
   * - {@linkcode enableDepthData}: {@linkcode CameraDeviceFormat.supportsDepthCapture format.supportsDepthCapture}
   * - {@linkcode videoStabilizationMode}: {@linkcode CameraDeviceFormat.videoStabilizationModes format.videoStabilizationModes}
   *
   * In other words: {@linkcode enableDepthData} can only be set to `true` if {@linkcode CameraDeviceFormat.supportsDepthCapture format.supportsDepthCapture} is `true`.
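   *
   * @example
   * A minimal sketch using filter-based format selection (assuming the `useCameraFormat` hook and its filter syntax are available in your version):
   * ```tsx
   * const device = useCameraDevice('back')
   * const format = useCameraFormat(device, [
   *   { videoResolution: { width: 1920, height: 1080 } },
   *   { fps: 60 }
   * ])
   *
   * if (device == null) return <NoCameraErrorView />
   * return <Camera {...props} device={device} format={format} />
   * ```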
   */
  format?: CameraDeviceFormat
  /**
   * Specifies the Preview's resize mode.
   * * `"cover"`: Keep aspect ratio and fill entire parent view (centered).
   * * `"contain"`: Keep aspect ratio and make sure the entire content is visible inside the parent view, even if it introduces additional blank areas (centered).
   *
   * @default "cover"
   */
  resizeMode?: 'cover' | 'contain'
  /**
   * Specify the frames per second this camera should stream frames at.
   *
   * Make sure the given {@linkcode format} can stream at the target {@linkcode fps} value (see {@linkcode CameraDeviceFormat.minFps format.minFps} and {@linkcode CameraDeviceFormat.maxFps format.maxFps}).
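   *
   * @example
   * A minimal sketch that targets 60 FPS but never exceeds what the selected {@linkcode format} supports:
   * ```tsx
   * const fps = Math.min(format?.maxFps ?? 30, 60)
   *
   * return <Camera {...props} format={format} fps={fps} />
   * ```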
   */
  fps?: number
  /**
   * Enables or disables HDR streaming.
   *
   * Make sure the given {@linkcode format} supports HDR (see {@linkcode CameraDeviceFormat.supportsVideoHDR format.supportsVideoHDR}).
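   *
   * @example
   * A minimal sketch that only enables HDR when the selected {@linkcode format} supports it:
   * ```tsx
   * return <Camera {...props} format={format} hdr={format?.supportsVideoHDR} />
   * ```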
   */
  hdr?: boolean
  /**
   * Enables or disables lossless buffer compression for the video stream.
   * If you only use {@linkcode video} or a {@linkcode frameProcessor}, this
   * can increase the efficiency and lower memory usage of the Camera.
   *
   * If buffer compression is enabled, the video pipeline will try to use a
   * lossless-compressed pixel format instead of the normal one.
   *
   * If you use a {@linkcode frameProcessor}, you might need to change how pixels
   * are read inside your native frame processor function as this is different
   * from the usual `yuv` or `rgb` layout.
   *
   * If buffer compression is not available but this property is enabled, the normal
   * pixel formats will be used and no error will be thrown.
   *
   * @platform iOS
   * @default
   * - true // if video={true} and frameProcessor={undefined}
   * - false // otherwise
   */
  enableBufferCompression?: boolean
  /**
   * Enables or disables low-light boost on this camera device.
   *
   * Make sure the given {@linkcode device} supports low-light boost (see {@linkcode CameraDevice.supportsLowLightBoost device.supportsLowLightBoost}).
   */
  lowLightBoost?: boolean
  /**
   * Specifies the video stabilization mode to use.
   *
   * Make sure the given {@linkcode format} supports the given {@linkcode videoStabilizationMode} (see {@linkcode CameraDeviceFormat.videoStabilizationModes format.videoStabilizationModes}).
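   *
   * @example
   * A minimal sketch that only picks a stabilization mode the selected {@linkcode format} actually supports:
   * ```tsx
   * const stabilizationMode = format?.videoStabilizationModes.includes('cinematic') ? 'cinematic' : 'off'
   *
   * return <Camera {...props} format={format} videoStabilizationMode={stabilizationMode} />
   * ```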
   */
  videoStabilizationMode?: VideoStabilizationMode
  //#endregion
  /**
   * Enables or disables depth data delivery for photo capture.
   *
   * Make sure the given {@linkcode format} supports depth data (see {@linkcode CameraDeviceFormat.supportsDepthCapture format.supportsDepthCapture}).
   *
   * @default false
   */
  enableDepthData?: boolean
  /**
   * A boolean specifying whether the photo render pipeline is prepared for portrait effects matte delivery.
   *
   * When enabling this, you must also set `enableDepthData` to `true`.
   *
   * @platform iOS 12.0+
   * @default false
   */
  enablePortraitEffectsMatteDelivery?: boolean
  /**
   * Indicates whether the Camera should prepare the photo pipeline to provide maximum quality photos.
   *
   * This enables:
   * * High Resolution Capture ([`isHighResolutionCaptureEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/1648721-ishighresolutioncaptureenabled))
   * * Virtual Device fusion for greater detail ([`isVirtualDeviceConstituentPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3192189-isvirtualdeviceconstituentphotod))
   * * Dual Device fusion for greater detail ([`isDualCameraDualPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings/2873917-isdualcameradualphotodeliveryena))
   * * Sets the maximum quality prioritization to `.quality` ([`maxPhotoQualityPrioritization`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3182995-maxphotoqualityprioritization))
   *
   * @default false
   */
  enableHighQualityPhotos?: boolean
  /**
   * If `true`, show a debug view to display the FPS of the Camera session.
   * This is useful for debugging your Frame Processor's speed.
   *
   * @default false
   */
  enableFpsGraph?: boolean
  /**
   * Represents the orientation of all Camera Outputs (Photo, Video, and Frame Processor). If this value is not set, the device orientation is used.
   */
  orientation?: Orientation
  //#region Events
  /**
   * Called when any kind of runtime error occurred.
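   *
   * @example
   * A minimal sketch of an error handler (the `code` and `message` fields come from {@linkcode CameraRuntimeError}):
   * ```tsx
   * const onError = useCallback((error: CameraRuntimeError) => {
   *   console.error(`${error.code}: ${error.message}`)
   * }, [])
   *
   * return <Camera {...props} onError={onError} />
   * ```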
   */
  onError?: (error: CameraRuntimeError) => void
  /**
   * Called when the camera was successfully initialized.
   */
  onInitialized?: () => void
  /**
   * A worklet which will be called for every frame the Camera "sees".
   *
   * > See [the Frame Processors documentation](https://react-native-vision-camera.com/docs/guides/frame-processors) for more information
   *
   * @example
   * ```tsx
   * const frameProcessor = useFrameProcessor((frame) => {
   *   'worklet'
   *   const faces = scanFaces(frame)
   *   console.log(`Faces: ${faces}`)
   * }, [])
   *
   * return <Camera {...cameraProps} frameProcessor={frameProcessor} />
   * ```
   */
  frameProcessor?: FrameProcessor
  /**
   * A CodeScanner that can detect QR-Codes or Barcodes using platform-native APIs.
   *
   * > See [the Code Scanner documentation](https://react-native-vision-camera.com/docs/guides/code-scanning) for more information
   *
   * @example
   * ```tsx
   * const codeScanner = useCodeScanner({
   *   codeTypes: ['qr', 'ean-13'],
   *   onCodeScanned: (codes) => {
   *     console.log(`Scanned ${codes.length} codes!`)
   *   }
   * })
   *
   * return <Camera {...props} codeScanner={codeScanner} />
   * ```
   */
  codeScanner?: CodeScanner
  //#endregion
}