import type { ViewProps } from 'react-native';
import type { CameraDevice, CameraDeviceFormat, ColorSpace, VideoStabilizationMode } from './CameraDevice';
import type { CameraRuntimeError } from './CameraError';
import type { CameraPreset } from './CameraPreset';
import type { Frame } from './Frame';
import type { Orientation } from './Orientation';

export interface CameraProps extends ViewProps {
  /**
   * The Camera Device to use.
   *
   * See the [Camera Devices](https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices) section in the documentation for more information about Camera Devices.
   *
   * @example
   * ```tsx
   * const devices = useCameraDevices('wide-angle-camera')
   * const device = devices.back
   *
   * return (
   *   <Camera
   *     device={device}
   *     isActive={true}
   *     style={StyleSheet.absoluteFill}
   *   />
   * )
   * ```
   */
  device: CameraDevice;
  /**
   * Whether the Camera should actively stream video frames or not. See the [documentation about the `isActive` prop](https://mrousavy.github.io/react-native-vision-camera/docs/guides/lifecycle#the-isactive-prop) for more information.
   *
   * This can be compared to a Video component, where `isActive` specifies whether the video is paused or not.
   *
   * > Note: If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again. In return, it will use fewer resources since the Camera will be completely destroyed when unmounted.
   */
  isActive: boolean;

  //#region Use-cases
  /**
   * Enables **photo capture** with the `takePhoto` function (see ["Taking Photos"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/capturing#taking-photos))
   */
  photo?: boolean;
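  /*
   * Illustrative sketch (not part of the original props): once `photo` is enabled, a photo can be
   * captured through a ref to the `<Camera>` component. The exact `takePhoto` options shown here
   * (e.g. `flash`) are assumptions based on the "Taking Photos" guide linked above.
   *
   * ```tsx
   * const camera = useRef<Camera>(null)
   *
   * const onTakePhotoPressed = async () => {
   *   // capture a photo and log where it was written to disk
   *   const photo = await camera.current?.takePhoto({ flash: 'off' })
   *   console.log(`Photo saved at ${photo?.path}`)
   * }
   *
   * return <Camera ref={camera} photo={true} {...cameraProps} />
   * ```
   */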
  /**
   * Enables **video capture** with the `startRecording` function (see ["Recording Videos"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/capturing/#recording-videos))
   *
   * Note: If you want to use `video` and `frameProcessor` simultaneously, make sure [`supportsParallelVideoProcessing`](https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices#the-supportsparallelvideoprocessing-prop) is `true`.
   */
  video?: boolean;
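  /*
   * Illustrative sketch (not part of the original props): with `video` (and optionally `audio`)
   * enabled, recordings are started and stopped through a ref. The callback shape shown here is
   * an assumption based on the "Recording Videos" guide linked above.
   *
   * ```tsx
   * const camera = useRef<Camera>(null)
   *
   * camera.current?.startRecording({
   *   onRecordingFinished: (video) => console.log(`Recorded video at ${video.path}`),
   *   onRecordingError: (error) => console.error(error),
   * })
   * // ...later:
   * await camera.current?.stopRecording()
   * ```
   */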
  /**
   * Enables **audio capture** for video recordings (see ["Recording Videos"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/capturing/#recording-videos))
   */
  audio?: boolean;
  //#endregion

  //#region Common Props (torch, zoom)
  /**
   * Set the current torch mode.
   *
   * Note: The torch is only available on `"back"` cameras, and isn't supported by every phone.
   *
   * @default "off"
   */
  torch?: 'off' | 'on';
  /**
   * Specifies the zoom factor of the current camera, in "factor"/scale.
   *
   * This value ranges from `minZoom` (e.g. `1`) to `maxZoom` (e.g. `128`). It is recommended to set this value
   * to the CameraDevice's `neutralZoom` per default and let the user zoom out to the fish-eye (ultra-wide) camera
   * on demand (if available).
   *
   * **Note:** Linearly increasing this value always appears logarithmic to the user.
   *
   * @default 1.0
   */
  zoom?: number;
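  /*
   * Illustrative sketch (not part of the original props): following the recommendation above, the
   * zoom factor can simply be pinned to the device's `neutralZoom` when no zoom gesture is used.
   *
   * ```tsx
   * const devices = useCameraDevices()
   * const device = devices.back
   * if (device == null) return null
   *
   * return <Camera device={device} isActive={true} zoom={device.neutralZoom} />
   * ```
   */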
  /**
   * Enables or disables the native pinch-to-zoom gesture.
   *
   * If you want to implement a custom zoom gesture, see [the Zooming with Reanimated documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/animated).
   *
   * @default false
   */
  enableZoomGesture?: boolean;
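  /*
   * Illustrative sketch (not part of the original props): a custom zoom gesture can drive the
   * `zoom` prop through Reanimated instead of the native gesture. The `createAnimatedComponent`
   * and `useAnimatedProps` wiring below is an assumption based on the "Zooming with Reanimated"
   * guide linked above.
   *
   * ```tsx
   * const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera)
   * Reanimated.addWhitelistedNativeProps({ zoom: true })
   *
   * const zoom = useSharedValue(device.neutralZoom)
   * const animatedProps = useAnimatedProps<Partial<CameraProps>>(() => ({ zoom: zoom.value }), [zoom])
   *
   * return <ReanimatedCamera device={device} isActive={true} animatedProps={animatedProps} />
   * ```
   */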
  //#endregion

  //#region Format/Preset selection
  /**
   * Automatically selects a camera format which best matches the given preset. Must be `undefined` when `format` is set!
   */
  preset?: CameraPreset;
  /**
   * Selects a given format. Must be `undefined` when `preset` is set!
   */
  format?: CameraDeviceFormat;
  /**
   * Specify the frames per second this camera should use. Make sure the given `format` includes a frame rate range with the given `fps`.
   *
   * Requires `format` to be set.
   */
  fps?: number;
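  /*
   * Illustrative sketch (not part of the original props): picking a `format` that can run at the
   * desired `fps`. The `frameRateRanges` shape used below is an assumption about
   * `CameraDeviceFormat` at this version.
   *
   * ```tsx
   * const fps = 60
   * const format = device.formats.find((f) =>
   *   f.frameRateRanges.some((r) => r.minFrameRate <= fps && fps <= r.maxFrameRate),
   * )
   *
   * return <Camera device={device} isActive={true} format={format} fps={fps} />
   * ```
   */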
  /**
   * Enables or disables HDR on this camera device. Make sure the given `format` supports HDR mode.
   *
   * Requires `format` to be set.
   */
  hdr?: boolean;
  /**
   * Enables or disables low-light boost on this camera device. Make sure the given `format` supports low-light boost.
   *
   * Requires `format` to be set.
   */
  lowLightBoost?: boolean;
  /**
   * Specifies the color space to use for this camera device. Make sure the given `format` contains the given `colorSpace`.
   *
   * Requires `format` to be set.
   */
  colorSpace?: ColorSpace;
  /**
   * Specifies the video stabilization mode to use for this camera device. Make sure the given `format` contains the given `videoStabilizationMode`.
   *
   * Requires `format` to be set.
   * @platform iOS
   */
  videoStabilizationMode?: VideoStabilizationMode;
  //#endregion

  /**
   * Also captures data from depth-perception sensors (e.g. disparity maps).
   *
   * @default false
   */
  enableDepthData?: boolean;
  /**
   * A boolean specifying whether the photo render pipeline is prepared for portrait effects matte delivery.
   *
   * When enabling this, you must also set `enableDepthData` to `true`.
   *
   * @platform iOS 12.0+
   * @default false
   */
  enablePortraitEffectsMatteDelivery?: boolean;
  /**
   * Indicates whether the Camera should prepare the photo pipeline to provide maximum quality photos.
   *
   * This enables:
   * * High Resolution Capture ([`isHighResolutionCaptureEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/1648721-ishighresolutioncaptureenabled))
   * * Virtual Device fusion for greater detail ([`isVirtualDeviceConstituentPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3192189-isvirtualdeviceconstituentphotod))
   * * Dual Device fusion for greater detail ([`isDualCameraDualPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings/2873917-isdualcameradualphotodeliveryena))
   * * Sets the maximum quality prioritization to `.quality` ([`maxPhotoQualityPrioritization`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3182995-maxphotoqualityprioritization))
   *
   * @default false
   */
  enableHighQualityPhotos?: boolean;
  /**
   * If `true`, show a debug view to display the FPS of the Camera session.
   * This is useful for debugging your Frame Processor's speed.
   *
   * @default false
   */
  enableFpsGraph?: boolean;
  /**
   * Represents the orientation of all Camera Outputs (Photo, Video, and Frame Processor). If this value is not set, the device orientation is used.
   */
  orientation?: Orientation;
  /**
   * Render type of the Camera Preview Layer.
   *
   * * `native`: Uses the default platform-native preview layer. Uses fewer resources and is more efficient.
   * * `skia`: Uses a Skia Canvas for rendering Camera frames to the screen. This allows you to draw to the screen using the react-native-skia API inside a Frame Processor.
   *
   * @default 'native'
   */
  previewType?: 'native' | 'skia';

  //#region Events
  /**
   * Called when any kind of runtime error occurred.
   */
  onError?: (error: CameraRuntimeError) => void;
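  /*
   * Illustrative sketch (not part of the original props): a typical `onError` handler, assuming
   * `CameraRuntimeError` exposes `code` and `message` fields.
   *
   * ```tsx
   * const onError = useCallback((error: CameraRuntimeError) => {
   *   console.error(`${error.code}: ${error.message}`)
   * }, [])
   *
   * return <Camera {...cameraProps} onError={onError} />
   * ```
   */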
  /**
   * Called when the camera was successfully initialized.
   */
  onInitialized?: () => void;
  /**
   * A worklet which will be called for every frame the Camera "sees".
   *
   * If {@linkcode CameraProps.previewType | previewType} is set to `"skia"`, you can draw content to the `Frame` using the react-native-skia API.
   *
   * Note: If you want to use `video` and `frameProcessor` simultaneously, make sure [`supportsParallelVideoProcessing`](https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices#the-supportsparallelvideoprocessing-prop) is `true`.
   *
   * > See [the Frame Processors documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors) for more information
   *
   * @example
   * ```tsx
   * const frameProcessor = useFrameProcessor((frame) => {
   *   'worklet'
   *   const qrCodes = scanQRCodes(frame)
   *   console.log(`Detected QR Codes: ${qrCodes}`)
   * }, [])
   *
   * return <Camera {...cameraProps} frameProcessor={frameProcessor} />
   * ```
   */
  frameProcessor?: (frame: Frame) => void;
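  /*
   * Additional sketch (not part of the original props), adapted from the library's notes on
   * synchronous Frame Processors: `runAtTargetFps` is assumed to be exported by the library at
   * this version and throttles part of a Frame Processor to a lower rate than the Camera's FPS.
   *
   * ```tsx
   * const frameProcessor = useFrameProcessor((frame) => {
   *   'worklet'
   *   // runs for every frame the Camera "sees"
   *   runAtTargetFps(10, () => {
   *     'worklet'
   *     // runs at most 10 times per second
   *     console.log('Throttled work')
   *   })
   * }, [])
   * ```
   */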
  //#endregion
}