Compare commits


No commits in common. "0e05fc314fb759ec0944bf09c07aba9ad753fc2b" and "009838db7523d6a7ee48c8c194b3d4daac50350d" have entirely different histories.

7 changed files with 410 additions and 440 deletions

View File

@@ -11,7 +11,6 @@ import com.mrousavy.camera.core.CodeScannerFrame
import com.mrousavy.camera.core.UnknownCameraError
import com.mrousavy.camera.core.code
import com.mrousavy.camera.types.CodeType
-import java.io.File

fun CameraView.invokeOnInitialized() {
  Log.i(CameraView.TAG, "invokeOnInitialized()")

@@ -34,15 +33,6 @@ fun CameraView.invokeOnStopped() {
  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStopped", null)
}

-fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
-  Log.e(CameraView.TAG, "invokeOnError(...):")
-  val event = Arguments.createMap()
-  event.putInt("index", index)
-  event.putString("filepath", filepath.toString())
-  val reactContext = context as ReactContext
-  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
-}
-
fun CameraView.invokeOnError(error: Throwable) {
  Log.e(CameraView.TAG, "invokeOnError(...):")
  error.printStackTrace()

View File

@@ -25,7 +25,6 @@ import com.mrousavy.camera.types.Torch
import com.mrousavy.camera.types.VideoStabilizationMode
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
-import java.io.File

//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.

@@ -266,10 +265,6 @@ class CameraView(context: Context) :
    invokeOnStopped()
  }

-  override fun onVideoChunkReady(filepath: File, index: Int) {
-    invokeOnChunkReady(filepath, index)
-  }
-
  override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {
    invokeOnCodeScanned(codes, scannerFrame)
  }

View File

@@ -29,7 +29,6 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
      .put("cameraStopped", MapBuilder.of("registrationName", "onStopped"))
      .put("cameraError", MapBuilder.of("registrationName", "onError"))
      .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
-      .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
      .build()

  override fun getName(): String = TAG

View File

@@ -54,7 +54,6 @@ import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
-import java.io.File

class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) :
  CameraManager.AvailabilityCallback(),

@@ -641,8 +640,7 @@ class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) :
        orientation,
        options,
        callback,
-        onError,
-        this.callback,
+        onError
      )
    recording.start()
    this.recording = recording

@@ -726,7 +724,6 @@ class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) :
    fun onInitialized()
    fun onStarted()
    fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int)
    fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
  }
}

View File

@@ -13,13 +13,12 @@ import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.nio.ByteBuffer

-class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
+class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) :
  MediaCodec.Callback() {
  companion object {
    private const val TAG = "ChunkedRecorder"

    fun fromParams(
-      callbacks: CameraSession.Callback,
      size: Size,
      enableAudio: Boolean,
      fps: Int? = null,

@@ -58,7 +57,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
      // Create a MediaCodec encoder, and configure it with our format. Get a Surface
      // we can use for input and wrap it with a class that handles the EGL work.
      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
-      return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval, callbacks)
+      return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval)
    }
  }

@@ -80,7 +79,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
  }

  // Muxer specific
-  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat) {
+  private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) {
    val videoTrack: Int = muxer.addTrack(encodedFormat)
    val startTimeUs: Long = startTimeUs

@@ -98,10 +97,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
  private var muxerContext: MuxerContext? = null

  private fun createNextMuxer(bufferInfo: BufferInfo) {
-    muxerContext?.let {
-      it.finish()
-      this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex)
-    }
+    muxerContext?.finish()
    chunkIndex++

    val newFileName = "$chunkIndex.mp4"

@@ -113,7 +109,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
    )
    muxer.setOrientationHint(orientationHint)
    muxerContext = MuxerContext(
-      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!
+      muxer, bufferInfo.presentationTimeUs, this.encodedFormat!!
    )
  }

View File

@@ -24,8 +24,7 @@ class RecordingSession(
  private val cameraOrientation: Orientation,
  private val options: RecordVideoOptions,
  private val callback: (video: Video) -> Unit,
-  private val onError: (error: CameraError) -> Unit,
-  private val allCallbacks: CameraSession.Callback,
+  private val onError: (error: CameraError) -> Unit
) {
  companion object {
    private const val TAG = "RecordingSession"

@@ -46,7 +45,6 @@ class RecordingSession(
  private val bitRate = getBitRate()

  private val recorder = ChunkedRecordingManager.fromParams(
-    allCallbacks,
    size,
    enableAudio,
    fps,

View File

@@ -18,37 +18,32 @@ export type CameraPermissionStatus = 'granted' | 'not-determined' | 'denied' | 'restricted'
export type CameraPermissionRequestResult = 'granted' | 'denied'

interface OnCodeScannedEvent {
  codes: Code[]
  frame: CodeScannerFrame
}

interface OnErrorEvent {
  code: string
  message: string
  cause?: ErrorWithCause
}
-interface OnVideoChunkReadyEvent {
-  filepath: string
-  index: int
-}

type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor' | 'codeScanner'> & {
  cameraId: string
  enableFrameProcessor: boolean
  codeScannerOptions?: Omit<CodeScanner, 'onCodeScanned'>
  onInitialized?: (event: NativeSyntheticEvent<void>) => void
  onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void
  onCodeScanned?: (event: NativeSyntheticEvent<OnCodeScannedEvent>) => void
  onStarted?: (event: NativeSyntheticEvent<void>) => void
  onStopped?: (event: NativeSyntheticEvent<void>) => void
-  onVideoChunkReady?: (event: NativeSyntheticEvent<OnVideoChunkReadyEvent>) => void
  onViewReady: () => void
}

type NativeRecordVideoOptions = Omit<RecordVideoOptions, 'onRecordingError' | 'onRecordingFinished' | 'videoBitRate'> & {
  videoBitRateOverride?: number
  videoBitRateMultiplier?: number
}
type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>

interface CameraState {
  isRecordingWithFlash: boolean
}
//#endregion
@@ -82,427 +77,427 @@ interface CameraState {
 * @component
 */
export class Camera extends React.PureComponent<CameraProps, CameraState> {
  /** @internal */
  static displayName = 'Camera'
  /** @internal */
  displayName = Camera.displayName
  private lastFrameProcessor: FrameProcessor | undefined
  private isNativeViewMounted = false

  private readonly ref: React.RefObject<RefType>

  /** @internal */
  constructor(props: CameraProps) {
    super(props)
    this.onViewReady = this.onViewReady.bind(this)
    this.onInitialized = this.onInitialized.bind(this)
    this.onStarted = this.onStarted.bind(this)
    this.onStopped = this.onStopped.bind(this)
    this.onError = this.onError.bind(this)
    this.onCodeScanned = this.onCodeScanned.bind(this)
    this.ref = React.createRef<RefType>()
    this.lastFrameProcessor = undefined
    this.state = {
      isRecordingWithFlash: false,
    }
  }

  private get handle(): number {
    const nodeHandle = findNodeHandle(this.ref.current)
    if (nodeHandle == null || nodeHandle === -1) {
      throw new CameraRuntimeError(
        'system/view-not-found',
        "Could not get the Camera's native view tag! Does the Camera View exist in the native view-tree?",
      )
    }

    return nodeHandle
  }

  //#region View-specific functions (UIViewManager)
  /**
   * Take a single photo and write it's content to a temporary file.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occured while capturing the photo. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
   * @example
   * ```ts
   * const photo = await camera.current.takePhoto({
   *   qualityPrioritization: 'quality',
   *   flash: 'on',
   *   enableAutoRedEyeReduction: true
   * })
   * ```
   */
  public async takePhoto(options?: TakePhotoOptions): Promise<PhotoFile> {
    try {
      return await CameraModule.takePhoto(this.handle, options ?? {})
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }
  private getBitRateMultiplier(bitRate: RecordVideoOptions['videoBitRate']): number {
    if (typeof bitRate === 'number' || bitRate == null) return 1
    switch (bitRate) {
      case 'extra-low':
        return 0.6
      case 'low':
        return 0.8
      case 'normal':
        return 1
      case 'high':
        return 1.2
      case 'extra-high':
        return 1.4
    }
  }

  /**
   * Start a new video recording.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occured while starting the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
   *
   * @example
   * ```ts
   * camera.current.startRecording({
   *   onRecordingFinished: (video) => console.log(video),
   *   onRecordingError: (error) => console.error(error),
   * })
   * setTimeout(() => {
   *   camera.current.stopRecording()
   * }, 5000)
   * ```
   */
  public startRecording(options: RecordVideoOptions): void {
    const { onRecordingError, onRecordingFinished, videoBitRate, ...passThruOptions } = options
    if (typeof onRecordingError !== 'function' || typeof onRecordingFinished !== 'function')
      throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!')

    if (options.flash === 'on') {
      // Enable torch for video recording
      this.setState({
        isRecordingWithFlash: true,
      })
    }

    const nativeOptions: NativeRecordVideoOptions = passThruOptions
    if (typeof videoBitRate === 'number') {
      // If the user passed an absolute number as a bit-rate, we just use this as a full override.
      nativeOptions.videoBitRateOverride = videoBitRate
    } else if (typeof videoBitRate === 'string' && videoBitRate !== 'normal') {
      // If the user passed 'low'/'normal'/'high', we need to apply this as a multiplier to the native bitrate instead of absolutely setting it
      nativeOptions.videoBitRateMultiplier = this.getBitRateMultiplier(videoBitRate)
    }

    const onRecordCallback = (video?: VideoFile, error?: CameraCaptureError): void => {
      if (this.state.isRecordingWithFlash) {
        // disable torch again if it was enabled
        this.setState({
          isRecordingWithFlash: false,
        })
      }

      if (error != null) return onRecordingError(error)
      if (video != null) return onRecordingFinished(video)
    }
    try {
      // TODO: Use TurboModules to make this awaitable.
      CameraModule.startRecording(this.handle, nativeOptions, onRecordCallback)
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }
  /**
   * Pauses the current video recording.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occured while pausing the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
   *
   * @example
   * ```ts
   * // Start
   * await camera.current.startRecording()
   * await timeout(1000)
   * // Pause
   * await camera.current.pauseRecording()
   * await timeout(500)
   * // Resume
   * await camera.current.resumeRecording()
   * await timeout(2000)
   * // Stop
   * const video = await camera.current.stopRecording()
   * ```
   */
  public async pauseRecording(): Promise<void> {
    try {
      return await CameraModule.pauseRecording(this.handle)
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }

  /**
   * Resumes a currently paused video recording.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occured while resuming the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
   *
   * @example
   * ```ts
   * // Start
   * await camera.current.startRecording()
   * await timeout(1000)
   * // Pause
   * await camera.current.pauseRecording()
   * await timeout(500)
   * // Resume
   * await camera.current.resumeRecording()
   * await timeout(2000)
   * // Stop
   * const video = await camera.current.stopRecording()
   * ```
   */
  public async resumeRecording(): Promise<void> {
    try {
      return await CameraModule.resumeRecording(this.handle)
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }

  /**
   * Stop the current video recording.
   *
   * @throws {@linkcode CameraCaptureError} When any kind of error occured while stopping the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
   *
   * @example
   * ```ts
   * await camera.current.startRecording()
   * setTimeout(async () => {
   *   const video = await camera.current.stopRecording()
   * }, 5000)
   * ```
   */
  public async stopRecording(): Promise<void> {
    try {
      return await CameraModule.stopRecording(this.handle)
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }

  /**
   * Focus the camera to a specific point in the coordinate system.
   * @param {Point} point The point to focus to. This should be relative
   * to the Camera view's coordinate system and is expressed in points.
   *  * `(0, 0)` means **top left**.
   *  * `(CameraView.width, CameraView.height)` means **bottom right**.
   *
   * Make sure the value doesn't exceed the CameraView's dimensions.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occured while focussing. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
   * @example
   * ```ts
   * await camera.current.focus({
   *   x: tapEvent.x,
   *   y: tapEvent.y
   * })
   * ```
   */
  public async focus(point: Point): Promise<void> {
    try {
      return await CameraModule.focus(this.handle, point)
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }
  //#endregion

  //#region Static Functions (NativeModule)
  /**
   * Get a list of all available camera devices on the current phone.
   *
   * If you use Hooks, use the `useCameraDevices(..)` hook instead.
   *
   * * For Camera Devices attached to the phone, it is safe to assume that this will never change.
   * * For external Camera Devices (USB cameras, Mac continuity cameras, etc.) the available Camera Devices could change over time when the external Camera device gets plugged in or plugged out, so use {@link addCameraDevicesChangedListener | addCameraDevicesChangedListener(...)} to listen for such changes.
   *
   * @example
   * ```ts
   * const devices = Camera.getAvailableCameraDevices()
   * const backCameras = devices.filter((d) => d.position === "back")
   * const frontCameras = devices.filter((d) => d.position === "front")
   * ```
   */
  public static getAvailableCameraDevices(): CameraDevice[] {
    return CameraDevices.getAvailableCameraDevices()
  }

  /**
   * Adds a listener that gets called everytime the Camera Devices change, for example
   * when an external Camera Device (USB or continuity Camera) gets plugged in or plugged out.
   *
   * If you use Hooks, use the `useCameraDevices()` hook instead.
   */
  public static addCameraDevicesChangedListener(listener: (newDevices: CameraDevice[]) => void): EmitterSubscription {
    return CameraDevices.addCameraDevicesChangedListener(listener)
  }

  /**
   * Gets the current Camera Permission Status. Check this before mounting the Camera to ensure
   * the user has permitted the app to use the camera.
   *
   * To actually prompt the user for camera permission, use {@linkcode Camera.requestCameraPermission | requestCameraPermission()}.
   */
  public static getCameraPermissionStatus(): CameraPermissionStatus {
    return CameraModule.getCameraPermissionStatus()
  }

  /**
   * Gets the current Microphone-Recording Permission Status. Check this before mounting the Camera to ensure
   * the user has permitted the app to use the microphone.
   *
   * To actually prompt the user for microphone permission, use {@linkcode Camera.requestMicrophonePermission | requestMicrophonePermission()}.
   */
  public static getMicrophonePermissionStatus(): CameraPermissionStatus {
    return CameraModule.getMicrophonePermissionStatus()
  }

  /**
   * Shows a "request permission" alert to the user, and resolves with the new camera permission status.
   *
   * If the user has previously blocked the app from using the camera, the alert will not be shown
   * and `"denied"` will be returned.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occured while requesting permission. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
   */
  public static async requestCameraPermission(): Promise<CameraPermissionRequestResult> {
    try {
      return await CameraModule.requestCameraPermission()
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }

  /**
   * Shows a "request permission" alert to the user, and resolves with the new microphone permission status.
   *
   * If the user has previously blocked the app from using the microphone, the alert will not be shown
   * and `"denied"` will be returned.
   *
   * @throws {@linkcode CameraRuntimeError} When any kind of error occured while requesting permission. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error
   */
  public static async requestMicrophonePermission(): Promise<CameraPermissionRequestResult> {
    try {
      return await CameraModule.requestMicrophonePermission()
    } catch (e) {
      throw tryParseNativeCameraError(e)
    }
  }
  //#endregion
  //#region Events (Wrapped to maintain reference equality)
  private onError(event: NativeSyntheticEvent<OnErrorEvent>): void {
    const error = event.nativeEvent
    const cause = isErrorWithCause(error.cause) ? error.cause : undefined
    // @ts-expect-error We're casting from unknown bridge types to TS unions, I expect it to hopefully work
    const cameraError = new CameraRuntimeError(error.code, error.message, cause)

    if (this.props.onError != null) {
      this.props.onError(cameraError)
    } else {
      // User didn't pass an `onError` handler, so just log it to console
      console.error(`Camera.onError(${cameraError.code}): ${cameraError.message}`, cameraError)
    }
  }

  private onInitialized(): void {
    this.props.onInitialized?.()
  }

  private onStarted(): void {
    this.props.onStarted?.()
  }

  private onStopped(): void {
    this.props.onStopped?.()
  }
  //#endregion

  private onCodeScanned(event: NativeSyntheticEvent<OnCodeScannedEvent>): void {
    const codeScanner = this.props.codeScanner
    if (codeScanner == null) return

    codeScanner.onCodeScanned(event.nativeEvent.codes, event.nativeEvent.frame)
  }

  //#region Lifecycle
  private setFrameProcessor(frameProcessor: FrameProcessor): void {
    VisionCameraProxy.setFrameProcessor(this.handle, frameProcessor)
  }

  private unsetFrameProcessor(): void {
    VisionCameraProxy.removeFrameProcessor(this.handle)
  }

  private onViewReady(): void {
    this.isNativeViewMounted = true
    if (this.props.frameProcessor != null) {
      // user passed a `frameProcessor` but we didn't set it yet because the native view was not mounted yet. set it now.
      this.setFrameProcessor(this.props.frameProcessor)
      this.lastFrameProcessor = this.props.frameProcessor
    }
  }

  /** @internal */
  componentDidUpdate(): void {
    if (!this.isNativeViewMounted) return
    const frameProcessor = this.props.frameProcessor
    if (frameProcessor !== this.lastFrameProcessor) {
      // frameProcessor argument identity changed. Update native to reflect the change.
      if (frameProcessor != null) this.setFrameProcessor(frameProcessor)
      else this.unsetFrameProcessor()
      this.lastFrameProcessor = frameProcessor
    }
  }
  //#endregion
  /** @internal */
  public render(): React.ReactNode {
    // We remove the big `device` object from the props because we only need to pass `cameraId` to native.
    const { device, frameProcessor, codeScanner, ...props } = this.props

    // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
    if (device == null) {
      throw new Error(
        'Camera: `device` is null! Select a valid Camera device. See: https://mrousavy.com/react-native-vision-camera/docs/guides/devices',
      )
    }

    const shouldEnableBufferCompression = props.video === true && frameProcessor == null
    const pixelFormat = props.pixelFormat ?? (frameProcessor != null ? 'yuv' : 'native')
    const torch = this.state.isRecordingWithFlash ? 'on' : props.torch

    return (
      <NativeCameraView
        {...props}
        cameraId={device.id}
        ref={this.ref}
        torch={torch}
        onViewReady={this.onViewReady}
        onInitialized={this.onInitialized}
        onCodeScanned={this.onCodeScanned}
        onStarted={this.onStarted}
        onStopped={this.onStopped}
        onError={this.onError}
        codeScannerOptions={codeScanner}
        enableFrameProcessor={frameProcessor != null}
        enableBufferCompression={props.enableBufferCompression ?? shouldEnableBufferCompression}
        pixelFormat={pixelFormat}
      />
    )
  }
}
//#endregion

// requireNativeComponent automatically resolves 'CameraView' to 'CameraViewManager'
const NativeCameraView = requireNativeComponent<NativeCameraViewProps>(
  'CameraView',
  // @ts-expect-error because the type declarations are kinda wrong, no?
  Camera,
)