feat: Separate usecases (decouple microphone, video, photo) (#168)

* Add props

* add props (iOS)

* Add use-cases conditionally

* Update CameraView+RecordVideo.swift

* Update RecordingSession.swift

* reconfigure on change

* Throw correct errors

* Check for audio permission

* Move `#if` outward

* Throw appropriate errors

* Update CameraView+RecordVideo.swift

* fix Splashscreen

* Dynamic filePath

* Fix video extension

* add `avci` and `m4v` file types

* Fix RecordVideo errors

* Fix audio setup

* Enable `photo`, `video` and `audio`

* Check for `video={true}` in frameProcessor

* format

* Remove unused DispatchQueue

* Update docs

* Add `supportsPhotoAndVideoCapture`

* Fix view manager

* Fix error not being propagated

* Catch normal errors too

* Update DEVICES.mdx

* Update CAPTURING.mdx

* Update classdocs
Marc Rousavy 2021-06-07 13:08:40 +02:00 committed by GitHub
parent 555474be7d
commit 72a1fad78e
30 changed files with 412 additions and 167 deletions
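
Taken together, the diff below decouples the three capture pipelines behind three new props. A minimal consumer-side sketch of the resulting API (prop and hook names as they appear in the diff and docs below):

```tsx
import * as React from 'react';
import { Camera, useCameraDevices } from 'react-native-vision-camera';

export function CapturePage(): React.ReactElement | null {
  const camera = React.useRef<Camera>(null);
  const devices = useCameraDevices();
  const device = devices.back;
  if (device == null) return null;

  return (
    <Camera
      ref={camera}
      style={{ flex: 1 }}
      device={device}
      isActive={true}
      photo={true} // enables takePhoto()
      video={true} // enables startRecording() and frame processors
      audio={true} // records sound into videos
    />
  );
}
```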

View File

@@ -15,10 +15,18 @@ data class TemporaryFile(val path: String)
 @SuppressLint("RestrictedApi", "MissingPermission")
 suspend fun CameraView.startRecording(options: ReadableMap, onRecordCallback: Callback): TemporaryFile {
   if (videoCapture == null) {
-    throw CameraNotReadyError()
+    if (video == true) {
+      throw CameraNotReadyError()
+    } else {
+      throw VideoNotEnabledError()
+    }
   }
-  if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
-    throw MicrophonePermissionError()
+  // check audio permission
+  if (audio == true) {
+    if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
+      throw MicrophonePermissionError()
+    }
   }
   if (options.hasKey("flash")) {
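
With this split, a failed `startRecording` call can now tell the JS caller why it failed. A hedged sketch of distinguishing the two cases on the JS side (error codes from this diff; `CameraCaptureError` is the library's capture error class):

```ts
import { Camera, CameraCaptureError } from 'react-native-vision-camera';

function startRecordingSafely(camera: Camera): void {
  camera.startRecording({
    onRecordingFinished: (video) => console.log(`Recorded ${video.duration}s at ${video.path}`),
    onRecordingError: (error) => {
      if (error instanceof CameraCaptureError && error.code === 'capture/video-not-enabled') {
        console.warn('Pass video={true} to <Camera> before calling startRecording()!');
      } else {
        console.error(error);
      }
    },
  });
}
```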

View File

@@ -19,11 +19,17 @@ import kotlin.system.measureTimeMillis
 suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
   val startFunc = System.nanoTime()
   Log.d(CameraView.TAG, "takePhoto() called")
-  val imageCapture = imageCapture ?: throw CameraNotReadyError()
+  if (imageCapture == null) {
+    if (photo == true) {
+      throw CameraNotReadyError()
+    } else {
+      throw PhotoNotEnabledError()
+    }
+  }
   if (options.hasKey("flash")) {
     val flashMode = options.getString("flash")
-    imageCapture.flashMode = when (flashMode) {
+    imageCapture!!.flashMode = when (flashMode) {
       "on" -> ImageCapture.FLASH_MODE_ON
       "off" -> ImageCapture.FLASH_MODE_OFF
       "auto" -> ImageCapture.FLASH_MODE_AUTO
@@ -61,7 +67,7 @@ suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
   async(coroutineContext) {
     Log.d(CameraView.TAG, "Taking picture...")
     val startCapture = System.nanoTime()
-    val pic = imageCapture.takePicture(takePhotoExecutor)
+    val pic = imageCapture!!.takePicture(takePhotoExecutor)
     val endCapture = System.nanoTime()
     Log.i(CameraView.TAG_PERF, "Finished image capture in ${(endCapture - startCapture) / 1_000_000}ms")
     pic

View File

@@ -68,6 +68,10 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   var enableDepthData = false
   var enableHighResolutionCapture: Boolean? = null
   var enablePortraitEffectsMatteDelivery = false
+  // use-cases
+  var photo: Boolean? = null
+  var video: Boolean? = null
+  var audio: Boolean? = null
   // props that require format reconfiguring
   var format: ReadableMap? = null
   var fps: Int? = null
@@ -220,9 +224,6 @@
   if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
     throw CameraPermissionError()
   }
-  if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
-    throw MicrophonePermissionError()
-  }
   if (cameraId == null) {
     throw NoCameraDeviceError()
   }
@@ -249,7 +250,7 @@
   if (format == null) {
     // let CameraX automatically find best resolution for the target aspect ratio
     Log.i(TAG, "No custom format has been set, CameraX will automatically determine best configuration...")
-    val aspectRatio = aspectRatio(previewView.width, previewView.height)
+    val aspectRatio = aspectRatio(previewView.height, previewView.width) // flipped because it's in sensor orientation.
     previewBuilder.setTargetAspectRatio(aspectRatio)
     imageCaptureBuilder.setTargetAspectRatio(aspectRatio)
     videoCaptureBuilder.setTargetAspectRatio(aspectRatio)
@@ -257,7 +258,8 @@
     // User has selected a custom format={}. Use that
     val format = DeviceFormat(format!!)
     Log.i(TAG, "Using custom format - photo: ${format.photoSize}, video: ${format.videoSize} @ $fps FPS")
-    previewBuilder.setDefaultResolution(format.photoSize)
+    val aspectRatio = aspectRatio(format.photoSize.width, format.photoSize.height)
+    previewBuilder.setTargetAspectRatio(aspectRatio)
     imageCaptureBuilder.setDefaultResolution(format.photoSize)
     videoCaptureBuilder.setDefaultResolution(format.photoSize)
@@ -311,14 +313,23 @@
   }
   val preview = previewBuilder.build()
-  imageCapture = imageCaptureBuilder.build()
-  videoCapture = videoCaptureBuilder.build()

   // Unbind use cases before rebinding
+  videoCapture = null
+  imageCapture = null
   cameraProvider.unbindAll()

   // Bind use cases to camera
-  camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageCapture!!, videoCapture!!)
+  val useCases = ArrayList<UseCase>()
+  if (video == true) {
+    videoCapture = videoCaptureBuilder.build()
+    useCases.add(videoCapture!!)
+  }
+  if (photo == true) {
+    imageCapture = imageCaptureBuilder.build()
+    useCases.add(imageCapture!!)
+  }
+  camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, *useCases.toTypedArray())
   preview.setSurfaceProvider(previewView.surfaceProvider)

   minZoom = camera!!.cameraInfo.zoomState.value?.minZoomRatio ?: 1f
@@ -371,7 +382,7 @@
   const val TAG = "CameraView"
   const val TAG_PERF = "CameraView.performance"

-  private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost")
+  private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video")
   private val arrayListOfZoom = arrayListOf("zoom")
 }

View File

@@ -23,6 +23,27 @@ class CameraViewManager : SimpleViewManager<CameraView>() {
     view.cameraId = cameraId
   }

+  @ReactProp(name = "photo")
+  fun setPhoto(view: CameraView, photo: Boolean?) {
+    if (view.photo != photo)
+      addChangedPropToTransaction(view, "photo")
+    view.photo = photo
+  }
+
+  @ReactProp(name = "video")
+  fun setVideo(view: CameraView, video: Boolean?) {
+    if (view.video != video)
+      addChangedPropToTransaction(view, "video")
+    view.video = video
+  }
+
+  @ReactProp(name = "audio")
+  fun setAudio(view: CameraView, audio: Boolean?) {
+    if (view.audio != audio)
+      addChangedPropToTransaction(view, "audio")
+    view.audio = audio
+  }
+
   @ReactProp(name = "enableDepthData")
   fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
     if (view.enableDepthData != enableDepthData)

View File

@@ -63,11 +63,19 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
   }

   // TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that
-  @ReactMethod(isBlockingSynchronousMethod = true)
+  @ReactMethod
   fun startRecording(viewTag: Int, options: ReadableMap, onRecordCallback: Callback) {
     GlobalScope.launch(Dispatchers.Main) {
       val view = findCameraView(viewTag)
-      view.startRecording(options, onRecordCallback)
+      try {
+        view.startRecording(options, onRecordCallback)
+      } catch (error: CameraError) {
+        val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
+        onRecordCallback(null, map)
+      } catch (error: Throwable) {
+        val map = makeErrorMap("capture/unknown", "An unknown error occurred while trying to start a video recording!", error)
+        onRecordCallback(null, map)
+      }
     }
   }
@@ -115,16 +123,6 @@
   val characteristics = manager.getCameraCharacteristics(id)
   val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!
-  // Filters out cameras that are LEGACY hardware level. Those don't support Preview + Photo Capture + Video Capture at the same time.
-  if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-    Log.i(
-      REACT_CLASS,
-      "Skipping Camera #$id because it does not meet the minimum requirements for react-native-vision-camera. " +
-        "See the tables at https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture for more information."
-    )
-    return@loop
-  }
   val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!!
   val isMultiCam = Build.VERSION.SDK_INT >= Build.VERSION_CODES.P &&
     capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)
@@ -162,6 +160,7 @@
   map.putBoolean("hasFlash", hasFlash)
   map.putBoolean("hasTorch", hasFlash)
   map.putBoolean("isMultiCam", isMultiCam)
+  map.putBoolean("supportsPhotoAndVideoCapture", hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY)
   map.putBoolean("supportsRawCapture", supportsRawCapture)
   map.putBoolean("supportsDepthCapture", supportsDepthCapture)
   map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)

View File

@@ -30,7 +30,7 @@ abstract class CameraError(
 val CameraError.code: String
   get() = "$domain/$id"

-class MicrophonePermissionError : CameraError("permission", "microphone-permission-denied", "The Microphone permission was denied!")
+class MicrophonePermissionError : CameraError("permission", "microphone-permission-denied", "The Microphone permission was denied! If you want to record Video without sound, pass `audio={false}`.")
 class CameraPermissionError : CameraError("permission", "camera-permission-denied", "The Camera permission was denied!")

 class InvalidTypeScriptUnionError(unionName: String, unionValue: String) : CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
@@ -52,6 +52,9 @@ class LowLightBoostNotContainedInFormatError() : CameraError(
 class CameraNotReadyError : CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")

+class VideoNotEnabledError : CameraError("capture", "video-not-enabled", "Video capture is disabled! Pass `video={true}` to enable video recordings.")
+class PhotoNotEnabledError : CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.")
 class InvalidFormatError(format: Int) : CameraError("capture", "invalid-photo-format", "The Photo has an invalid format! Expected ${ImageFormat.YUV_420_888}, actual: $format")
 class VideoEncoderError(message: String, cause: Throwable? = null) : CameraError("capture", "encoder-error", message, cause)
 class VideoMuxerError(message: String, cause: Throwable? = null) : CameraError("capture", "muxer-error", message, cause)
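
Every `CameraError` is surfaced to JS as its `domain/id` string, so the two new classes map onto the `capture/...` codes added to `CameraError.ts` further down. A small sketch of matching on them:

```ts
function isUseCaseDisabled(code: string): boolean {
  // CameraError.code is formatted as "<domain>/<id>"
  return code === 'capture/photo-not-enabled' || code === 'capture/video-not-enabled';
}
```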

View File

@@ -40,7 +40,13 @@ The most important actions are:

 ## Taking Photos

-To take a photo, simply use the Camera's [`takePhoto(...)`](../api/classes/camera.camera-1#takephoto) function:
+To take a photo you first have to enable photo capture:
+
+```tsx
+<Camera {...props} photo={true} />
+```
+
+Then, simply use the Camera's [`takePhoto(...)`](../api/classes/camera.camera-1#takephoto) function:

 ```ts
 const photo = await camera.current.takePhoto({
@@ -71,9 +77,23 @@
 While taking snapshots is faster than taking photos, the resulting image has way lower quality. You can combine both functions to create a snapshot to present to the user at first, then deliver the actual high-res photo afterwards.
 :::

+:::note
+The `takeSnapshot` function also works with `photo={false}`. For this reason, devices that do not support photo and video capture at the same time can use `video={true}` and fall back to snapshot capture for photos.
+:::
+
 ## Recording Videos

-To start a video recording, use the Camera's [`startRecording(...)`](../api/classes/camera.camera-1#startrecording) function:
+To start a video recording you first have to enable video capture:
+
+```tsx
+<Camera
+  {...props}
+  video={true}
+  audio={true} // <-- optional
+/>
+```
+
+Then, simply use the Camera's [`startRecording(...)`](../api/classes/camera.camera-1#startrecording) function:

 ```ts
 camera.current.startRecording({
@@ -85,10 +105,6 @@ camera.current.startRecording({
 For any error that occurred _while recording the video_, the `onRecordingError` callback will be invoked with a [`CaptureError`](../api/classes/cameraerror.cameracaptureerror) and the recording is therefore cancelled.

-:::note
-Due to limitations of the React Native Bridge, this function can not be awaited. This means, any errors thrown while trying to start the recording (e.g. `capture/recording-in-progress`) can only be caught synchronously (`isBlockingSynchronousMethod`). This will change with the upcoming React Native Re-Architecture.
-:::
-
 To stop the video recording, you can call [`stopRecording(...)`](../api/classes/camera.camera-1#stoprecording):

 ```ts

View File

@@ -60,7 +60,7 @@ The `CameraDevice` type also contains other useful information describing a camera device:
 Make sure to be careful when filtering out unneeded camera devices, since not every phone supports all camera device types. Some phones don't even have front-cameras. You always want to have a camera device, even when it's not the one that has the best features.
 :::

-### `useCameraDevices` hook
+### The `useCameraDevices` hook

 The react-native-vision-camera library provides a hook to make camera device selection a lot easier.
@@ -100,6 +100,14 @@ function App() {
 }
 ```

+### The `supportsPhotoAndVideoCapture` prop
+
+Camera devices provide the `supportsPhotoAndVideoCapture` property which determines whether the device allows enabling photo- and video-capture at the same time. While every iOS device supports this feature, there are some older Android devices which only allow enabling one of each - either photo capture or video capture. (Those are `LEGACY` devices, see [this table](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture).)
+
+:::note
+If you need photo- and video-capture for devices where `supportsPhotoAndVideoCapture` is `false`, you can fall back to snapshot capture (see [**"Taking Snapshots"**](https://cuvent.github.io/react-native-vision-camera/docs/guides/capturing#taking-snapshots)) instead.
+:::
+
 ### The `isActive` prop

 The Camera's `isActive` property can be used to _pause_ the session (`isActive={false}`) while still keeping the session "warm". This is more desirable than completely unmounting the camera, since _resuming_ the session (`isActive={true}`) will be **much faster** than re-mounting the camera view.
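
A hedged sketch of the snapshot fallback this section describes (the same pattern as the `@example` added to `CameraDevice.ts` later in this diff; option values are illustrative):

```tsx
import type { Camera, CameraDevice } from 'react-native-vision-camera';

async function capturePhoto(camera: Camera, device: CameraDevice): Promise<string> {
  if (device.supportsPhotoAndVideoCapture) {
    const photo = await camera.takePhoto({ flash: 'off' });
    return photo.path;
  }
  // LEGACY device: photo={true} and video={true} can't be combined,
  // so grab a (lower-quality) snapshot of the video stream instead.
  const snapshot = await camera.takeSnapshot({ quality: 85, skipMetadata: true });
  return snapshot.path;
}
```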

View File

@@ -214,6 +214,9 @@ export const CameraPage: NavigationFunctionComponent = ({ componentId }) => {
       onError={onError}
       enableZoomGesture={false}
       animatedProps={cameraAnimatedProps}
+      photo={true}
+      video={true}
+      audio={true}
       // frameProcessor={frameProcessor}
       // frameProcessorFps={1}
     />

View File

@@ -1,5 +1,5 @@
 import React, { useCallback, useEffect, useState } from 'react';
-import type { ImageRequireSource } from 'react-native';
+import { ImageRequireSource, Linking } from 'react-native';
 import { StyleSheet, View, Text, Image } from 'react-native';
 import { Navigation, NavigationFunctionComponent } from 'react-native-navigation';
@@ -17,6 +17,8 @@ export const Splash: NavigationFunctionComponent = ({ componentId }) => {
     console.log('Requesting microphone permission...');
     const permission = await Camera.requestMicrophonePermission();
     console.log(`Microphone permission status: ${permission}`);
+    if (permission === 'denied') Linking.openSettings();
     setMicrophonePermissionStatus(permission);
   }, []);
@@ -24,6 +26,8 @@
     console.log('Requesting camera permission...');
     const permission = await Camera.requestCameraPermission();
     console.log(`Camera permission status: ${permission}`);
+    if (permission === 'denied') Linking.openSettings();
     setCameraPermissionStatus(permission);
   }, []);
@@ -43,14 +47,14 @@
   }, []);

   useEffect(() => {
-    if (cameraPermissionStatus === 'authorized' && microphonePermissionStatus === 'authorized') {
+    if (cameraPermissionStatus === 'authorized' && microphonePermissionStatus !== 'not-determined') {
       Navigation.setRoot({
         root: {
           stack: {
             children: [
               {
                 component: {
-                  name: 'Home',
+                  name: 'CameraPage',
                 },
               },
             ],
@@ -73,7 +77,7 @@
         </Text>
       </Text>
     )}
-    {microphonePermissionStatus !== 'authorized' && (
+    {microphonePermissionStatus === 'not-determined' && (
       <Text style={styles.permissionText}>
         Vision Camera needs <Text style={styles.bold}>Microphone permission</Text>.
         <Text style={styles.hyperlink} onPress={requestMicrophonePermission}>

View File

@@ -21,7 +21,7 @@ enum PermissionError: String {
   var message: String {
     switch self {
     case .microphone:
-      return "The Microphone permission was denied!"
+      return "The Microphone permission was denied! If you want to record Videos without sound, pass `audio={false}`."
     case .camera:
       return "The Camera permission was denied!"
     }
@@ -186,6 +186,8 @@ enum CaptureError {
   case createTempFileError
   case createRecorderError(message: String? = nil)
   case invalidPhotoCodec
+  case videoNotEnabled
+  case photoNotEnabled
   case unknown(message: String? = nil)

   var code: String {
@@ -204,6 +206,10 @@
       return "create-recorder-error"
     case .invalidPhotoCodec:
       return "invalid-photo-codec"
+    case .videoNotEnabled:
+      return "video-not-enabled"
+    case .photoNotEnabled:
+      return "photo-not-enabled"
     case .unknown:
       return "unknown"
     }
@@ -225,6 +231,10 @@
       return "Failed to create a temporary file!"
     case let .createRecorderError(message: message):
       return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")"
+    case .videoNotEnabled:
+      return "Video capture is disabled! Pass `video={true}` to enable video recordings."
+    case .photoNotEnabled:
+      return "Photo capture is disabled! Pass `photo={true}` to enable photo capture."
     case let .unknown(message: message):
       return message ?? "An unknown error occurred while capturing a video/photo."
     }

View File

@@ -23,7 +23,6 @@ public class CameraQueues: NSObject {
                                                     autoreleaseFrequency: .inherit,
                                                     target: nil)

-  // TODO: Is it a good idea to use a separate queue for audio output processing?
   /// The serial execution queue for output processing of audio buffers.
   @objc public static let audioQueue = DispatchQueue(label: "com.mrousavy.vision.audio-queue",
                                                      qos: .userInteractive,

View File

@@ -25,6 +25,15 @@ extension CameraView {
     }
     audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false

+    let enableAudio = audio?.boolValue == true
+
+    // check microphone permission
+    if enableAudio {
+      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
+      if audioPermissionStatus != .authorized {
+        return invokeOnError(.permission(.microphone))
+      }
+    }
+
     // Audio Input
     do {
@@ -32,15 +41,17 @@
         audioCaptureSession.removeInput(audioDeviceInput)
         self.audioDeviceInput = nil
       }
-      ReactLogger.log(level: .info, message: "Adding Audio input...")
-      guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
-        return invokeOnError(.device(.microphoneUnavailable))
-      }
-      audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
-      guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
-        return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
-      }
-      audioCaptureSession.addInput(audioDeviceInput!)
+      if enableAudio {
+        ReactLogger.log(level: .info, message: "Adding Audio input...")
+        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
+          return invokeOnError(.device(.microphoneUnavailable))
+        }
+        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
+        guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
+          return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
+        }
+        audioCaptureSession.addInput(audioDeviceInput!)
+      }
     } catch let error as NSError {
       return invokeOnError(.device(.microphoneUnavailable), cause: error)
     }
@@ -50,13 +61,15 @@
       audioCaptureSession.removeOutput(audioOutput)
       self.audioOutput = nil
     }
-    ReactLogger.log(level: .info, message: "Adding Audio Data output...")
-    audioOutput = AVCaptureAudioDataOutput()
-    guard audioCaptureSession.canAddOutput(audioOutput!) else {
-      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
-    }
-    audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
-    audioCaptureSession.addOutput(audioOutput!)
+    if enableAudio {
+      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
+      audioOutput = AVCaptureAudioDataOutput()
+      guard audioCaptureSession.canAddOutput(audioOutput!) else {
+        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
+      }
+      audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
+      audioCaptureSession.addOutput(audioOutput!)
+    }
   }

   /**
/** /**

View File

@@ -84,21 +84,23 @@ extension CameraView {
       captureSession.removeOutput(photoOutput)
       self.photoOutput = nil
     }
-    ReactLogger.log(level: .info, message: "Adding Photo output...")
-    photoOutput = AVCapturePhotoOutput()
-    photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
-    if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
-      photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
-    }
-    if #available(iOS 12.0, *) {
-      photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
-    }
-    guard captureSession.canAddOutput(photoOutput!) else {
-      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
-    }
-    captureSession.addOutput(photoOutput!)
-    if videoDeviceInput!.device.position == .front {
-      photoOutput!.mirror()
+    if photo?.boolValue == true {
+      ReactLogger.log(level: .info, message: "Adding Photo output...")
+      photoOutput = AVCapturePhotoOutput()
+      photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
+      if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
+        photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
+      }
+      if #available(iOS 12.0, *) {
+        photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
+      }
+      guard captureSession.canAddOutput(photoOutput!) else {
+        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
+      }
+      captureSession.addOutput(photoOutput!)
+      if videoDeviceInput!.device.position == .front {
+        photoOutput!.mirror()
+      }
     }

     // Video Output + Frame Processor
@@ -106,16 +108,18 @@
       captureSession.removeOutput(videoOutput)
       self.videoOutput = nil
     }
-    ReactLogger.log(level: .info, message: "Adding Video Data output...")
-    videoOutput = AVCaptureVideoDataOutput()
-    guard captureSession.canAddOutput(videoOutput!) else {
-      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
-    }
-    videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
-    videoOutput!.alwaysDiscardsLateVideoFrames = true
-    captureSession.addOutput(videoOutput!)
-    if videoDeviceInput!.device.position == .front {
-      videoOutput!.mirror()
+    if video?.boolValue == true {
+      ReactLogger.log(level: .info, message: "Adding Video Data output...")
+      videoOutput = AVCaptureVideoDataOutput()
+      guard captureSession.canAddOutput(videoOutput!) else {
+        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
+      }
+      videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
+      videoOutput!.alwaysDiscardsLateVideoFrames = true
+      captureSession.addOutput(videoOutput!)
+      if videoDeviceInput!.device.position == .front {
+        videoOutput!.mirror()
+      }
     }

     invokeOnInitialized()
@@ -223,7 +227,7 @@
     if isActive {
       // restart capture session after an error occurred
-      queue.async {
+      cameraQueue.async {
         self.captureSession.startRunning()
       }
     }

View File

@@ -16,72 +16,95 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
   /**
    Starts a video + audio recording with a custom Asset Writer.
    */
-  func startRecording(options: NSDictionary, callback: @escaping RCTResponseSenderBlock) {
+  func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
     cameraQueue.async {
       ReactLogger.log(level: .info, message: "Starting Video recording...")
-      do {
-        let errorPointer = ErrorPointer(nilLiteral: ())
-        guard let tempFilePath = RCTTempFilePath("mov", errorPointer) else {
-          return callback([NSNull(), makeReactError(.capture(.createTempFileError), cause: errorPointer?.pointee)])
-        }
-
-        let tempURL = URL(string: "file://\(tempFilePath)")!
-        if let flashMode = options["flash"] as? String {
-          // use the torch as the video's flash
-          self.setTorchMode(flashMode)
-        }
-
-        var fileType = AVFileType.mov
-        if let fileTypeOption = options["fileType"] as? String {
-          fileType = AVFileType(withString: fileTypeOption)
-        }
+      let callback = Callback(jsCallbackFunc)
+
+      var fileType = AVFileType.mov
+      if let fileTypeOption = options["fileType"] as? String {
+        guard let parsed = try? AVFileType(withString: fileTypeOption) else {
+          return callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption)))
+        }
+        fileType = parsed
+      }
+
+      let errorPointer = ErrorPointer(nilLiteral: ())
+      let fileExtension = fileType.descriptor ?? "mov"
+      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
+        return callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee)
+      }
+
+      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
+      let tempURL = URL(string: "file://\(tempFilePath)")!
+      if let flashMode = options["flash"] as? String {
+        // use the torch as the video's flash
+        self.setTorchMode(flashMode)
+      }
+
+      guard let videoOutput = self.videoOutput else {
+        if self.video?.boolValue == true {
+          return callback.reject(error: .session(.cameraNotReady))
+        } else {
+          return callback.reject(error: .capture(.videoNotEnabled))
+        }
+      }

       // TODO: The startRecording() func cannot be async because RN doesn't allow
       // both a callback and a Promise in a single function. Wait for TurboModules?
       // This means that any errors that occur in this function have to be delegated through
       // the callback, but I'd prefer for them to throw for the original function instead.
+      let enableAudio = self.audio?.boolValue == true

       let onFinish = { (status: AVAssetWriter.Status, error: Error?) -> Void in
         defer {
           self.recordingSession = nil
+          if enableAudio {
             self.audioQueue.async {
               self.deactivateAudioSession()
             }
+          }
         }
         ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
-        if let error = error {
-          let description = (error as NSError).description
-          return callback([NSNull(), CameraError.capture(.unknown(message: "An unknown recording error occurred! \(description)"))])
+        if let error = error as NSError? {
+          let description = error.description
+          return callback.reject(error: .capture(.unknown(message: "An unknown recording error occurred! \(description)")), cause: error)
         } else {
           if status == .completed {
-            return callback([[
-              "path": self.recordingSession!.url.absoluteString,
-              "duration": self.recordingSession!.duration,
-            ], NSNull()])
+            return callback.resolve([
+              "path": self.recordingSession!.url.absoluteString,
+              "duration": self.recordingSession!.duration,
+            ])
           } else {
-            return callback([NSNull(), CameraError.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)")])
+            return callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
           }
         }
       }

+      do {
         self.recordingSession = try RecordingSession(url: tempURL,
                                                      fileType: fileType,
                                                      completion: onFinish)
+      } catch let error as NSError {
+        return callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error)
+      }

       // Init Video
-      guard let videoOutput = self.videoOutput,
-            let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
-            !videoSettings.isEmpty else {
-        throw CameraError.capture(.createRecorderError(message: "Failed to get video settings!"))
+      guard let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
+            !videoSettings.isEmpty else {
+        return callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
       }
       self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
                                                    isVideoMirrored: self.videoOutput!.isMirrored)

       // Init Audio (optional, async)
+      if enableAudio {
         self.audioQueue.async {
           // Activate Audio Session (blocking)
           self.activateAudioSession()
           guard let recordingSession = self.recordingSession else {
             // recording has already been cancelled
             return
@@ -95,10 +118,10 @@
           recordingSession.start()
           self.isRecording = true
         }
-    } catch EnumParserError.invalidValue {
-      return callback([NSNull(), EnumParserError.invalidValue])
-    } catch let error as NSError {
-      return callback([NSNull(), makeReactError(.capture(.createTempFileError), cause: error)])
+      } else {
+        // start recording session without audio.
+        self.recordingSession!.start()
+        self.isRecording = true
+      }
     }
   }
@@ -175,8 +198,8 @@
     }
   }

-  public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
-    #if DEBUG
+  #if DEBUG
+    public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
       if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
         let reason = findFrameDropReason(inBuffer: buffer)
         ReactLogger.log(level: .warning,
@@ -185,16 +208,16 @@
                         alsoLogToJS: true)
         hasLoggedFrameDropWarning = true
       }
-    #endif
-  }
-
-  private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {
-    var mode: CMAttachmentMode = 0
-    guard let reason = CMGetAttachment(buffer,
-                                       key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
-                                       attachmentModeOut: &mode) else {
-      return "unknown"
     }
-    return String(describing: reason)
-  }
+
+    private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {
+      var mode: CMAttachmentMode = 0
+      guard let reason = CMGetAttachment(buffer,
+                                         key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
+                                         attachmentModeOut: &mode) else {
+        return "unknown"
+      }
+      return String(describing: reason)
+    }
+  #endif
 }
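
Because the audio session is now only activated when `audio={true}`, a recording can start without ever touching the audio queue. A sketch of a muted recording from the JS side (assuming `video={true}` and `audio={false}` are set on the `<Camera>`):

```ts
import type { Camera } from 'react-native-vision-camera';

function recordMuted(camera: Camera): void {
  // With audio={false}, the native side neither checks microphone
  // permission nor activates the AVAudioSession for this recording.
  camera.startRecording({
    flash: 'off',
    onRecordingFinished: (video) => console.log(`Silent video: ${video.path}`),
    onRecordingError: (error) => console.error(error),
  });
}
```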

View File

@@ -25,8 +25,13 @@ struct TakePhotoOptions {
 extension CameraView {
   func takePhoto(options: NSDictionary, promise: Promise) {
     cameraQueue.async {
-      guard let photoOutput = self.photoOutput, let videoDeviceInput = self.videoDeviceInput else {
-        return promise.reject(error: .session(.cameraNotReady))
+      guard let photoOutput = self.photoOutput,
+            let videoDeviceInput = self.videoDeviceInput else {
+        if self.photo?.boolValue == true {
+          return promise.reject(error: .session(.cameraNotReady))
+        } else {
+          return promise.reject(error: .capture(.photoNotEnabled))
+        }
       }

       var photoSettings = AVCapturePhotoSettings()

View File

@@ -15,7 +15,6 @@ import UIKit
 //
 // CameraView+RecordVideo
 // TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
-// TODO: videoStabilizationMode

 // CameraView+TakePhoto
 // TODO: Photo HDR
@@ -24,7 +23,9 @@ private let propsThatRequireReconfiguration = ["cameraId",
                                                "enableDepthData",
                                                "enableHighResolutionCapture",
                                                "enablePortraitEffectsMatteDelivery",
-                                               "preset"]
+                                               "preset",
+                                               "photo",
+                                               "video"]
 private let propsThatRequireDeviceReconfiguration = ["fps",
                                                      "hdr",
                                                      "lowLightBoost",
@@ -42,6 +43,10 @@ public final class CameraView: UIView {
   @objc var enableHighResolutionCapture: NSNumber? // nullable bool
   @objc var enablePortraitEffectsMatteDelivery = false
   @objc var preset: String?
+  // use cases
+  @objc var photo: NSNumber? // nullable bool
+  @objc var video: NSNumber? // nullable bool
+  @objc var audio: NSNumber? // nullable bool
   // props that require format reconfiguring
   @objc var format: NSDictionary?
   @objc var fps: NSNumber?
@@ -71,8 +76,6 @@ public final class CameraView: UIView {
   // pragma MARK: Internal Properties
   internal var isReady = false
-  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo and record video)
-  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
   // Capture Session
   internal let captureSession = AVCaptureSession()
   internal let audioCaptureSession = AVCaptureSession()
@@ -130,10 +133,6 @@
                                            selector: #selector(audioSessionInterrupted),
                                            name: AVAudioSession.interruptionNotification,
                                            object: AVAudioSession.sharedInstance)
-
-    audioQueue.async {
-      self.configureAudioSession()
-    }
   }

   @available(*, unavailable)
@@ -159,6 +158,7 @@
     let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
     let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
     let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }
+    let shouldReconfigureAudioSession = changedProps.contains("audio")

     let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice
@@ -168,6 +168,7 @@
     let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")

     if shouldReconfigure ||
+      shouldReconfigureAudioSession ||
       shouldCheckActive ||
       shouldUpdateTorch ||
       shouldUpdateZoom ||
@@ -214,6 +215,13 @@
         }
       }
     }
+
+    // Audio Configuration
+    if shouldReconfigureAudioSession {
+      audioQueue.async {
+        self.configureAudioSession()
+      }
+    }
   }
 }

View File

@@ -27,6 +27,10 @@ RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString);
 RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(enableHighResolutionCapture, NSNumber); // nullable bool
 RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
+// use cases
+RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
+RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool
+RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool
 // device format
 RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);

View File

@@ -99,6 +99,7 @@ final class CameraViewManager: RCTViewManager {
         "neutralZoom": $0.neutralZoomFactor,
         "maxZoom": $0.maxAvailableVideoZoomFactor,
         "isMultiCam": $0.isMultiCam,
+        "supportsPhotoAndVideoCapture": true,
         "supportsDepthCapture": false, // TODO: supportsDepthCapture
         "supportsRawCapture": false, // TODO: supportsRawCapture
         "supportsLowLightBoost": $0.isLowLightBoostSupported,

View File

@@ -75,10 +75,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
     auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
     return jsi::Value((double) planesCount);
   }
-  if (name == "buffer") {
-    // TODO: Actually return the pixels of the buffer. Not sure if this will be a huge performance hit or not
-    return jsi::Array(runtime, 0);
-  }

   return jsi::Value::undefined();
 }

View File

@@ -10,11 +10,33 @@ import AVFoundation
 import Foundation

 extension AVFileType {
-  init(withString string: String) {
-    self.init(rawValue: string)
+  init(withString string: String) throws {
+    switch string {
+    case "mov":
+      self = .mov
+    case "mp4":
+      self = .mp4
+    case "avci":
+      self = .avci
+    case "m4v":
+      self = .m4v
+    default:
+      throw EnumParserError.invalidValue
+    }
   }

-  var descriptor: String {
-    return rawValue
+  var descriptor: String? {
+    switch self {
+    case .mov:
+      return "mov"
+    case .mp4:
+      return "mp4"
+    case .avci:
+      return "avci"
+    case .m4v:
+      return "m4v"
+    default:
+      return nil
+    }
   }
 }
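
With the stricter parser above, the `fileType` recording option maps one-to-one onto these four containers and anything else is now rejected with an `invalid-parameter` error instead of being passed through unvalidated. A hedged JS-side sketch (the option is read by the native code in this diff; whether the TS `RecordVideoOptions` type already declares it is not shown here):

```ts
import type { Camera } from 'react-native-vision-camera';

function recordAsMp4(camera: Camera): void {
  camera.startRecording({
    // one of 'mov' | 'mp4' | 'avci' | 'm4v'; the temp file's
    // extension now follows the chosen container.
    fileType: 'mp4',
    onRecordingFinished: (video) => console.log(video.path),
    onRecordingError: (error) => console.error(error),
  });
}
```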

View File

@@ -20,8 +20,4 @@ enum EnumParserError: Error {
    Raised when the descriptor does not match any of the possible values.
    */
   case invalidValue
-  /**
-   Raised when no descriptor for the given enum is available.
-   */
-  case noDescriptorAvailable
 }

View File

@@ -0,0 +1,38 @@
+//
+//  Callback.swift
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 07.06.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+import Foundation
+
+/**
+ Represents a callback to JavaScript. Syntax is the same as with Promise.
+ */
+class Callback {
+  init(_ callback: @escaping RCTResponseSenderBlock) {
+    self.callback = callback
+  }
+
+  func reject(error: CameraError, cause: NSError?) {
+    callback([NSNull(), makeReactError(error, cause: cause)])
+  }
+
+  func reject(error: CameraError) {
+    reject(error: error, cause: nil)
+  }
+
+  func resolve(_ value: Any?) {
+    callback([value, NSNull()])
+  }
+
+  func resolve() {
+    resolve(nil)
+  }
+
+  // MARK: Private
+
+  private let callback: RCTResponseSenderBlock
+}
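
`Callback` wraps the raw `RCTResponseSenderBlock` so native code can use Promise-like `resolve`/`reject` while JS still receives the familiar `(value, error)` pair. A sketch of what the JS side of that contract looks like (the object shapes here are hypothetical, for illustration only):

```ts
// The RCTResponseSenderBlock always receives [value, error], where the
// unused slot is NSNull (arriving in JS as null).
type RecordCallback = (
  video?: { path: string; duration: number } | null,
  error?: { code: string; message: string } | null,
) => void;

const onRecorded: RecordCallback = (video, error) => {
  if (error != null) throw new Error(`${error.code}: ${error.message}`);
  if (video != null) console.log(`Recorded ${video.duration}s at ${video.path}`);
};
```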

View File

@@ -11,7 +11,7 @@ import Foundation

 // MARK: - BufferType

-enum BufferType: String {
+enum BufferType {
   case audio
   case video
 }
@@ -112,7 +112,7 @@ class RecordingSession {
     }
     guard let initialTimestamp = initialTimestamp else {
       ReactLogger.log(level: .error,
-                      message: "A \(bufferType.rawValue) frame arrived, but initialTimestamp was nil. Is this RecordingSession running?",
+                      message: "A frame arrived, but initialTimestamp was nil. Is this RecordingSession running?",
                       alsoLogToJS: true)
       return
     }

View File

@@ -54,6 +54,7 @@
 		B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
 		B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
 		B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
+		B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
 		B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
 		B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
 		B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
@@ -132,6 +133,7 @@
 		B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
 		B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
 		B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
+		B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
 		B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
 		B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
 		B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
@@ -212,6 +214,7 @@
 			B887516E25E0102000DB86D6 /* MakeReactError.swift */,
 			B887516F25E0102000DB86D6 /* ReactLogger.swift */,
 			B887517025E0102000DB86D6 /* Promise.swift */,
+			B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
 			B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */,
 			B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */,
 			B81D41EF263C86F900B041FD /* JSIUtils.h */,
@@ -391,6 +394,7 @@
 				B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
 				B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
 				B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
+				B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
 				B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
 				B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */,
 				B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,

View File

@@ -358,6 +358,13 @@ export class Camera extends React.PureComponent<CameraProps> {
       this.assertFrameProcessorsEnabled();
       // frameProcessor argument changed. Update native to reflect the change.
       if (this.props.frameProcessor != null) {
+        if (this.props.video !== true) {
+          throw new CameraCaptureError(
+            'capture/video-not-enabled',
+            'Video capture is disabled! Pass `video={true}` to enable frame processors.',
+          );
+        }
+
         // 1. Spawn threaded JSI Runtime (if not already done)
         // 2. Add video data output to Camera stream (if not already done)
         // 3. Workletize the frameProcessor and prepare it for being called with frames
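
Since frame processors consume the video data output, they now require the video use-case too. A hedged sketch of a compliant setup (the hook and worklet syntax follow the library's frame-processor docs):

```tsx
import * as React from 'react';
import { Camera, useFrameProcessor } from 'react-native-vision-camera';

function CameraWithProcessor(props: React.ComponentProps<typeof Camera>): React.ReactElement {
  const frameProcessor = useFrameProcessor((frame) => {
    'worklet';
    console.log(`Frame: ${frame.width}x${frame.height}`);
  }, []);

  // video={true} is required, otherwise componentDidUpdate throws
  // 'capture/video-not-enabled' (see the check above).
  return <Camera {...props} video={true} frameProcessor={frameProcessor} frameProcessorFps={1} />;
}
```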

View File

@@ -250,6 +250,28 @@ export interface CameraDevice {
    * See [the Camera Formats documentation](https://cuvent.github.io/react-native-vision-camera/docs/guides/formats) for more information about Camera Formats.
    */
  formats: CameraDeviceFormat[];
+  /**
+   * Whether this camera device supports enabling photo and video capture at the same time.
+   *
+   * * On **iOS** devices this value is always `true`.
+   * * On newer **Android** devices this value is always `true`.
+   * * On older **Android** devices this value is `true` if the device's hardware level is `LIMITED` and above, `false` otherwise. (`LEGACY`) (See [this table](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture))
+   *
+   * If the device does not allow enabling `photo` and `video` capture at the same time, you might want to fall back to **snapshot capture** (See ["Taking Snapshots"](https://cuvent.github.io/react-native-vision-camera/docs/guides/capturing#taking-snapshots)) instead:
+   *
+   * @example
+   * ```tsx
+   * const captureMode = device.supportsPhotoAndVideoCapture ? "photo" : "snapshot"
+   * return (
+   *   <Camera
+   *     photo={captureMode === "photo"}
+   *     video={true}
+   *     audio={true}
+   *   />
+   * )
+   * ```
+   */
+  supportsPhotoAndVideoCapture: boolean;
  /**
   * Whether this camera device supports low light boost.
   */

View File

@@ -37,6 +37,8 @@ export type CaptureError =
   | 'capture/invalid-photo-codec'
   | 'capture/not-bound-error'
   | 'capture/capture-type-not-supported'
+  | 'capture/video-not-enabled'
+  | 'capture/photo-not-enabled'
   | 'capture/unknown';
 export type SystemError = 'system/no-camera-manager';
 export type UnknownError = 'unknown/unknown';

View File

@@ -34,6 +34,22 @@ export interface CameraProps extends ViewProps {
    */
   isActive: boolean;

+  //#region Use-cases
+  /**
+   * * Enables **photo capture** with the `takePhoto` function (see ["Taking Photos"](https://cuvent.github.io/react-native-vision-camera/docs/guides/capturing#taking-photos))
+   */
+  photo?: boolean;
+  /**
+   * * Enables **video capture** with the `startRecording` function (see ["Recording Videos"](https://cuvent.github.io/react-native-vision-camera/docs/guides/capturing/#recording-videos))
+   * * Enables **frame processing** (see ["Frame Processors"](https://cuvent.github.io/react-native-vision-camera/docs/guides/frame-processors))
+   */
+  video?: boolean;
+  /**
+   * * Enables **audio capture** for video recordings (see ["Recording Videos"](https://cuvent.github.io/react-native-vision-camera/docs/guides/capturing/#recording-videos))
+   */
+  audio?: boolean;
+  //#endregion
+
   //#region Common Props (torch, zoom)
   /**
    * Set the current torch mode.

View File

@@ -2,10 +2,6 @@
  * A single frame, as seen by the camera.
  */
 export interface Frame {
-  /**
-   * The raw pixel buffer.
-   */
-  buffer: unknown[];
   /**
    * Whether the underlying buffer is still valid or not. The buffer will be released after the frame processor returns.
    */