feat: New JS API for useCameraDevice and useCameraFormat and much faster getAvailableCameraDevices() (#1784)

* Update podfile

* Update useCameraFormat.ts

* Update API

* Delete FormatFilter.md

* Format CameraViewManager.m ObjC style

* Make `getAvailableCameraDevices` synchronous/blocking

* Create some docs

* fix: Fix HardwareLevel types

* fix: Use new device/format API

* Use 60 FPS format as an example

* Replace `Camera.getAvailableCameraDevices` with new `CameraDevices` API/Module

* Fix Lint

* KTLint options

* Use continuation indent of 8

* Use 2 spaces for indent

* Update .editorconfig

* Format code

* Update .editorconfig

* Format more

* Update VideoStabilizationMode.kt

* fix: Expose `CameraDevicesManager` to ObjC

* Update CameraPage.tsx

* fix: `requiresMainQueueSetup() -> false`

* Always prefer higher resolution

* Update CameraDevicesManager.swift

* Update CameraPage.tsx

* Also filter pixelFormat

* fix: Add AVFoundation import
This commit is contained in:
Marc Rousavy 2023-09-21 11:20:33 +02:00 committed by GitHub
parent 9eed89aac6
commit 977b859e46
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
61 changed files with 1110 additions and 815 deletions

View File

@ -57,17 +57,6 @@ For debugging purposes you can use the `id` or `name` properties to log and comp
* For a single Wide-Angle camera, this would be `["wide-angle-camera"]`
* For a Triple-Camera, this would be `["wide-angle-camera", "ultra-wide-angle-camera", "telephoto-camera"]`
You can use the helper function `parsePhysicalDeviceTypes` to convert a list of physical devices to a single device descriptor type which can also describe virtual devices:
```ts
console.log(device.devices)
// --> ["wide-angle-camera", "ultra-wide-angle-camera", "telephoto-camera"]
const deviceType = parsePhysicalDeviceTypes(device.devices)
console.log(deviceType)
// --> "triple-camera"
```
Always choose a camera device that is best fitted for your use-case; so you might filter out any cameras that do not support flash, have low zoom values, are not on the back side of the phone, do not contain a format with high resolution or fps, and more.
:::caution

View File

@ -1,6 +1,15 @@
[*.{kt,kts}] [*.{kt,kts}]
indent_size=2
indent_style=space indent_style=space
indent_size=2
continuation_indent_size=4
insert_final_newline=true insert_final_newline=true
max_line_length=off max_line_length=140
disabled_rules=no-wildcard-imports ktlint_code_style=android_studio
ktlint_standard=enabled
ktlint_experimental=enabled
ktlint_standard_filename=disabled # dont require PascalCase filenames
ktlint_standard_no-wildcard-imports=disabled # allow .* imports
ktlint_function_signature_rule_force_multiline_when_parameter_count_greater_or_equal_than=5
ktlint_function_signature_body_expression_wrapping=multiline
ij_kotlin_allow_trailing_comma_on_call_site=false
ij_kotlin_allow_trailing_comma=false

View File

@ -0,0 +1,84 @@
package com.mrousavy.camera
import android.content.Context
import android.hardware.camera2.CameraManager
import android.util.Log
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.bridge.ReactContextBaseJavaModule
import com.facebook.react.bridge.ReactMethod
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.modules.core.DeviceEventManagerModule
import com.mrousavy.camera.core.CameraDeviceDetails
/**
 * React Native module exposing the list of available camera devices to JS.
 *
 * The current device list is published synchronously as the module constant
 * `availableCameraDevices`, and hot-plug changes are pushed to JS through the
 * `CameraDevicesChanged` device event.
 */
class CameraDevicesManager(private val reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
  private val cameraManager = reactContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager

  private val callback = object : CameraManager.AvailabilityCallback() {
    // Camera IDs we currently consider attached; seeded with the list at construction time.
    private var devices = cameraManager.cameraIdList.toMutableList()

    // Even after onCameraUnavailable() fires, the device may still be physically
    // connected (e.g. merely in use by another client) — probing its
    // characteristics tells us whether it is actually gone.
    private fun isDeviceConnected(cameraId: String): Boolean =
      runCatching { cameraManager.getCameraCharacteristics(cameraId) }.isSuccess

    override fun onCameraAvailable(cameraId: String) {
      Log.i(TAG, "Camera #$cameraId: Available!")
      if (cameraId in devices) return
      devices.add(cameraId)
      sendAvailableDevicesChangedEvent()
    }

    override fun onCameraUnavailable(cameraId: String) {
      Log.i(TAG, "Camera #$cameraId: Unavailable!")
      if (cameraId !in devices || isDeviceConnected(cameraId)) return
      devices.remove(cameraId)
      sendAvailableDevicesChangedEvent()
    }
  }

  override fun getName(): String = TAG

  override fun initialize() {
    // Begin listening for camera hot-plug events (null handler = current thread's Looper).
    cameraManager.registerAvailabilityCallback(callback, null)
  }

  override fun invalidate() {
    cameraManager.unregisterAvailabilityCallback(callback)
    super.invalidate()
  }

  /** Serializes every camera the system currently reports into a JS-readable array. */
  private fun getDevicesJson(): ReadableArray {
    val result = Arguments.createArray()
    for (cameraId in cameraManager.cameraIdList) {
      result.pushMap(CameraDeviceDetails(cameraManager, cameraId).toMap())
    }
    return result
  }

  /** Emits a `CameraDevicesChanged` event carrying the fresh device list to JS. */
  fun sendAvailableDevicesChangedEvent() {
    reactContext
      .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)
      .emit("CameraDevicesChanged", getDevicesJson())
  }

  override fun hasConstants(): Boolean = true

  override fun getConstants(): MutableMap<String, Any> = mutableMapOf("availableCameraDevices" to getDevicesJson())

  // Required for NativeEventEmitter on the JS side; these are intentionally no-ops.
  @ReactMethod
  fun addListener(eventName: String) {}

  @ReactMethod
  fun removeListeners(count: Int) {}

  companion object {
    private const val TAG = "CameraDevices"
  }
}

View File

@ -6,11 +6,11 @@ import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager import com.facebook.react.uimanager.ViewManager
class CameraPackage : ReactPackage { class CameraPackage : ReactPackage {
override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> { override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> =
return listOf(CameraViewModule(reactContext)) listOf(
} CameraViewModule(reactContext),
CameraDevicesManager(reactContext)
)
override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> { override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> = listOf(CameraViewManager())
return listOf(CameraViewManager())
}
} }

View File

@ -2,10 +2,10 @@ package com.mrousavy.camera
import android.os.Handler import android.os.Handler
import android.os.HandlerThread import android.os.HandlerThread
import java.util.concurrent.Executor
import kotlinx.coroutines.CoroutineDispatcher import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.android.asCoroutineDispatcher import kotlinx.coroutines.android.asCoroutineDispatcher
import kotlinx.coroutines.asExecutor import kotlinx.coroutines.asExecutor
import java.util.concurrent.Executor
class CameraQueues { class CameraQueues {
companion object { companion object {
@ -32,4 +32,3 @@ class CameraQueues {
} }
} }
} }

View File

@ -5,10 +5,10 @@ import android.annotation.SuppressLint
import android.content.pm.PackageManager import android.content.pm.PackageManager
import androidx.core.content.ContextCompat import androidx.core.content.ContextCompat
import com.facebook.react.bridge.* import com.facebook.react.bridge.*
import com.mrousavy.camera.core.RecordingSession
import com.mrousavy.camera.parsers.Torch import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoCodec import com.mrousavy.camera.parsers.VideoCodec
import com.mrousavy.camera.parsers.VideoFileType import com.mrousavy.camera.parsers.VideoFileType
import com.mrousavy.camera.core.RecordingSession
import com.mrousavy.camera.utils.makeErrorMap import com.mrousavy.camera.utils.makeErrorMap
import java.util.* import java.util.*

View File

@ -15,10 +15,9 @@ import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.parsers.Flash import com.mrousavy.camera.parsers.Flash
import com.mrousavy.camera.parsers.QualityPrioritization import com.mrousavy.camera.parsers.QualityPrioritization
import com.mrousavy.camera.utils.* import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
import java.io.File import java.io.File
import java.io.FileOutputStream import java.io.FileOutputStream
import java.io.OutputStream import kotlinx.coroutines.*
private const val TAG = "CameraView.takePhoto" private const val TAG = "CameraView.takePhoto"
@ -36,12 +35,14 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap {
val flashMode = Flash.fromUnionValue(flash) val flashMode = Flash.fromUnionValue(flash)
val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization) val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization)
val photo = cameraSession.takePhoto(qualityPrioritizationMode, val photo = cameraSession.takePhoto(
qualityPrioritizationMode,
flashMode, flashMode,
enableShutterSound, enableShutterSound,
enableAutoRedEyeReduction, enableAutoRedEyeReduction,
enableAutoStabilization, enableAutoStabilization,
outputOrientation) outputOrientation
)
photo.use { photo.use {
Log.i(TAG, "Successfully captured ${photo.image.width} x ${photo.image.height} photo!") Log.i(TAG, "Successfully captured ${photo.image.width} x ${photo.image.height} photo!")
@ -83,10 +84,12 @@ private fun writePhotoToFile(photo: CameraSession.CapturedPhoto, file: File) {
} }
} }
private suspend fun savePhotoToFile(context: Context, private suspend fun savePhotoToFile(
context: Context,
cameraCharacteristics: CameraCharacteristics, cameraCharacteristics: CameraCharacteristics,
photo: CameraSession.CapturedPhoto): String { photo: CameraSession.CapturedPhoto
return withContext(Dispatchers.IO) { ): String =
withContext(Dispatchers.IO) {
when (photo.format) { when (photo.format) {
// When the format is JPEG or DEPTH JPEG we can simply save the bytes as-is // When the format is JPEG or DEPTH JPEG we can simply save the bytes as-is
ImageFormat.JPEG, ImageFormat.DEPTH_JPEG -> { ImageFormat.JPEG, ImageFormat.DEPTH_JPEG -> {
@ -111,8 +114,8 @@ private suspend fun savePhotoToFile(context: Context,
} }
} }
} }
}
private fun createFile(context: Context, extension: String): File { private fun createFile(context: Context, extension: String): File =
return File.createTempFile("mrousavy", extension, context.cacheDir).apply { deleteOnExit() } File.createTempFile("mrousavy", extension, context.cacheDir).apply {
} deleteOnExit()
}

View File

@ -15,6 +15,7 @@ import androidx.core.content.ContextCompat
import com.facebook.react.bridge.ReadableMap import com.facebook.react.bridge.ReadableMap
import com.mrousavy.camera.core.CameraSession import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.core.PreviewView import com.mrousavy.camera.core.PreviewView
import com.mrousavy.camera.core.outputs.CameraOutputs
import com.mrousavy.camera.extensions.containsAny import com.mrousavy.camera.extensions.containsAny
import com.mrousavy.camera.extensions.installHierarchyFitter import com.mrousavy.camera.extensions.installHierarchyFitter
import com.mrousavy.camera.frameprocessor.FrameProcessor import com.mrousavy.camera.frameprocessor.FrameProcessor
@ -22,7 +23,6 @@ import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.PixelFormat import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.Torch import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoStabilizationMode import com.mrousavy.camera.parsers.VideoStabilizationMode
import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch import kotlinx.coroutines.launch
@ -42,7 +42,8 @@ class CameraView(context: Context) : FrameLayout(context) {
const val TAG = "CameraView" const val TAG = "CameraView"
private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId") private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId")
private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "photo", "video", "enableFrameProcessor", "pixelFormat") private val propsThatRequireSessionReconfiguration =
arrayListOf("cameraId", "format", "photo", "video", "enableFrameProcessor", "pixelFormat")
private val propsThatRequireFormatReconfiguration = arrayListOf("fps", "hdr", "videoStabilizationMode", "lowLightBoost") private val propsThatRequireFormatReconfiguration = arrayListOf("fps", "hdr", "videoStabilizationMode", "lowLightBoost")
} }
@ -52,18 +53,21 @@ class CameraView(context: Context) : FrameLayout(context) {
var enableDepthData = false var enableDepthData = false
var enableHighQualityPhotos: Boolean? = null var enableHighQualityPhotos: Boolean? = null
var enablePortraitEffectsMatteDelivery = false var enablePortraitEffectsMatteDelivery = false
// use-cases // use-cases
var photo: Boolean? = null var photo: Boolean? = null
var video: Boolean? = null var video: Boolean? = null
var audio: Boolean? = null var audio: Boolean? = null
var enableFrameProcessor = false var enableFrameProcessor = false
var pixelFormat: PixelFormat = PixelFormat.NATIVE var pixelFormat: PixelFormat = PixelFormat.NATIVE
// props that require format reconfiguring // props that require format reconfiguring
var format: ReadableMap? = null var format: ReadableMap? = null
var fps: Int? = null var fps: Int? = null
var videoStabilizationMode: VideoStabilizationMode? = null var videoStabilizationMode: VideoStabilizationMode? = null
var hdr: Boolean? = null // nullable bool var hdr: Boolean? = null // nullable bool
var lowLightBoost: Boolean? = null // nullable bool var lowLightBoost: Boolean? = null // nullable bool
// other props // other props
var isActive = false var isActive = false
var torch: Torch = Torch.OFF var torch: Torch = Torch.OFF
@ -182,10 +186,14 @@ class CameraView(context: Context) : FrameLayout(context) {
val previewOutput = CameraOutputs.PreviewOutput(previewSurface) val previewOutput = CameraOutputs.PreviewOutput(previewSurface)
val photoOutput = if (photo == true) { val photoOutput = if (photo == true) {
CameraOutputs.PhotoOutput(targetPhotoSize) CameraOutputs.PhotoOutput(targetPhotoSize)
} else null } else {
null
}
val videoOutput = if (video == true || enableFrameProcessor) { val videoOutput = if (video == true || enableFrameProcessor) {
CameraOutputs.VideoOutput(targetVideoSize, video == true, enableFrameProcessor, pixelFormat.toImageFormat()) CameraOutputs.VideoOutput(targetVideoSize, video == true, enableFrameProcessor, pixelFormat.toImageFormat())
} else null } else {
null
}
cameraSession.configureSession(cameraId, previewOutput, photoOutput, videoOutput) cameraSession.configureSession(cameraId, previewOutput, photoOutput, videoOutput)
} catch (e: Throwable) { } catch (e: Throwable) {
@ -215,13 +223,16 @@ class CameraView(context: Context) : FrameLayout(context) {
@SuppressLint("ClickableViewAccessibility") @SuppressLint("ClickableViewAccessibility")
private fun updateZoomGesture() { private fun updateZoomGesture() {
if (enableZoomGesture) { if (enableZoomGesture) {
val scaleGestureDetector = ScaleGestureDetector(context, object: ScaleGestureDetector.SimpleOnScaleGestureListener() { val scaleGestureDetector = ScaleGestureDetector(
context,
object : ScaleGestureDetector.SimpleOnScaleGestureListener() {
override fun onScale(detector: ScaleGestureDetector): Boolean { override fun onScale(detector: ScaleGestureDetector): Boolean {
zoom *= detector.scaleFactor zoom *= detector.scaleFactor
cameraSession.setZoom(zoom) cameraSession.setZoom(zoom)
return true return true
} }
}) }
)
setOnTouchListener { _, event -> setOnTouchListener { _, event ->
scaleGestureDetector.onTouchEvent(event) scaleGestureDetector.onTouchEvent(event)
} }

View File

@ -12,9 +12,7 @@ import com.mrousavy.camera.parsers.VideoStabilizationMode
@Suppress("unused") @Suppress("unused")
class CameraViewManager : ViewGroupManager<CameraView>() { class CameraViewManager : ViewGroupManager<CameraView>() {
public override fun createViewInstance(context: ThemedReactContext): CameraView { public override fun createViewInstance(context: ThemedReactContext): CameraView = CameraView(context)
return CameraView(context)
}
override fun onAfterUpdateTransaction(view: CameraView) { override fun onAfterUpdateTransaction(view: CameraView) {
super.onAfterUpdateTransaction(view) super.onAfterUpdateTransaction(view)
@ -23,101 +21,110 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
cameraViewTransactions.remove(view) cameraViewTransactions.remove(view)
} }
override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any>? { override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any>? =
return MapBuilder.builder<String, Any>() MapBuilder.builder<String, Any>()
.put("cameraViewReady", MapBuilder.of("registrationName", "onViewReady")) .put("cameraViewReady", MapBuilder.of("registrationName", "onViewReady"))
.put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized")) .put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized"))
.put("cameraError", MapBuilder.of("registrationName", "onError")) .put("cameraError", MapBuilder.of("registrationName", "onError"))
.build() .build()
}
override fun getName(): String { override fun getName(): String = TAG
return TAG
}
@ReactProp(name = "cameraId") @ReactProp(name = "cameraId")
fun setCameraId(view: CameraView, cameraId: String) { fun setCameraId(view: CameraView, cameraId: String) {
if (view.cameraId != cameraId) if (view.cameraId != cameraId) {
addChangedPropToTransaction(view, "cameraId") addChangedPropToTransaction(view, "cameraId")
}
view.cameraId = cameraId view.cameraId = cameraId
} }
@ReactProp(name = "photo") @ReactProp(name = "photo")
fun setPhoto(view: CameraView, photo: Boolean?) { fun setPhoto(view: CameraView, photo: Boolean?) {
if (view.photo != photo) if (view.photo != photo) {
addChangedPropToTransaction(view, "photo") addChangedPropToTransaction(view, "photo")
}
view.photo = photo view.photo = photo
} }
@ReactProp(name = "video") @ReactProp(name = "video")
fun setVideo(view: CameraView, video: Boolean?) { fun setVideo(view: CameraView, video: Boolean?) {
if (view.video != video) if (view.video != video) {
addChangedPropToTransaction(view, "video") addChangedPropToTransaction(view, "video")
}
view.video = video view.video = video
} }
@ReactProp(name = "audio") @ReactProp(name = "audio")
fun setAudio(view: CameraView, audio: Boolean?) { fun setAudio(view: CameraView, audio: Boolean?) {
if (view.audio != audio) if (view.audio != audio) {
addChangedPropToTransaction(view, "audio") addChangedPropToTransaction(view, "audio")
}
view.audio = audio view.audio = audio
} }
@ReactProp(name = "enableFrameProcessor") @ReactProp(name = "enableFrameProcessor")
fun setEnableFrameProcessor(view: CameraView, enableFrameProcessor: Boolean) { fun setEnableFrameProcessor(view: CameraView, enableFrameProcessor: Boolean) {
if (view.enableFrameProcessor != enableFrameProcessor) if (view.enableFrameProcessor != enableFrameProcessor) {
addChangedPropToTransaction(view, "enableFrameProcessor") addChangedPropToTransaction(view, "enableFrameProcessor")
}
view.enableFrameProcessor = enableFrameProcessor view.enableFrameProcessor = enableFrameProcessor
} }
@ReactProp(name = "pixelFormat") @ReactProp(name = "pixelFormat")
fun setPixelFormat(view: CameraView, pixelFormat: String?) { fun setPixelFormat(view: CameraView, pixelFormat: String?) {
val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat)
if (view.pixelFormat != newPixelFormat) if (view.pixelFormat != newPixelFormat) {
addChangedPropToTransaction(view, "pixelFormat") addChangedPropToTransaction(view, "pixelFormat")
}
view.pixelFormat = newPixelFormat ?: PixelFormat.NATIVE view.pixelFormat = newPixelFormat ?: PixelFormat.NATIVE
} }
@ReactProp(name = "enableDepthData") @ReactProp(name = "enableDepthData")
fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) { fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
if (view.enableDepthData != enableDepthData) if (view.enableDepthData != enableDepthData) {
addChangedPropToTransaction(view, "enableDepthData") addChangedPropToTransaction(view, "enableDepthData")
}
view.enableDepthData = enableDepthData view.enableDepthData = enableDepthData
} }
@ReactProp(name = "enableZoomGesture") @ReactProp(name = "enableZoomGesture")
fun setEnableZoomGesture(view: CameraView, enableZoomGesture: Boolean) { fun setEnableZoomGesture(view: CameraView, enableZoomGesture: Boolean) {
if (view.enableZoomGesture != enableZoomGesture) if (view.enableZoomGesture != enableZoomGesture) {
addChangedPropToTransaction(view, "enableZoomGesture") addChangedPropToTransaction(view, "enableZoomGesture")
}
view.enableZoomGesture = enableZoomGesture view.enableZoomGesture = enableZoomGesture
} }
@ReactProp(name = "videoStabilizationMode") @ReactProp(name = "videoStabilizationMode")
fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) {
val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode)
if (view.videoStabilizationMode != newMode) if (view.videoStabilizationMode != newMode) {
addChangedPropToTransaction(view, "videoStabilizationMode") addChangedPropToTransaction(view, "videoStabilizationMode")
}
view.videoStabilizationMode = newMode view.videoStabilizationMode = newMode
} }
@ReactProp(name = "enableHighQualityPhotos") @ReactProp(name = "enableHighQualityPhotos")
fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) { fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) {
if (view.enableHighQualityPhotos != enableHighQualityPhotos) if (view.enableHighQualityPhotos != enableHighQualityPhotos) {
addChangedPropToTransaction(view, "enableHighQualityPhotos") addChangedPropToTransaction(view, "enableHighQualityPhotos")
}
view.enableHighQualityPhotos = enableHighQualityPhotos view.enableHighQualityPhotos = enableHighQualityPhotos
} }
@ReactProp(name = "enablePortraitEffectsMatteDelivery") @ReactProp(name = "enablePortraitEffectsMatteDelivery")
fun setEnablePortraitEffectsMatteDelivery(view: CameraView, enablePortraitEffectsMatteDelivery: Boolean) { fun setEnablePortraitEffectsMatteDelivery(view: CameraView, enablePortraitEffectsMatteDelivery: Boolean) {
if (view.enablePortraitEffectsMatteDelivery != enablePortraitEffectsMatteDelivery) if (view.enablePortraitEffectsMatteDelivery != enablePortraitEffectsMatteDelivery) {
addChangedPropToTransaction(view, "enablePortraitEffectsMatteDelivery") addChangedPropToTransaction(view, "enablePortraitEffectsMatteDelivery")
}
view.enablePortraitEffectsMatteDelivery = enablePortraitEffectsMatteDelivery view.enablePortraitEffectsMatteDelivery = enablePortraitEffectsMatteDelivery
} }
@ReactProp(name = "format") @ReactProp(name = "format")
fun setFormat(view: CameraView, format: ReadableMap?) { fun setFormat(view: CameraView, format: ReadableMap?) {
if (view.format != format) if (view.format != format) {
addChangedPropToTransaction(view, "format") addChangedPropToTransaction(view, "format")
}
view.format = format view.format = format
} }
@ -126,53 +133,60 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
// of type "Int?" the react bridge throws an error. // of type "Int?" the react bridge throws an error.
@ReactProp(name = "fps", defaultInt = -1) @ReactProp(name = "fps", defaultInt = -1)
fun setFps(view: CameraView, fps: Int) { fun setFps(view: CameraView, fps: Int) {
if (view.fps != fps) if (view.fps != fps) {
addChangedPropToTransaction(view, "fps") addChangedPropToTransaction(view, "fps")
}
view.fps = if (fps > 0) fps else null view.fps = if (fps > 0) fps else null
} }
@ReactProp(name = "hdr") @ReactProp(name = "hdr")
fun setHdr(view: CameraView, hdr: Boolean?) { fun setHdr(view: CameraView, hdr: Boolean?) {
if (view.hdr != hdr) if (view.hdr != hdr) {
addChangedPropToTransaction(view, "hdr") addChangedPropToTransaction(view, "hdr")
}
view.hdr = hdr view.hdr = hdr
} }
@ReactProp(name = "lowLightBoost") @ReactProp(name = "lowLightBoost")
fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) { fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) {
if (view.lowLightBoost != lowLightBoost) if (view.lowLightBoost != lowLightBoost) {
addChangedPropToTransaction(view, "lowLightBoost") addChangedPropToTransaction(view, "lowLightBoost")
}
view.lowLightBoost = lowLightBoost view.lowLightBoost = lowLightBoost
} }
@ReactProp(name = "isActive") @ReactProp(name = "isActive")
fun setIsActive(view: CameraView, isActive: Boolean) { fun setIsActive(view: CameraView, isActive: Boolean) {
if (view.isActive != isActive) if (view.isActive != isActive) {
addChangedPropToTransaction(view, "isActive") addChangedPropToTransaction(view, "isActive")
}
view.isActive = isActive view.isActive = isActive
} }
@ReactProp(name = "torch") @ReactProp(name = "torch")
fun setTorch(view: CameraView, torch: String) { fun setTorch(view: CameraView, torch: String) {
val newMode = Torch.fromUnionValue(torch) val newMode = Torch.fromUnionValue(torch)
if (view.torch != newMode) if (view.torch != newMode) {
addChangedPropToTransaction(view, "torch") addChangedPropToTransaction(view, "torch")
}
view.torch = newMode view.torch = newMode
} }
@ReactProp(name = "zoom") @ReactProp(name = "zoom")
fun setZoom(view: CameraView, zoom: Double) { fun setZoom(view: CameraView, zoom: Double) {
val zoomFloat = zoom.toFloat() val zoomFloat = zoom.toFloat()
if (view.zoom != zoomFloat) if (view.zoom != zoomFloat) {
addChangedPropToTransaction(view, "zoom") addChangedPropToTransaction(view, "zoom")
}
view.zoom = zoomFloat view.zoom = zoomFloat
} }
@ReactProp(name = "orientation") @ReactProp(name = "orientation")
fun setOrientation(view: CameraView, orientation: String?) { fun setOrientation(view: CameraView, orientation: String?) {
val newMode = Orientation.fromUnionValue(orientation) val newMode = Orientation.fromUnionValue(orientation)
if (view.orientation != newMode) if (view.orientation != newMode) {
addChangedPropToTransaction(view, "orientation") addChangedPropToTransaction(view, "orientation")
}
view.orientation = newMode view.orientation = newMode
} }

View File

@ -1,9 +1,7 @@
package com.mrousavy.camera package com.mrousavy.camera
import android.Manifest import android.Manifest
import android.content.Context
import android.content.pm.PackageManager import android.content.pm.PackageManager
import android.hardware.camera2.CameraManager
import android.util.Log import android.util.Log
import androidx.core.content.ContextCompat import androidx.core.content.ContextCompat
import com.facebook.react.bridge.* import com.facebook.react.bridge.*
@ -11,22 +9,21 @@ import com.facebook.react.module.annotations.ReactModule
import com.facebook.react.modules.core.PermissionAwareActivity import com.facebook.react.modules.core.PermissionAwareActivity
import com.facebook.react.modules.core.PermissionListener import com.facebook.react.modules.core.PermissionListener
import com.facebook.react.uimanager.UIManagerHelper import com.facebook.react.uimanager.UIManagerHelper
import com.mrousavy.camera.core.CameraDeviceDetails
import com.mrousavy.camera.frameprocessor.VisionCameraInstaller import com.mrousavy.camera.frameprocessor.VisionCameraInstaller
import com.mrousavy.camera.frameprocessor.VisionCameraProxy import com.mrousavy.camera.frameprocessor.VisionCameraProxy
import com.mrousavy.camera.parsers.* import com.mrousavy.camera.parsers.*
import com.mrousavy.camera.utils.* import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
import kotlin.coroutines.resume import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine import kotlin.coroutines.suspendCoroutine
import kotlinx.coroutines.*
@ReactModule(name = CameraViewModule.TAG) @ReactModule(name = CameraViewModule.TAG)
@Suppress("unused") @Suppress("unused")
class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJavaModule(reactContext) { class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
companion object { companion object {
const val TAG = "CameraView" const val TAG = "CameraView"
var RequestCode = 10 var sharedRequestCode = 10
} }
private val coroutineScope = CoroutineScope(Dispatchers.Default) // TODO: or Dispatchers.Main? private val coroutineScope = CoroutineScope(Dispatchers.Default) // TODO: or Dispatchers.Main?
@ -38,25 +35,32 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
} }
} }
override fun getName(): String { override fun getName(): String = TAG
return TAG
}
private suspend fun findCameraView(viewId: Int): CameraView { private suspend fun findCameraView(viewId: Int): CameraView =
return suspendCoroutine { continuation -> suspendCoroutine { continuation ->
UiThreadUtil.runOnUiThread { UiThreadUtil.runOnUiThread {
Log.d(TAG, "Finding view $viewId...") Log.d(TAG, "Finding view $viewId...")
val view = if (reactApplicationContext != null) UIManagerHelper.getUIManager(reactApplicationContext, viewId)?.resolveView(viewId) as CameraView? else null val view = if (reactApplicationContext != null) {
UIManagerHelper.getUIManager(
reactApplicationContext,
viewId
)?.resolveView(viewId) as CameraView?
} else {
null
}
Log.d(TAG, if (reactApplicationContext != null) "Found view $viewId!" else "Couldn't find view $viewId!") Log.d(TAG, if (reactApplicationContext != null) "Found view $viewId!" else "Couldn't find view $viewId!")
if (view != null) continuation.resume(view) if (view != null) {
else continuation.resumeWithException(ViewNotFoundError(viewId)) continuation.resume(view)
} else {
continuation.resumeWithException(ViewNotFoundError(viewId))
} }
} }
} }
@ReactMethod(isBlockingSynchronousMethod = true) @ReactMethod(isBlockingSynchronousMethod = true)
fun installFrameProcessorBindings(): Boolean { fun installFrameProcessorBindings(): Boolean =
return try { try {
val proxy = VisionCameraProxy(reactApplicationContext) val proxy = VisionCameraProxy(reactApplicationContext)
VisionCameraInstaller.install(proxy) VisionCameraInstaller.install(proxy)
true true
@ -64,7 +68,6 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e) Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
false false
} }
}
@ReactMethod @ReactMethod
fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) { fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
@ -87,7 +90,8 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
val map = makeErrorMap("${error.domain}/${error.id}", error.message, error) val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
onRecordCallback(null, map) onRecordCallback(null, map)
} catch (error: Throwable) { } catch (error: Throwable) {
val map = makeErrorMap("capture/unknown", "An unknown error occurred while trying to start a video recording! ${error.message}", error) val map =
makeErrorMap("capture/unknown", "An unknown error occurred while trying to start a video recording! ${error.message}", error)
onRecordCallback(null, map) onRecordCallback(null, map)
} }
} }
@ -137,22 +141,6 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
} }
} }
@ReactMethod
fun getAvailableCameraDevices(promise: Promise) {
coroutineScope.launch {
withPromise(promise) {
val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager
val devices = Arguments.createArray()
manager.cameraIdList.forEach { cameraId ->
val device = CameraDeviceDetails(manager, cameraId)
devices.pushMap(device.toMap())
}
promise.resolve(devices)
}
}
}
private fun canRequestPermission(permission: String): Boolean { private fun canRequestPermission(permission: String): Boolean {
val activity = currentActivity as? PermissionAwareActivity val activity = currentActivity as? PermissionAwareActivity
return activity?.shouldShowRequestPermissionRationale(permission) ?: false return activity?.shouldShowRequestPermissionRationale(permission) ?: false
@ -182,7 +170,7 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
fun requestCameraPermission(promise: Promise) { fun requestCameraPermission(promise: Promise) {
val activity = reactApplicationContext.currentActivity val activity = reactApplicationContext.currentActivity
if (activity is PermissionAwareActivity) { if (activity is PermissionAwareActivity) {
val currentRequestCode = RequestCode++ val currentRequestCode = sharedRequestCode++
val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray -> val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
if (requestCode == currentRequestCode) { if (requestCode == currentRequestCode) {
val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED
@ -202,7 +190,7 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
fun requestMicrophonePermission(promise: Promise) { fun requestMicrophonePermission(promise: Promise) {
val activity = reactApplicationContext.currentActivity val activity = reactApplicationContext.currentActivity
if (activity is PermissionAwareActivity) { if (activity is PermissionAwareActivity) {
val currentRequestCode = RequestCode++ val currentRequestCode = sharedRequestCode++
val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray -> val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
if (requestCode == currentRequestCode) { if (requestCode == currentRequestCode) {
val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED

View File

@ -1,7 +1,7 @@
package com.mrousavy.camera package com.mrousavy.camera
import com.mrousavy.camera.parsers.CameraDeviceError
import com.mrousavy.camera.core.outputs.CameraOutputs import com.mrousavy.camera.core.outputs.CameraOutputs
import com.mrousavy.camera.parsers.CameraDeviceError
abstract class CameraError( abstract class CameraError(
// example: "permission" // example: "permission"
@ -16,30 +16,53 @@ abstract class CameraError(
val CameraError.code: String val CameraError.code: String
get() = "$domain/$id" get() = "$domain/$id"
class MicrophonePermissionError : CameraError("permission", "microphone-permission-denied", "The Microphone permission was denied! If you want to record Video without sound, pass `audio={false}`.") class MicrophonePermissionError :
CameraError(
"permission",
"microphone-permission-denied",
"The Microphone permission was denied! If you want to record Video without sound, pass `audio={false}`."
)
class CameraPermissionError : CameraError("permission", "camera-permission-denied", "The Camera permission was denied!") class CameraPermissionError : CameraError("permission", "camera-permission-denied", "The Camera permission was denied!")
class InvalidTypeScriptUnionError(unionName: String, unionValue: String) : CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)") class InvalidTypeScriptUnionError(unionName: String, unionValue: String) :
CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
class NoCameraDeviceError : CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.") class NoCameraDeviceError :
class PixelFormatNotSupportedError(format: String) : CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!") CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
class PixelFormatNotSupportedError(format: String) :
CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
class CameraNotReadyError : CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!") class CameraNotReadyError :
class CameraCannotBeOpenedError(cameraId: String, error: CameraDeviceError) : CameraError("session", "camera-cannot-be-opened", "The given Camera device (id: $cameraId) could not be opened! Error: $error") CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
class CameraSessionCannotBeConfiguredError(cameraId: String, outputs: CameraOutputs) : CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera $cameraId! Outputs: $outputs") class CameraCannotBeOpenedError(cameraId: String, error: CameraDeviceError) :
class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error") CameraError("session", "camera-cannot-be-opened", "The given Camera device (id: $cameraId) could not be opened! Error: $error")
class CameraSessionCannotBeConfiguredError(cameraId: String, outputs: CameraOutputs) :
CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera $cameraId! Outputs: $outputs")
class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) :
CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error")
class VideoNotEnabledError : CameraError("capture", "video-not-enabled", "Video capture is disabled! Pass `video={true}` to enable video recordings.") class VideoNotEnabledError :
class PhotoNotEnabledError : CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.") CameraError("capture", "video-not-enabled", "Video capture is disabled! Pass `video={true}` to enable video recordings.")
class CaptureAbortedError(wasImageCaptured: Boolean) : CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured") class PhotoNotEnabledError :
class UnknownCaptureError(wasImageCaptured: Boolean) : CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured") CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.")
class CaptureAbortedError(wasImageCaptured: Boolean) :
CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured")
class UnknownCaptureError(wasImageCaptured: Boolean) :
CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured")
class RecorderError(name: String, extra: Int) : CameraError("capture", "recorder-error", "An error occured while recording a video! $name $extra") class RecorderError(name: String, extra: Int) :
CameraError("capture", "recorder-error", "An error occured while recording a video! $name $extra")
class NoRecordingInProgressError : CameraError("capture", "no-recording-in-progress", "There was no active video recording in progress! Did you call stopRecording() twice?") class NoRecordingInProgressError :
class RecordingInProgressError : CameraError("capture", "recording-in-progress", "There is already an active video recording in progress! Did you call startRecording() twice?") CameraError("capture", "no-recording-in-progress", "There was no active video recording in progress! Did you call stopRecording() twice?")
class RecordingInProgressError :
CameraError(
"capture",
"recording-in-progress",
"There is already an active video recording in progress! Did you call startRecording() twice?"
)
class ViewNotFoundError(viewId: Int) : CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.") class ViewNotFoundError(viewId: Int) :
CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")
class UnknownCameraError(cause: Throwable?) : CameraError("unknown", "unknown", cause?.message ?: "An unknown camera error occured.", cause) class UnknownCameraError(cause: Throwable?) : CameraError("unknown", "unknown", cause?.message ?: "An unknown camera error occured.", cause)

View File

@ -14,10 +14,10 @@ import com.facebook.react.bridge.ReadableMap
import com.mrousavy.camera.extensions.bigger import com.mrousavy.camera.extensions.bigger
import com.mrousavy.camera.extensions.getPhotoSizes import com.mrousavy.camera.extensions.getPhotoSizes
import com.mrousavy.camera.extensions.getVideoSizes import com.mrousavy.camera.extensions.getVideoSizes
import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.HardwareLevel import com.mrousavy.camera.parsers.HardwareLevel
import com.mrousavy.camera.parsers.LensFacing import com.mrousavy.camera.parsers.LensFacing
import com.mrousavy.camera.parsers.Orientation import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.VideoStabilizationMode import com.mrousavy.camera.parsers.VideoStabilizationMode
import kotlin.math.PI import kotlin.math.PI
import kotlin.math.atan import kotlin.math.atan
@ -29,50 +29,64 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
private val extensions = getSupportedExtensions() private val extensions = getSupportedExtensions()
// device characteristics // device characteristics
private val isMultiCam = capabilities.contains(11 /* TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA */) private val isMultiCam = capabilities.contains(11) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA
private val supportsDepthCapture = capabilities.contains(8 /* TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT */) private val supportsDepthCapture = capabilities.contains(8) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT
private val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) private val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)
private val supportsLowLightBoost = extensions.contains(4 /* TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT */) private val supportsLowLightBoost = extensions.contains(4) // TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT
private val lensFacing = LensFacing.fromCameraCharacteristics(characteristics) private val lensFacing = LensFacing.fromCameraCharacteristics(characteristics)
private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false
private val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f /* 35mm default */) private val focalLengths =
characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)
// 35mm is the film standard sensor size
?: floatArrayOf(35f)
private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!! private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
private val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! private val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
private val name = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION) private val name = (
else null) ?: "$lensFacing (${cameraId})" if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
characteristics.get(CameraCharacteristics.INFO_VERSION)
} else {
null
}
) ?: "$lensFacing ($cameraId)"
// "formats" (all possible configurations for this device) // "formats" (all possible configurations for this device)
private val zoomRange = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) private val zoomRange = (
else null) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
} else {
null
}
) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f)
private val minZoom = zoomRange.lower.toDouble() private val minZoom = zoomRange.lower.toDouble()
private val maxZoom = zoomRange.upper.toDouble() private val maxZoom = zoomRange.upper.toDouble()
private val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! private val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
private val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) private val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0)
private val digitalStabilizationModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0) private val digitalStabilizationModes =
private val opticalStabilizationModes = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0) characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0)
private val supportsPhotoHdr = extensions.contains(3 /* TODO: CameraExtensionCharacteristics.EXTENSION_HDR */) private val opticalStabilizationModes =
characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0)
private val supportsPhotoHdr = extensions.contains(3) // TODO: CameraExtensionCharacteristics.EXTENSION_HDR
private val supportsVideoHdr = getHasVideoHdr() private val supportsVideoHdr = getHasVideoHdr()
private val videoFormat = ImageFormat.YUV_420_888 private val videoFormat = ImageFormat.YUV_420_888
// get extensions (HDR, Night Mode, ..) // get extensions (HDR, Night Mode, ..)
private fun getSupportedExtensions(): List<Int> { private fun getSupportedExtensions(): List<Int> =
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
val extensions = cameraManager.getCameraExtensionCharacteristics(cameraId) val extensions = cameraManager.getCameraExtensionCharacteristics(cameraId)
extensions.supportedExtensions extensions.supportedExtensions
} else { } else {
emptyList() emptyList()
} }
}
private fun getHasVideoHdr(): Boolean { private fun getHasVideoHdr(): Boolean {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) { if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
val availableProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES) val availableProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
?: DynamicRangeProfiles(LongArray(0)) ?: DynamicRangeProfiles(LongArray(0))
return availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HLG10) return availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HLG10) ||
|| availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HDR10) availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HDR10)
} }
} }
return false return false
@ -117,16 +131,10 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
return deviceTypes return deviceTypes
} }
private fun getFieldOfView(): Double { private fun getFieldOfView(): Double = 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
return 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
}
private fun getVideoSizes(): List<Size> { private fun getVideoSizes(): List<Size> = characteristics.getVideoSizes(cameraId, videoFormat)
return characteristics.getVideoSizes(cameraId, videoFormat) private fun getPhotoSizes(): List<Size> = characteristics.getPhotoSizes(ImageFormat.JPEG)
}
private fun getPhotoSizes(): List<Size> {
return characteristics.getPhotoSizes(ImageFormat.JPEG)
}
private fun getFormats(): ReadableArray { private fun getFormats(): ReadableArray {
val array = Arguments.createArray() val array = Arguments.createArray()

View File

@ -25,6 +25,7 @@ import com.mrousavy.camera.PhotoNotEnabledError
import com.mrousavy.camera.RecorderError import com.mrousavy.camera.RecorderError
import com.mrousavy.camera.RecordingInProgressError import com.mrousavy.camera.RecordingInProgressError
import com.mrousavy.camera.VideoNotEnabledError import com.mrousavy.camera.VideoNotEnabledError
import com.mrousavy.camera.core.outputs.CameraOutputs
import com.mrousavy.camera.extensions.capture import com.mrousavy.camera.extensions.capture
import com.mrousavy.camera.extensions.createCaptureSession import com.mrousavy.camera.extensions.createCaptureSession
import com.mrousavy.camera.extensions.createPhotoCaptureRequest import com.mrousavy.camera.extensions.createPhotoCaptureRequest
@ -37,19 +38,23 @@ import com.mrousavy.camera.parsers.QualityPrioritization
import com.mrousavy.camera.parsers.VideoCodec import com.mrousavy.camera.parsers.VideoCodec
import com.mrousavy.camera.parsers.VideoFileType import com.mrousavy.camera.parsers.VideoFileType
import com.mrousavy.camera.parsers.VideoStabilizationMode import com.mrousavy.camera.parsers.VideoStabilizationMode
import com.mrousavy.camera.core.outputs.CameraOutputs import java.io.Closeable
import java.util.concurrent.CancellationException
import kotlin.coroutines.CoroutineContext
import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock import kotlinx.coroutines.sync.withLock
import java.io.Closeable
import java.util.concurrent.CancellationException
import kotlin.coroutines.CoroutineContext
class CameraSession(private val context: Context, class CameraSession(
private val context: Context,
private val cameraManager: CameraManager, private val cameraManager: CameraManager,
private val onInitialized: () -> Unit, private val onInitialized: () -> Unit,
private val onError: (e: Throwable) -> Unit): CoroutineScope, Closeable, CameraOutputs.Callback, CameraManager.AvailabilityCallback() { private val onError: (e: Throwable) -> Unit
) : CameraManager.AvailabilityCallback(),
CoroutineScope,
Closeable,
CameraOutputs.Callback {
companion object { companion object {
private const val TAG = "CameraSession" private const val TAG = "CameraSession"
@ -57,11 +62,13 @@ class CameraSession(private val context: Context,
private val CAN_SET_FPS = !Build.MANUFACTURER.equals("samsung", true) private val CAN_SET_FPS = !Build.MANUFACTURER.equals("samsung", true)
} }
data class CapturedPhoto(val image: Image, data class CapturedPhoto(
val image: Image,
val metadata: TotalCaptureResult, val metadata: TotalCaptureResult,
val orientation: Orientation, val orientation: Orientation,
val isMirrored: Boolean, val isMirrored: Boolean,
val format: Int): Closeable { val format: Int
) : Closeable {
override fun close() { override fun close() {
image.close() image.close()
} }
@ -92,6 +99,7 @@ class CameraSession(private val context: Context,
private val mutex = Mutex() private val mutex = Mutex()
private var isRunning = false private var isRunning = false
private var enableTorch = false private var enableTorch = false
// Video Outputs // Video Outputs
private var recording: RecordingSession? = null private var recording: RecordingSession? = null
set(value) { set(value) {
@ -127,18 +135,22 @@ class CameraSession(private val context: Context,
return Orientation.fromRotationDegrees(sensorRotation) return Orientation.fromRotationDegrees(sensorRotation)
} }
fun configureSession(cameraId: String, fun configureSession(
cameraId: String,
preview: CameraOutputs.PreviewOutput? = null, preview: CameraOutputs.PreviewOutput? = null,
photo: CameraOutputs.PhotoOutput? = null, photo: CameraOutputs.PhotoOutput? = null,
video: CameraOutputs.VideoOutput? = null) { video: CameraOutputs.VideoOutput? = null
) {
Log.i(TAG, "Configuring Session for Camera $cameraId...") Log.i(TAG, "Configuring Session for Camera $cameraId...")
val outputs = CameraOutputs(cameraId, val outputs = CameraOutputs(
cameraId,
cameraManager, cameraManager,
preview, preview,
photo, photo,
video, video,
hdr == true, hdr == true,
this) this
)
if (this.cameraId == cameraId && this.outputs == outputs && isActive == isRunning) { if (this.cameraId == cameraId && this.outputs == outputs && isActive == isRunning) {
Log.i(TAG, "Nothing changed in configuration, canceling..") Log.i(TAG, "Nothing changed in configuration, canceling..")
} }
@ -156,10 +168,12 @@ class CameraSession(private val context: Context,
} }
} }
fun configureFormat(fps: Int? = null, fun configureFormat(
fps: Int? = null,
videoStabilizationMode: VideoStabilizationMode? = null, videoStabilizationMode: VideoStabilizationMode? = null,
hdr: Boolean? = null, hdr: Boolean? = null,
lowLightBoost: Boolean? = null) { lowLightBoost: Boolean? = null
) {
Log.i(TAG, "Setting Format (fps: $fps | videoStabilization: $videoStabilizationMode | hdr: $hdr | lowLightBoost: $lowLightBoost)...") Log.i(TAG, "Setting Format (fps: $fps | videoStabilization: $videoStabilizationMode | hdr: $hdr | lowLightBoost: $lowLightBoost)...")
this.fps = fps this.fps = fps
this.videoStabilizationMode = videoStabilizationMode this.videoStabilizationMode = videoStabilizationMode
@ -170,18 +184,23 @@ class CameraSession(private val context: Context,
val currentOutputs = outputs val currentOutputs = outputs
if (currentOutputs != null && currentOutputs.enableHdr != hdr) { if (currentOutputs != null && currentOutputs.enableHdr != hdr) {
// Update existing HDR for Outputs // Update existing HDR for Outputs
this.outputs = CameraOutputs(currentOutputs.cameraId, this.outputs = CameraOutputs(
currentOutputs.cameraId,
cameraManager, cameraManager,
currentOutputs.preview, currentOutputs.preview,
currentOutputs.photo, currentOutputs.photo,
currentOutputs.video, currentOutputs.video,
hdr, hdr,
this) this
)
needsReconfiguration = true needsReconfiguration = true
} }
launch { launch {
if (needsReconfiguration) startRunning() if (needsReconfiguration) {
else updateRepeatingRequest() startRunning()
} else {
updateRepeatingRequest()
}
} }
} }
@ -208,12 +227,14 @@ class CameraSession(private val context: Context,
videoPipeline.setFrameProcessorOutput(this.frameProcessor) videoPipeline.setFrameProcessorOutput(this.frameProcessor)
} }
suspend fun takePhoto(qualityPrioritization: QualityPrioritization, suspend fun takePhoto(
qualityPrioritization: QualityPrioritization,
flashMode: Flash, flashMode: Flash,
enableShutterSound: Boolean, enableShutterSound: Boolean,
enableRedEyeReduction: Boolean, enableRedEyeReduction: Boolean,
enableAutoStabilization: Boolean, enableAutoStabilization: Boolean,
outputOrientation: Orientation): CapturedPhoto { outputOrientation: Orientation
): CapturedPhoto {
val captureSession = captureSession ?: throw CameraNotReadyError() val captureSession = captureSession ?: throw CameraNotReadyError()
val outputs = outputs ?: throw CameraNotReadyError() val outputs = outputs ?: throw CameraNotReadyError()
@ -223,14 +244,16 @@ class CameraSession(private val context: Context,
val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id)
val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics) val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics)
val captureRequest = captureSession.device.createPhotoCaptureRequest(cameraManager, val captureRequest = captureSession.device.createPhotoCaptureRequest(
cameraManager,
photoOutput.surface, photoOutput.surface,
zoom, zoom,
qualityPrioritization, qualityPrioritization,
flashMode, flashMode,
enableRedEyeReduction, enableRedEyeReduction,
enableAutoStabilization, enableAutoStabilization,
orientation) orientation
)
Log.i(TAG, "Photo capture 1/3 - starting capture...") Log.i(TAG, "Photo capture 1/3 - starting capture...")
val result = captureSession.capture(captureRequest, enableShutterSound) val result = captureSession.capture(captureRequest, enableShutterSound)
val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!!
@ -252,11 +275,13 @@ class CameraSession(private val context: Context,
photoOutputSynchronizer.set(image.timestamp, image) photoOutputSynchronizer.set(image.timestamp, image)
} }
suspend fun startRecording(enableAudio: Boolean, suspend fun startRecording(
enableAudio: Boolean,
codec: VideoCodec, codec: VideoCodec,
fileType: VideoFileType, fileType: VideoFileType,
callback: (video: RecordingSession.Video) -> Unit, callback: (video: RecordingSession.Video) -> Unit,
onError: (error: RecorderError) -> Unit) { onError: (error: RecorderError) -> Unit
) {
mutex.withLock { mutex.withLock {
if (recording != null) throw RecordingInProgressError() if (recording != null) throw RecordingInProgressError()
val outputs = outputs ?: throw CameraNotReadyError() val outputs = outputs ?: throw CameraNotReadyError()
@ -396,9 +421,7 @@ class CameraSession(private val context: Context,
// Caches the result of outputs.hashCode() of the last getCaptureSession call // Caches the result of outputs.hashCode() of the last getCaptureSession call
private var lastOutputsHashCode: Int? = null private var lastOutputsHashCode: Int? = null
private suspend fun getCaptureSession(cameraDevice: CameraDevice, private suspend fun getCaptureSession(cameraDevice: CameraDevice, outputs: CameraOutputs, onClosed: () -> Unit): CameraCaptureSession {
outputs: CameraOutputs,
onClosed: () -> Unit): CameraCaptureSession {
val currentSession = captureSession val currentSession = captureSession
if (currentSession?.device == cameraDevice && outputs.hashCode() == lastOutputsHashCode) { if (currentSession?.device == cameraDevice && outputs.hashCode() == lastOutputsHashCode) {
// We already opened a CameraCaptureSession on this device // We already opened a CameraCaptureSession on this device
@ -426,11 +449,13 @@ class CameraSession(private val context: Context,
return session return session
} }
private fun getPreviewCaptureRequest(fps: Int? = null, private fun getPreviewCaptureRequest(
fps: Int? = null,
videoStabilizationMode: VideoStabilizationMode? = null, videoStabilizationMode: VideoStabilizationMode? = null,
lowLightBoost: Boolean? = null, lowLightBoost: Boolean? = null,
hdr: Boolean? = null, hdr: Boolean? = null,
torch: Boolean? = null): CaptureRequest { torch: Boolean? = null
): CaptureRequest {
val captureRequest = previewRequest ?: throw CameraNotReadyError() val captureRequest = previewRequest ?: throw CameraNotReadyError()
// FPS // FPS
@ -442,9 +467,16 @@ class CameraSession(private val context: Context,
captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, videoStabilizationMode?.toOpticalStabilizationMode()) captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, videoStabilizationMode?.toOpticalStabilizationMode())
// Night/HDR Mode // Night/HDR Mode
val sceneMode = if (hdr == true) CaptureRequest.CONTROL_SCENE_MODE_HDR else if (lowLightBoost == true) CaptureRequest.CONTROL_SCENE_MODE_NIGHT else null val sceneMode = if (hdr ==
true
) {
CaptureRequest.CONTROL_SCENE_MODE_HDR
} else if (lowLightBoost == true) CaptureRequest.CONTROL_SCENE_MODE_NIGHT else null
captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, sceneMode) captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, sceneMode)
captureRequest.set(CaptureRequest.CONTROL_MODE, if (sceneMode != null) CaptureRequest.CONTROL_MODE_USE_SCENE_MODE else CaptureRequest.CONTROL_MODE_AUTO) captureRequest.set(
CaptureRequest.CONTROL_MODE,
if (sceneMode != null) CaptureRequest.CONTROL_MODE_USE_SCENE_MODE else CaptureRequest.CONTROL_MODE_AUTO
)
// Zoom // Zoom
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {

View File

@ -1,4 +1,4 @@
package com.mrousavy.camera.core; package com.mrousavy.camera.core
import android.media.Image import android.media.Image
import kotlinx.coroutines.CompletableDeferred import kotlinx.coroutines.CompletableDeferred

View File

@ -12,10 +12,12 @@ import com.mrousavy.camera.extensions.getPreviewSize
import kotlin.math.roundToInt import kotlin.math.roundToInt
@SuppressLint("ViewConstructor") @SuppressLint("ViewConstructor")
class PreviewView(context: Context, class PreviewView(
context: Context,
cameraManager: CameraManager, cameraManager: CameraManager,
cameraId: String, cameraId: String,
private val onSurfaceChanged: (surface: Surface?) -> Unit): SurfaceView(context) { private val onSurfaceChanged: (surface: Surface?) -> Unit
) : SurfaceView(context) {
private val targetSize: Size private val targetSize: Size
private val aspectRatio: Float private val aspectRatio: Float
get() = targetSize.width.toFloat() / targetSize.height.toFloat() get() = targetSize.width.toFloat() / targetSize.height.toFloat()
@ -26,7 +28,7 @@ class PreviewView(context: Context,
Log.i(TAG, "Using Preview Size ${targetSize.width} x ${targetSize.height}.") Log.i(TAG, "Using Preview Size ${targetSize.width} x ${targetSize.height}.")
holder.setFixedSize(targetSize.width, targetSize.height) holder.setFixedSize(targetSize.width, targetSize.height)
holder.addCallback(object: SurfaceHolder.Callback { holder.addCallback(object : SurfaceHolder.Callback {
override fun surfaceCreated(holder: SurfaceHolder) { override fun surfaceCreated(holder: SurfaceHolder) {
Log.i(TAG, "Surface created! ${holder.surface}") Log.i(TAG, "Surface created! ${holder.surface}")
onSurfaceChanged(holder.surface) onSurfaceChanged(holder.surface)

View File

@ -14,7 +14,8 @@ import com.mrousavy.camera.parsers.VideoCodec
import com.mrousavy.camera.parsers.VideoFileType import com.mrousavy.camera.parsers.VideoFileType
import java.io.File import java.io.File
class RecordingSession(context: Context, class RecordingSession(
context: Context,
val size: Size, val size: Size,
private val enableAudio: Boolean, private val enableAudio: Boolean,
private val fps: Int? = null, private val fps: Int? = null,
@ -22,9 +23,11 @@ class RecordingSession(context: Context,
private val orientation: Orientation, private val orientation: Orientation,
private val fileType: VideoFileType = VideoFileType.MP4, private val fileType: VideoFileType = VideoFileType.MP4,
private val callback: (video: Video) -> Unit, private val callback: (video: Video) -> Unit,
private val onError: (error: RecorderError) -> Unit) { private val onError: (error: RecorderError) -> Unit
) {
companion object { companion object {
private const val TAG = "RecordingSession" private const val TAG = "RecordingSession"
// bits per second // bits per second
private const val VIDEO_BIT_RATE = 10_000_000 private const val VIDEO_BIT_RATE = 10_000_000
private const val AUDIO_SAMPLING_RATE = 44_100 private const val AUDIO_SAMPLING_RATE = 44_100
@ -67,7 +70,7 @@ class RecordingSession(context: Context,
recorder.setAudioChannels(AUDIO_CHANNELS) recorder.setAudioChannels(AUDIO_CHANNELS)
} }
recorder.setInputSurface(surface) recorder.setInputSurface(surface)
//recorder.setOrientationHint(orientation.toDegrees()) // recorder.setOrientationHint(orientation.toDegrees())
recorder.setOnErrorListener { _, what, extra -> recorder.setOnErrorListener { _, what, extra ->
Log.e(TAG, "MediaRecorder Error: $what ($extra)") Log.e(TAG, "MediaRecorder Error: $what ($extra)")

View File

@ -3,7 +3,6 @@ package com.mrousavy.camera.core
import android.graphics.ImageFormat import android.graphics.ImageFormat
import android.media.ImageReader import android.media.ImageReader
import android.media.ImageWriter import android.media.ImageWriter
import android.media.MediaRecorder
import android.util.Log import android.util.Log
import android.view.Surface import android.view.Surface
import com.mrousavy.camera.CameraQueues import com.mrousavy.camera.CameraQueues
@ -13,10 +12,9 @@ import com.mrousavy.camera.parsers.Orientation
import java.io.Closeable import java.io.Closeable
@Suppress("JoinDeclarationAndAssignment") @Suppress("JoinDeclarationAndAssignment")
class VideoPipeline(val width: Int, class VideoPipeline(val width: Int, val height: Int, val format: Int = ImageFormat.PRIVATE, private val isMirrored: Boolean = false) :
val height: Int, ImageReader.OnImageAvailableListener,
val format: Int = ImageFormat.PRIVATE, Closeable {
private val isMirrored: Boolean = false): ImageReader.OnImageAvailableListener, Closeable {
companion object { companion object {
private const val MAX_IMAGES = 3 private const val MAX_IMAGES = 3
private const val TAG = "VideoPipeline" private const val TAG = "VideoPipeline"

View File

@ -9,32 +9,35 @@ import android.util.Log
import android.util.Size import android.util.Size
import android.view.Surface import android.view.Surface
import com.mrousavy.camera.CameraQueues import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.core.VideoPipeline
import com.mrousavy.camera.extensions.closestToOrMax import com.mrousavy.camera.extensions.closestToOrMax
import com.mrousavy.camera.extensions.getPhotoSizes import com.mrousavy.camera.extensions.getPhotoSizes
import com.mrousavy.camera.extensions.getPreviewSize import com.mrousavy.camera.extensions.getPreviewSize
import com.mrousavy.camera.extensions.getVideoSizes import com.mrousavy.camera.extensions.getVideoSizes
import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable import java.io.Closeable
class CameraOutputs(val cameraId: String, class CameraOutputs(
val cameraId: String,
cameraManager: CameraManager, cameraManager: CameraManager,
val preview: PreviewOutput? = null, val preview: PreviewOutput? = null,
val photo: PhotoOutput? = null, val photo: PhotoOutput? = null,
val video: VideoOutput? = null, val video: VideoOutput? = null,
val enableHdr: Boolean? = false, val enableHdr: Boolean? = false,
val callback: Callback): Closeable { val callback: Callback
) : Closeable {
companion object { companion object {
private const val TAG = "CameraOutputs" private const val TAG = "CameraOutputs"
const val PHOTO_OUTPUT_BUFFER_SIZE = 3 const val PHOTO_OUTPUT_BUFFER_SIZE = 3
} }
data class PreviewOutput(val surface: Surface) data class PreviewOutput(val surface: Surface)
data class PhotoOutput(val targetSize: Size? = null, data class PhotoOutput(val targetSize: Size? = null, val format: Int = ImageFormat.JPEG)
val format: Int = ImageFormat.JPEG) data class VideoOutput(
data class VideoOutput(val targetSize: Size? = null, val targetSize: Size? = null,
val enableRecording: Boolean = false, val enableRecording: Boolean = false,
val enableFrameProcessor: Boolean? = false, val enableFrameProcessor: Boolean? = false,
val format: Int = ImageFormat.PRIVATE) val format: Int = ImageFormat.PRIVATE
)
interface Callback { interface Callback {
fun onPhotoCaptured(image: Image) fun onPhotoCaptured(image: Image)
@ -58,14 +61,14 @@ class CameraOutputs(val cameraId: String,
override fun equals(other: Any?): Boolean { override fun equals(other: Any?): Boolean {
if (other !is CameraOutputs) return false if (other !is CameraOutputs) return false
return this.cameraId == other.cameraId return this.cameraId == other.cameraId &&
&& this.preview?.surface == other.preview?.surface this.preview?.surface == other.preview?.surface &&
&& this.photo?.targetSize == other.photo?.targetSize this.photo?.targetSize == other.photo?.targetSize &&
&& this.photo?.format == other.photo?.format this.photo?.format == other.photo?.format &&
&& this.video?.enableRecording == other.video?.enableRecording this.video?.enableRecording == other.video?.enableRecording &&
&& this.video?.targetSize == other.video?.targetSize this.video?.targetSize == other.video?.targetSize &&
&& this.video?.format == other.video?.format this.video?.format == other.video?.format &&
&& this.enableHdr == other.enableHdr this.enableHdr == other.enableHdr
} }
override fun hashCode(): Int { override fun hashCode(): Int {

View File

@ -5,15 +5,18 @@ import android.util.Log
import android.util.Size import android.util.Size
import java.io.Closeable import java.io.Closeable
class ImageReaderOutput(private val imageReader: ImageReader, class ImageReaderOutput(private val imageReader: ImageReader, outputType: OutputType, dynamicRangeProfile: Long? = null) :
outputType: OutputType, SurfaceOutput(
dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(imageReader.surface, Size(imageReader.width, imageReader.height), outputType, dynamicRangeProfile) { imageReader.surface,
Size(imageReader.width, imageReader.height),
outputType,
dynamicRangeProfile
),
Closeable {
override fun close() { override fun close() {
Log.i(TAG, "Closing ${imageReader.width}x${imageReader.height} $outputType ImageReader..") Log.i(TAG, "Closing ${imageReader.width}x${imageReader.height} $outputType ImageReader..")
imageReader.close() imageReader.close()
} }
override fun toString(): String { override fun toString(): String = "$outputType (${imageReader.width} x ${imageReader.height} in format #${imageReader.imageFormat})"
return "$outputType (${imageReader.width} x ${imageReader.height} in format #${imageReader.imageFormat})"
}
} }

View File

@ -10,11 +10,13 @@ import android.view.Surface
import androidx.annotation.RequiresApi import androidx.annotation.RequiresApi
import java.io.Closeable import java.io.Closeable
open class SurfaceOutput(val surface: Surface, open class SurfaceOutput(
val surface: Surface,
val size: Size, val size: Size,
val outputType: OutputType, val outputType: OutputType,
private val dynamicRangeProfile: Long? = null, private val dynamicRangeProfile: Long? = null,
private val closeSurfaceOnEnd: Boolean = false): Closeable { private val closeSurfaceOnEnd: Boolean = false
) : Closeable {
companion object { companion object {
const val TAG = "SurfaceOutput" const val TAG = "SurfaceOutput"
@ -47,9 +49,7 @@ open class SurfaceOutput(val surface: Surface,
return result return result
} }
override fun toString(): String { override fun toString(): String = "$outputType (${size.width} x ${size.height})"
return "$outputType (${size.width} x ${size.height})"
}
override fun close() { override fun close() {
if (closeSurfaceOnEnd) { if (closeSurfaceOnEnd) {
@ -64,13 +64,12 @@ open class SurfaceOutput(val surface: Surface,
VIDEO_AND_PREVIEW; VIDEO_AND_PREVIEW;
@RequiresApi(Build.VERSION_CODES.TIRAMISU) @RequiresApi(Build.VERSION_CODES.TIRAMISU)
fun toOutputType(): Int { fun toOutputType(): Int =
return when(this) { when (this) {
PHOTO -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE PHOTO -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE
VIDEO -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD VIDEO -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD
PREVIEW -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW PREVIEW -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW
VIDEO_AND_PREVIEW -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL VIDEO_AND_PREVIEW -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL
} }
} }
}
} }

View File

@ -5,15 +5,18 @@ import android.util.Size
import com.mrousavy.camera.core.VideoPipeline import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable import java.io.Closeable
class VideoPipelineOutput(val videoPipeline: VideoPipeline, class VideoPipelineOutput(val videoPipeline: VideoPipeline, outputType: OutputType, dynamicRangeProfile: Long? = null) :
outputType: OutputType, SurfaceOutput(
dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(videoPipeline.surface, Size(videoPipeline.width, videoPipeline.height), outputType, dynamicRangeProfile) { videoPipeline.surface,
Size(videoPipeline.width, videoPipeline.height),
outputType,
dynamicRangeProfile
),
Closeable {
override fun close() { override fun close() {
Log.i(TAG, "Closing ${videoPipeline.width}x${videoPipeline.height} Video Pipeline..") Log.i(TAG, "Closing ${videoPipeline.width}x${videoPipeline.height} Video Pipeline..")
videoPipeline.close() videoPipeline.close()
} }
override fun toString(): String { override fun toString(): String = "$outputType (${videoPipeline.width} x ${videoPipeline.height} in format #${videoPipeline.format})"
return "$outputType (${videoPipeline.width} x ${videoPipeline.height} in format #${videoPipeline.format})"
}
} }

View File

@ -12,14 +12,12 @@ import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine import kotlin.coroutines.suspendCoroutine
suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult { suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult =
return suspendCoroutine { continuation -> suspendCoroutine { continuation ->
this.capture(captureRequest, object: CameraCaptureSession.CaptureCallback() { this.capture(
override fun onCaptureCompleted( captureRequest,
session: CameraCaptureSession, object : CameraCaptureSession.CaptureCallback() {
request: CaptureRequest, override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
result: TotalCaptureResult
) {
super.onCaptureCompleted(session, request, result) super.onCaptureCompleted(session, request, result)
continuation.resume(result) continuation.resume(result)
@ -34,11 +32,7 @@ suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableS
} }
} }
override fun onCaptureFailed( override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
session: CameraCaptureSession,
request: CaptureRequest,
failure: CaptureFailure
) {
super.onCaptureFailed(session, request, failure) super.onCaptureFailed(session, request, failure)
val wasImageCaptured = failure.wasImageCaptured() val wasImageCaptured = failure.wasImageCaptured()
val error = when (failure.reason) { val error = when (failure.reason) {
@ -48,6 +42,7 @@ suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableS
} }
continuation.resumeWithException(error) continuation.resumeWithException(error)
} }
}, CameraQueues.cameraQueue.handler) },
CameraQueues.cameraQueue.handler
)
} }
}

View File

@ -8,29 +8,33 @@ import android.hardware.camera2.params.OutputConfiguration
import android.hardware.camera2.params.SessionConfiguration import android.hardware.camera2.params.SessionConfiguration
import android.os.Build import android.os.Build
import android.util.Log import android.util.Log
import androidx.annotation.RequiresApi
import com.mrousavy.camera.CameraQueues import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.CameraSessionCannotBeConfiguredError import com.mrousavy.camera.CameraSessionCannotBeConfiguredError
import com.mrousavy.camera.core.outputs.CameraOutputs import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException import kotlin.coroutines.resumeWithException
import kotlinx.coroutines.suspendCancellableCoroutine
private const val TAG = "CreateCaptureSession" private const val TAG = "CreateCaptureSession"
private var sessionId = 1000 private var sessionId = 1000
suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager, suspend fun CameraDevice.createCaptureSession(
cameraManager: CameraManager,
outputs: CameraOutputs, outputs: CameraOutputs,
onClosed: (session: CameraCaptureSession) -> Unit, onClosed: (session: CameraCaptureSession) -> Unit,
queue: CameraQueues.CameraQueue): CameraCaptureSession { queue: CameraQueues.CameraQueue
return suspendCancellableCoroutine { continuation -> ): CameraCaptureSession =
suspendCancellableCoroutine { continuation ->
val characteristics = cameraManager.getCameraCharacteristics(id) val characteristics = cameraManager.getCameraCharacteristics(id)
val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!! val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!
val sessionId = sessionId++ val sessionId = sessionId++
Log.i(TAG, "Camera $id: Creating Capture Session #$sessionId... " + Log.i(
"Hardware Level: $hardwareLevel} | Outputs: $outputs") TAG,
"Camera $id: Creating Capture Session #$sessionId... " +
"Hardware Level: $hardwareLevel} | Outputs: $outputs"
)
val callback = object: CameraCaptureSession.StateCallback() { val callback = object : CameraCaptureSession.StateCallback() {
override fun onConfigured(session: CameraCaptureSession) { override fun onConfigured(session: CameraCaptureSession) {
Log.i(TAG, "Camera $id: Capture Session #$sessionId configured!") Log.i(TAG, "Camera $id: Capture Session #$sessionId configured!")
continuation.resume(session) continuation.resume(session)
@ -78,4 +82,3 @@ suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
this.createCaptureSessionByOutputConfigurations(outputConfigurations, callback, queue.handler) this.createCaptureSessionByOutputConfigurations(outputConfigurations, callback, queue.handler)
} }
} }
}

View File

@ -23,14 +23,16 @@ private fun supportsSnapshotCapture(cameraCharacteristics: CameraCharacteristics
return true return true
} }
fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager, fun CameraDevice.createPhotoCaptureRequest(
cameraManager: CameraManager,
surface: Surface, surface: Surface,
zoom: Float, zoom: Float,
qualityPrioritization: QualityPrioritization, qualityPrioritization: QualityPrioritization,
flashMode: Flash, flashMode: Flash,
enableRedEyeReduction: Boolean, enableRedEyeReduction: Boolean,
enableAutoStabilization: Boolean, enableAutoStabilization: Boolean,
orientation: Orientation): CaptureRequest { orientation: Orientation
): CaptureRequest {
val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id) val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id)
val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) { val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) {

View File

@ -9,20 +9,22 @@ import com.mrousavy.camera.CameraCannotBeOpenedError
import com.mrousavy.camera.CameraDisconnectedError import com.mrousavy.camera.CameraDisconnectedError
import com.mrousavy.camera.CameraQueues import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.parsers.CameraDeviceError import com.mrousavy.camera.parsers.CameraDeviceError
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException import kotlin.coroutines.resumeWithException
import kotlinx.coroutines.suspendCancellableCoroutine
private const val TAG = "CameraManager" private const val TAG = "CameraManager"
@SuppressLint("MissingPermission") @SuppressLint("MissingPermission")
suspend fun CameraManager.openCamera(cameraId: String, suspend fun CameraManager.openCamera(
cameraId: String,
onDisconnected: (camera: CameraDevice, reason: Throwable) -> Unit, onDisconnected: (camera: CameraDevice, reason: Throwable) -> Unit,
queue: CameraQueues.CameraQueue): CameraDevice { queue: CameraQueues.CameraQueue
return suspendCancellableCoroutine { continuation -> ): CameraDevice =
suspendCancellableCoroutine { continuation ->
Log.i(TAG, "Camera $cameraId: Opening...") Log.i(TAG, "Camera $cameraId: Opening...")
val callback = object: CameraDevice.StateCallback() { val callback = object : CameraDevice.StateCallback() {
override fun onOpened(camera: CameraDevice) { override fun onOpened(camera: CameraDevice) {
Log.i(TAG, "Camera $cameraId: Opened!") Log.i(TAG, "Camera $cameraId: Opened!")
continuation.resume(camera) continuation.resume(camera)
@ -56,4 +58,3 @@ suspend fun CameraManager.openCamera(cameraId: String,
this.openCamera(cameraId, callback, queue.handler) this.openCamera(cameraId, callback, queue.handler)
} }
} }
}

View File

@ -1,5 +1,3 @@
package com.mrousavy.camera.extensions package com.mrousavy.camera.extensions
fun <T> List<T>.containsAny(elements: List<T>): Boolean { fun <T> List<T>.containsAny(elements: List<T>): Boolean = elements.any { element -> this.contains(element) }
return elements.any { element -> this.contains(element) }
}

View File

@ -7,23 +7,21 @@ import kotlin.math.abs
import kotlin.math.max import kotlin.math.max
import kotlin.math.min import kotlin.math.min
fun List<Size>.closestToOrMax(size: Size?): Size { fun List<Size>.closestToOrMax(size: Size?): Size =
return if (size != null) { if (size != null) {
this.minBy { abs(it.width - size.width) + abs(it.height - size.height) } this.minBy { abs(it.width - size.width) + abs(it.height - size.height) }
} else { } else {
this.maxBy { it.width * it.height } this.maxBy { it.width * it.height }
} }
}
fun Size.rotated(surfaceRotation: Int): Size { fun Size.rotated(surfaceRotation: Int): Size =
return when (surfaceRotation) { when (surfaceRotation) {
Surface.ROTATION_0 -> Size(width, height) Surface.ROTATION_0 -> Size(width, height)
Surface.ROTATION_90 -> Size(height, width) Surface.ROTATION_90 -> Size(height, width)
Surface.ROTATION_180 -> Size(width, height) Surface.ROTATION_180 -> Size(width, height)
Surface.ROTATION_270 -> Size(height, width) Surface.ROTATION_270 -> Size(height, width)
else -> Size(width, height) else -> Size(width, height)
} }
}
val Size.bigger: Int val Size.bigger: Int
get() = max(width, height) get() = max(width, height)
@ -35,7 +33,4 @@ val SizeF.bigger: Float
val SizeF.smaller: Float val SizeF.smaller: Float
get() = min(this.width, this.height) get() = min(this.width, this.height)
operator fun Size.compareTo(other: Size): Int { operator fun Size.compareTo(other: Size): Int = (this.width * this.height).compareTo(other.width * other.height)
return (this.width * this.height).compareTo(other.width * other.height)
}

View File

@ -26,6 +26,7 @@ class VisionCameraProxy(context: ReactApplicationContext) {
} }
} }
} }
@DoNotStrip @DoNotStrip
@Keep @Keep
private var mHybridData: HybridData private var mHybridData: HybridData
@ -69,12 +70,9 @@ class VisionCameraProxy(context: ReactApplicationContext) {
@DoNotStrip @DoNotStrip
@Keep @Keep
fun getFrameProcessorPlugin(name: String, options: Map<String, Any>): FrameProcessorPlugin { fun getFrameProcessorPlugin(name: String, options: Map<String, Any>): FrameProcessorPlugin =
return FrameProcessorPluginRegistry.getPlugin(name, options) FrameProcessorPluginRegistry.getPlugin(name, options)
}
// private C++ funcs // private C++ funcs
private external fun initHybrid(jsContext: Long, private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData
jsCallInvokerHolder: CallInvokerHolderImpl,
scheduler: VisionCameraScheduler): HybridData
} }

View File

@ -2,7 +2,7 @@ package com.mrousavy.camera.parsers
import android.hardware.camera2.CameraDevice import android.hardware.camera2.CameraDevice
enum class CameraDeviceError(override val unionValue: String): JSUnionValue { enum class CameraDeviceError(override val unionValue: String) : JSUnionValue {
CAMERA_ALREADY_IN_USE("camera-already-in-use"), CAMERA_ALREADY_IN_USE("camera-already-in-use"),
TOO_MANY_OPEN_CAMERAS("too-many-open-cameras"), TOO_MANY_OPEN_CAMERAS("too-many-open-cameras"),
CAMERA_IS_DISABLED_BY_ANDROID("camera-is-disabled-by-android"), CAMERA_IS_DISABLED_BY_ANDROID("camera-is-disabled-by-android"),
@ -11,8 +11,8 @@ enum class CameraDeviceError(override val unionValue: String): JSUnionValue {
DISCONNECTED("camera-has-been-disconnected"); DISCONNECTED("camera-has-been-disconnected");
companion object { companion object {
fun fromCameraDeviceError(cameraDeviceError: Int): CameraDeviceError { fun fromCameraDeviceError(cameraDeviceError: Int): CameraDeviceError =
return when (cameraDeviceError) { when (cameraDeviceError) {
CameraDevice.StateCallback.ERROR_CAMERA_IN_USE -> CAMERA_ALREADY_IN_USE CameraDevice.StateCallback.ERROR_CAMERA_IN_USE -> CAMERA_ALREADY_IN_USE
CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE -> TOO_MANY_OPEN_CAMERAS CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE -> TOO_MANY_OPEN_CAMERAS
CameraDevice.StateCallback.ERROR_CAMERA_DISABLED -> CAMERA_IS_DISABLED_BY_ANDROID CameraDevice.StateCallback.ERROR_CAMERA_DISABLED -> CAMERA_IS_DISABLED_BY_ANDROID
@ -21,5 +21,4 @@ enum class CameraDeviceError(override val unionValue: String): JSUnionValue {
else -> UNKNOWN_CAMERA_DEVICE_ERROR else -> UNKNOWN_CAMERA_DEVICE_ERROR
} }
} }
}
} }

View File

@ -2,19 +2,18 @@ package com.mrousavy.camera.parsers
import com.mrousavy.camera.InvalidTypeScriptUnionError import com.mrousavy.camera.InvalidTypeScriptUnionError
enum class Flash(override val unionValue: String): JSUnionValue { enum class Flash(override val unionValue: String) : JSUnionValue {
OFF("off"), OFF("off"),
ON("on"), ON("on"),
AUTO("auto"); AUTO("auto");
companion object: JSUnionValue.Companion<Flash> { companion object : JSUnionValue.Companion<Flash> {
override fun fromUnionValue(unionValue: String?): Flash { override fun fromUnionValue(unionValue: String?): Flash =
return when (unionValue) { when (unionValue) {
"off" -> OFF "off" -> OFF
"on" -> ON "on" -> ON
"auto" -> AUTO "auto" -> AUTO
else -> throw InvalidTypeScriptUnionError("flash", unionValue ?: "(null)") else -> throw InvalidTypeScriptUnionError("flash", unionValue ?: "(null)")
} }
} }
}
} }

View File

@ -2,16 +2,16 @@ package com.mrousavy.camera.parsers
import android.hardware.camera2.CameraCharacteristics import android.hardware.camera2.CameraCharacteristics
enum class HardwareLevel(override val unionValue: String): JSUnionValue { enum class HardwareLevel(override val unionValue: String) : JSUnionValue {
LEGACY("legacy"), LEGACY("legacy"),
LIMITED("limited"), LIMITED("limited"),
EXTERNAL("external"), EXTERNAL("limited"),
FULL("full"), FULL("full"),
LEVEL_3("level-3"); LEVEL_3("full");
companion object { companion object {
fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel { fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel =
return when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) {
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY -> LEGACY CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY -> LEGACY
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED -> LIMITED CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED -> LIMITED
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL -> EXTERNAL CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL -> EXTERNAL
@ -20,5 +20,4 @@ enum class HardwareLevel(override val unionValue: String): JSUnionValue {
else -> LEGACY else -> LEGACY
} }
} }
}
} }

View File

@ -2,19 +2,18 @@ package com.mrousavy.camera.parsers
import android.hardware.camera2.CameraCharacteristics import android.hardware.camera2.CameraCharacteristics
enum class LensFacing(override val unionValue: String): JSUnionValue { enum class LensFacing(override val unionValue: String) : JSUnionValue {
BACK("back"), BACK("back"),
FRONT("front"), FRONT("front"),
EXTERNAL("external"); EXTERNAL("external");
companion object { companion object {
fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): LensFacing { fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): LensFacing =
return when (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)!!) { when (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)!!) {
CameraCharacteristics.LENS_FACING_BACK -> BACK CameraCharacteristics.LENS_FACING_BACK -> BACK
CameraCharacteristics.LENS_FACING_FRONT -> FRONT CameraCharacteristics.LENS_FACING_FRONT -> FRONT
CameraCharacteristics.LENS_FACING_EXTERNAL -> EXTERNAL CameraCharacteristics.LENS_FACING_EXTERNAL -> EXTERNAL
else -> EXTERNAL else -> EXTERNAL
} }
} }
}
} }

View File

@ -2,20 +2,19 @@ package com.mrousavy.camera.parsers
import android.hardware.camera2.CameraCharacteristics import android.hardware.camera2.CameraCharacteristics
enum class Orientation(override val unionValue: String): JSUnionValue { enum class Orientation(override val unionValue: String) : JSUnionValue {
PORTRAIT("portrait"), PORTRAIT("portrait"),
LANDSCAPE_RIGHT("landscape-right"), LANDSCAPE_RIGHT("landscape-right"),
PORTRAIT_UPSIDE_DOWN("portrait-upside-down"), PORTRAIT_UPSIDE_DOWN("portrait-upside-down"),
LANDSCAPE_LEFT("landscape-left"); LANDSCAPE_LEFT("landscape-left");
fun toDegrees(): Int { fun toDegrees(): Int =
return when(this) { when (this) {
PORTRAIT -> 0 PORTRAIT -> 0
LANDSCAPE_RIGHT -> 90 LANDSCAPE_RIGHT -> 90
PORTRAIT_UPSIDE_DOWN -> 180 PORTRAIT_UPSIDE_DOWN -> 180
LANDSCAPE_LEFT -> 270 LANDSCAPE_LEFT -> 270
} }
}
fun toSensorRelativeOrientation(cameraCharacteristics: CameraCharacteristics): Orientation { fun toSensorRelativeOrientation(cameraCharacteristics: CameraCharacteristics): Orientation {
val sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! val sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
@ -33,24 +32,22 @@ enum class Orientation(override val unionValue: String): JSUnionValue {
return fromRotationDegrees(newRotationDegrees) return fromRotationDegrees(newRotationDegrees)
} }
companion object: JSUnionValue.Companion<Orientation> { companion object : JSUnionValue.Companion<Orientation> {
override fun fromUnionValue(unionValue: String?): Orientation? { override fun fromUnionValue(unionValue: String?): Orientation? =
return when (unionValue) { when (unionValue) {
"portrait" -> PORTRAIT "portrait" -> PORTRAIT
"landscape-right" -> LANDSCAPE_RIGHT "landscape-right" -> LANDSCAPE_RIGHT
"portrait-upside-down" -> PORTRAIT_UPSIDE_DOWN "portrait-upside-down" -> PORTRAIT_UPSIDE_DOWN
"landscape-left" -> LANDSCAPE_LEFT "landscape-left" -> LANDSCAPE_LEFT
else -> null else -> null
} }
}
fun fromRotationDegrees(rotationDegrees: Int): Orientation { fun fromRotationDegrees(rotationDegrees: Int): Orientation =
return when (rotationDegrees) { when (rotationDegrees) {
in 45..135 -> LANDSCAPE_RIGHT in 45..135 -> LANDSCAPE_RIGHT
in 135..225 -> PORTRAIT_UPSIDE_DOWN in 135..225 -> PORTRAIT_UPSIDE_DOWN
in 225..315 -> LANDSCAPE_LEFT in 225..315 -> LANDSCAPE_LEFT
else -> PORTRAIT else -> PORTRAIT
} }
} }
}
} }

View File

@ -2,18 +2,17 @@ package com.mrousavy.camera.parsers
import android.content.pm.PackageManager import android.content.pm.PackageManager
enum class PermissionStatus(override val unionValue: String): JSUnionValue { enum class PermissionStatus(override val unionValue: String) : JSUnionValue {
DENIED("denied"), DENIED("denied"),
NOT_DETERMINED("not-determined"), NOT_DETERMINED("not-determined"),
GRANTED("granted"); GRANTED("granted");
companion object { companion object {
fun fromPermissionStatus(status: Int): PermissionStatus { fun fromPermissionStatus(status: Int): PermissionStatus =
return when (status) { when (status) {
PackageManager.PERMISSION_DENIED -> DENIED PackageManager.PERMISSION_DENIED -> DENIED
PackageManager.PERMISSION_GRANTED -> GRANTED PackageManager.PERMISSION_GRANTED -> GRANTED
else -> NOT_DETERMINED else -> NOT_DETERMINED
} }
} }
}
} }

View File

@ -4,7 +4,7 @@ import android.graphics.ImageFormat
import com.mrousavy.camera.PixelFormatNotSupportedError import com.mrousavy.camera.PixelFormatNotSupportedError
@Suppress("FoldInitializerAndIfToElvis") @Suppress("FoldInitializerAndIfToElvis")
enum class PixelFormat(override val unionValue: String): JSUnionValue { enum class PixelFormat(override val unionValue: String) : JSUnionValue {
YUV("yuv"), YUV("yuv"),
RGB("rgb"), RGB("rgb"),
DNG("dng"), DNG("dng"),
@ -25,19 +25,18 @@ enum class PixelFormat(override val unionValue: String): JSUnionValue {
return result return result
} }
companion object: JSUnionValue.Companion<PixelFormat> { companion object : JSUnionValue.Companion<PixelFormat> {
fun fromImageFormat(imageFormat: Int): PixelFormat { fun fromImageFormat(imageFormat: Int): PixelFormat =
return when (imageFormat) { when (imageFormat) {
ImageFormat.YUV_420_888 -> YUV ImageFormat.YUV_420_888 -> YUV
ImageFormat.JPEG, ImageFormat.DEPTH_JPEG -> RGB ImageFormat.JPEG, ImageFormat.DEPTH_JPEG -> RGB
ImageFormat.RAW_SENSOR -> DNG ImageFormat.RAW_SENSOR -> DNG
ImageFormat.PRIVATE -> NATIVE ImageFormat.PRIVATE -> NATIVE
else -> UNKNOWN else -> UNKNOWN
} }
}
override fun fromUnionValue(unionValue: String?): PixelFormat? { override fun fromUnionValue(unionValue: String?): PixelFormat? =
return when (unionValue) { when (unionValue) {
"yuv" -> YUV "yuv" -> YUV
"rgb" -> RGB "rgb" -> RGB
"dng" -> DNG "dng" -> DNG
@ -46,5 +45,4 @@ enum class PixelFormat(override val unionValue: String): JSUnionValue {
else -> null else -> null
} }
} }
}
} }

View File

@ -1,18 +1,17 @@
package com.mrousavy.camera.parsers package com.mrousavy.camera.parsers
enum class QualityPrioritization(override val unionValue: String): JSUnionValue { enum class QualityPrioritization(override val unionValue: String) : JSUnionValue {
SPEED("speed"), SPEED("speed"),
BALANCED("balanced"), BALANCED("balanced"),
QUALITY("quality"); QUALITY("quality");
companion object: JSUnionValue.Companion<QualityPrioritization> { companion object : JSUnionValue.Companion<QualityPrioritization> {
override fun fromUnionValue(unionValue: String?): QualityPrioritization { override fun fromUnionValue(unionValue: String?): QualityPrioritization =
return when (unionValue) { when (unionValue) {
"speed" -> SPEED "speed" -> SPEED
"balanced" -> BALANCED "balanced" -> BALANCED
"quality" -> QUALITY "quality" -> QUALITY
else -> BALANCED else -> BALANCED
} }
} }
}
} }

View File

@ -1,16 +1,15 @@
package com.mrousavy.camera.parsers package com.mrousavy.camera.parsers
enum class Torch(override val unionValue: String): JSUnionValue { enum class Torch(override val unionValue: String) : JSUnionValue {
OFF("off"), OFF("off"),
ON("on"); ON("on");
companion object: JSUnionValue.Companion<Torch> { companion object : JSUnionValue.Companion<Torch> {
override fun fromUnionValue(unionValue: String?): Torch { override fun fromUnionValue(unionValue: String?): Torch =
return when (unionValue) { when (unionValue) {
"off" -> OFF "off" -> OFF
"on" -> ON "on" -> ON
else -> OFF else -> OFF
} }
} }
}
} }

View File

@ -2,24 +2,22 @@ package com.mrousavy.camera.parsers
import android.media.MediaRecorder import android.media.MediaRecorder
enum class VideoCodec(override val unionValue: String): JSUnionValue { enum class VideoCodec(override val unionValue: String) : JSUnionValue {
H264("h264"), H264("h264"),
H265("h265"); H265("h265");
fun toVideoCodec(): Int { fun toVideoCodec(): Int =
return when (this) { when (this) {
H264 -> MediaRecorder.VideoEncoder.H264 H264 -> MediaRecorder.VideoEncoder.H264
H265 -> MediaRecorder.VideoEncoder.HEVC H265 -> MediaRecorder.VideoEncoder.HEVC
} }
}
companion object: JSUnionValue.Companion<VideoCodec> { companion object : JSUnionValue.Companion<VideoCodec> {
override fun fromUnionValue(unionValue: String?): VideoCodec { override fun fromUnionValue(unionValue: String?): VideoCodec =
return when (unionValue) { when (unionValue) {
"h264" -> H264 "h264" -> H264
"h265" -> H265 "h265" -> H265
else -> H264 else -> H264
} }
} }
}
} }

View File

@ -2,24 +2,22 @@ package com.mrousavy.camera.parsers
import com.mrousavy.camera.InvalidTypeScriptUnionError import com.mrousavy.camera.InvalidTypeScriptUnionError
enum class VideoFileType(override val unionValue: String): JSUnionValue { enum class VideoFileType(override val unionValue: String) : JSUnionValue {
MOV("mov"), MOV("mov"),
MP4("mp4"); MP4("mp4");
fun toExtension(): String { fun toExtension(): String =
return when (this) { when (this) {
MOV -> ".mov" MOV -> ".mov"
MP4 -> ".mp4" MP4 -> ".mp4"
} }
}
companion object: JSUnionValue.Companion<VideoFileType> { companion object : JSUnionValue.Companion<VideoFileType> {
override fun fromUnionValue(unionValue: String?): VideoFileType { override fun fromUnionValue(unionValue: String?): VideoFileType =
return when (unionValue) { when (unionValue) {
"mov" -> MOV "mov" -> MOV
"mp4" -> MP4 "mp4" -> MP4
else -> throw InvalidTypeScriptUnionError("fileType", unionValue ?: "(null)") else -> throw InvalidTypeScriptUnionError("fileType", unionValue ?: "(null)")
} }
} }
}
} }

View File

@ -6,54 +6,49 @@ import android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_
import android.hardware.camera2.CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF import android.hardware.camera2.CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF
import android.hardware.camera2.CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON import android.hardware.camera2.CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON
enum class VideoStabilizationMode(override val unionValue: String): JSUnionValue { enum class VideoStabilizationMode(override val unionValue: String) : JSUnionValue {
OFF("off"), OFF("off"),
STANDARD("standard"), STANDARD("standard"),
CINEMATIC("cinematic"), CINEMATIC("cinematic"),
CINEMATIC_EXTENDED("cinematic-extended"); CINEMATIC_EXTENDED("cinematic-extended");
fun toDigitalStabilizationMode(): Int { fun toDigitalStabilizationMode(): Int =
return when (this) { when (this) {
OFF -> CONTROL_VIDEO_STABILIZATION_MODE_OFF OFF -> CONTROL_VIDEO_STABILIZATION_MODE_OFF
STANDARD -> CONTROL_VIDEO_STABILIZATION_MODE_ON STANDARD -> CONTROL_VIDEO_STABILIZATION_MODE_ON
CINEMATIC -> 2 /* CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION */ CINEMATIC -> 2 // TODO: CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION
else -> CONTROL_VIDEO_STABILIZATION_MODE_OFF else -> CONTROL_VIDEO_STABILIZATION_MODE_OFF
} }
}
fun toOpticalStabilizationMode(): Int { fun toOpticalStabilizationMode(): Int =
return when (this) { when (this) {
OFF -> LENS_OPTICAL_STABILIZATION_MODE_OFF OFF -> LENS_OPTICAL_STABILIZATION_MODE_OFF
CINEMATIC_EXTENDED -> LENS_OPTICAL_STABILIZATION_MODE_ON CINEMATIC_EXTENDED -> LENS_OPTICAL_STABILIZATION_MODE_ON
else -> LENS_OPTICAL_STABILIZATION_MODE_OFF else -> LENS_OPTICAL_STABILIZATION_MODE_OFF
} }
}
companion object: JSUnionValue.Companion<VideoStabilizationMode> { companion object : JSUnionValue.Companion<VideoStabilizationMode> {
override fun fromUnionValue(unionValue: String?): VideoStabilizationMode? { override fun fromUnionValue(unionValue: String?): VideoStabilizationMode? =
return when (unionValue) { when (unionValue) {
"off" -> OFF "off" -> OFF
"standard" -> STANDARD "standard" -> STANDARD
"cinematic" -> CINEMATIC "cinematic" -> CINEMATIC
"cinematic-extended" -> CINEMATIC_EXTENDED "cinematic-extended" -> CINEMATIC_EXTENDED
else -> null else -> null
} }
}
fun fromDigitalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode { fun fromDigitalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode =
return when (stabiliazionMode) { when (stabiliazionMode) {
CONTROL_VIDEO_STABILIZATION_MODE_OFF -> OFF CONTROL_VIDEO_STABILIZATION_MODE_OFF -> OFF
CONTROL_VIDEO_STABILIZATION_MODE_ON -> STANDARD CONTROL_VIDEO_STABILIZATION_MODE_ON -> STANDARD
CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION -> CINEMATIC CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION -> CINEMATIC
else -> OFF else -> OFF
} }
} fun fromOpticalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode =
fun fromOpticalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode { when (stabiliazionMode) {
return when (stabiliazionMode) {
LENS_OPTICAL_STABILIZATION_MODE_OFF -> OFF LENS_OPTICAL_STABILIZATION_MODE_OFF -> OFF
LENS_OPTICAL_STABILIZATION_MODE_ON -> CINEMATIC_EXTENDED LENS_OPTICAL_STABILIZATION_MODE_ON -> CINEMATIC_EXTENDED
else -> OFF else -> OFF
} }
} }
}
} }

View File

@ -1,16 +1,8 @@
import * as React from 'react'; import * as React from 'react';
import { useRef, useState, useMemo, useCallback } from 'react'; import { useRef, useState, useCallback } from 'react';
import { StyleSheet, Text, View } from 'react-native'; import { StyleSheet, Text, View } from 'react-native';
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler'; import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler';
import { import { CameraRuntimeError, PhotoFile, useCameraDevice, useCameraFormat, useFrameProcessor, VideoFile } from 'react-native-vision-camera';
CameraDeviceFormat,
CameraRuntimeError,
PhotoFile,
sortFormats,
useCameraDevices,
useFrameProcessor,
VideoFile,
} from 'react-native-vision-camera';
import { Camera } from 'react-native-vision-camera'; import { Camera } from 'react-native-vision-camera';
import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING } from './Constants'; import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING } from './Constants';
import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated'; import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated';
@ -53,59 +45,24 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
const [enableNightMode, setEnableNightMode] = useState(false); const [enableNightMode, setEnableNightMode] = useState(false);
// camera format settings // camera format settings
const devices = useCameraDevices(); const device = useCameraDevice(cameraPosition);
const device = devices[cameraPosition]; const format = useCameraFormat(device, {
const formats = useMemo<CameraDeviceFormat[]>(() => { fps: {
if (device?.formats == null) return []; target: 60,
return device.formats.sort(sortFormats); priority: 1,
}, [device?.formats]); },
});
//#region Memos //#region Memos
const [is60Fps, setIs60Fps] = useState(true); const [targetFps, setTargetFps] = useState(30);
const fps = useMemo(() => { const fps = Math.min(format?.maxFps ?? 1, targetFps);
if (!is60Fps) return 30;
if (enableNightMode && !device?.supportsLowLightBoost) {
// User has enabled Night Mode, but Night Mode is not natively supported, so we simulate it by lowering the frame rate.
return 30;
}
const supportsHdrAt60Fps = formats.some((f) => f.supportsVideoHDR && f.maxFps >= 60);
if (enableHdr && !supportsHdrAt60Fps) {
// User has enabled HDR, but HDR is not supported at 60 FPS.
return 30;
}
const supports60Fps = formats.some((f) => f.maxFps >= 60);
if (!supports60Fps) {
// 60 FPS is not supported by any format.
return 30;
}
// If nothing blocks us from using it, we default to 60 FPS.
return 60;
}, [device?.supportsLowLightBoost, enableHdr, enableNightMode, formats, is60Fps]);
const supportsCameraFlipping = useMemo(() => devices.back != null && devices.front != null, [devices.back, devices.front]);
const supportsFlash = device?.hasFlash ?? false; const supportsFlash = device?.hasFlash ?? false;
const supportsHdr = useMemo(() => formats.some((f) => f.supportsVideoHDR || f.supportsPhotoHDR), [formats]); const supportsHdr = format?.supportsPhotoHDR;
const supports60Fps = useMemo(() => formats.some((f) => f.maxFps >= 60), [formats]); const supports60Fps = (format?.maxFps ?? 0) >= 60;
const canToggleNightMode = enableNightMode const canToggleNightMode = device?.supportsLowLightBoost ?? false;
? true // it's enabled so you have to be able to turn it off again
: (device?.supportsLowLightBoost ?? false) || fps > 30; // either we have native support, or we can lower the FPS
//#endregion //#endregion
const format = useMemo(() => {
let result = formats;
if (enableHdr) {
// We only filter by HDR capable formats if HDR is set to true.
// Otherwise we ignore the `supportsVideoHDR` property and accept formats which support HDR `true` or `false`
result = result.filter((f) => f.supportsVideoHDR || f.supportsPhotoHDR);
}
// find the first format that includes the given FPS
return result.find((f) => f.maxFps >= fps);
}, [formats, fps, enableHdr]);
//#region Animated Zoom //#region Animated Zoom
// This just maps the zoom factor to a percentage value. // This just maps the zoom factor to a percentage value.
// so e.g. for [min, neutr., max] values [1, 2, 128] this would result in [0, 0.0081, 1] // so e.g. for [min, neutr., max] values [1, 2, 128] this would result in [0, 0.0081, 1]
@ -249,22 +206,17 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
<StatusBarBlurBackground /> <StatusBarBlurBackground />
<View style={styles.rightButtonRow}> <View style={styles.rightButtonRow}>
{supportsCameraFlipping && (
<PressableOpacity style={styles.button} onPress={onFlipCameraPressed} disabledOpacity={0.4}> <PressableOpacity style={styles.button} onPress={onFlipCameraPressed} disabledOpacity={0.4}>
<IonIcon name="camera-reverse" color="white" size={24} /> <IonIcon name="camera-reverse" color="white" size={24} />
</PressableOpacity> </PressableOpacity>
)}
{supportsFlash && ( {supportsFlash && (
<PressableOpacity style={styles.button} onPress={onFlashPressed} disabledOpacity={0.4}> <PressableOpacity style={styles.button} onPress={onFlashPressed} disabledOpacity={0.4}>
<IonIcon name={flash === 'on' ? 'flash' : 'flash-off'} color="white" size={24} /> <IonIcon name={flash === 'on' ? 'flash' : 'flash-off'} color="white" size={24} />
</PressableOpacity> </PressableOpacity>
)} )}
{supports60Fps && ( {supports60Fps && (
<PressableOpacity style={styles.button} onPress={() => setIs60Fps(!is60Fps)}> <PressableOpacity style={styles.button} onPress={() => setTargetFps((t) => (t === 30 ? 60 : 30))}>
<Text style={styles.text}> <Text style={styles.text}>{`${targetFps} FPS`}</Text>
{is60Fps ? '60' : '30'}
{'\n'}FPS
</Text>
</PressableOpacity> </PressableOpacity>
)} )}
{supportsHdr && ( {supportsHdr && (

View File

@ -10,6 +10,7 @@
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
#import <React/RCTEventEmitter.h>
#import <React/RCTFPSGraph.h> #import <React/RCTFPSGraph.h>
#import <React/RCTLog.h> #import <React/RCTLog.h>
#import <React/RCTUIManager.h> #import <React/RCTUIManager.h>

View File

@ -0,0 +1,15 @@
//
// CameraDevicesManager.m
// VisionCamera
//
// Created by Marc Rousavy on 19.09.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <React/RCTEventEmitter.h>
#import <React/RCTUtils.h>
@interface RCT_EXTERN_REMAP_MODULE (CameraDevices, CameraDevicesManager, RCTEventEmitter)
@end

View File

@ -0,0 +1,83 @@
//
// CameraDevicesManager.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.09.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
@objc(CameraDevicesManager)
class CameraDevicesManager: RCTEventEmitter {
private let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(),
mediaType: .video,
position: .unspecified)
private var observer: NSKeyValueObservation?
private let devicesChangedEventName = "CameraDevicesChanged"
override init() {
super.init()
observer = discoverySession.observe(\.devices) { _, _ in
self.sendEvent(withName: self.devicesChangedEventName, body: self.getDevicesJson())
}
}
override func invalidate() {
observer?.invalidate()
}
override func supportedEvents() -> [String]! {
return [devicesChangedEventName]
}
override class func requiresMainQueueSetup() -> Bool {
return false
}
override func constantsToExport() -> [AnyHashable: Any]! {
return [
"availableCameraDevices": getDevicesJson(),
]
}
private func getDevicesJson() -> [[String: Any]] {
return discoverySession.devices.map {
return [
"id": $0.uniqueID,
"devices": $0.physicalDevices.map(\.deviceType.descriptor),
"position": $0.position.descriptor,
"name": $0.localizedName,
"hasFlash": $0.hasFlash,
"hasTorch": $0.hasTorch,
"minZoom": $0.minAvailableVideoZoomFactor,
"neutralZoom": $0.neutralZoomFactor,
"maxZoom": $0.maxAvailableVideoZoomFactor,
"isMultiCam": $0.isMultiCam,
"supportsDepthCapture": false, // TODO: supportsDepthCapture
"supportsRawCapture": false, // TODO: supportsRawCapture
"supportsLowLightBoost": $0.isLowLightBoostSupported,
"supportsFocus": $0.isFocusPointOfInterestSupported,
"hardwareLevel": "full",
"sensorOrientation": "portrait", // TODO: Sensor Orientation?
"formats": $0.formats.map { format -> [String: Any] in
format.toDictionary()
},
]
}
}
private static func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
deviceTypes.append(.builtInTripleCamera)
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
return deviceTypes
}
}

View File

@ -19,7 +19,8 @@ RCT_EXTERN_METHOD(getMicrophonePermissionStatus : (RCTPromiseResolveBlock)resolv
RCT_EXTERN_METHOD(requestCameraPermission : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject); RCT_EXTERN_METHOD(requestCameraPermission : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(requestMicrophonePermission : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject); RCT_EXTERN_METHOD(requestMicrophonePermission : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(getAvailableCameraDevices : (RCTPromiseResolveBlock)resolve reject : (RCTPromiseRejectBlock)reject); RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(getAvailableCameraDevices);
RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(installFrameProcessorBindings);
// Camera View Properties // Camera View Properties
RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL); RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL);
@ -75,7 +76,4 @@ RCT_EXTERN_METHOD(focus
: (RCTPromiseResolveBlock)resolve reject : (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject); : (RCTPromiseRejectBlock)reject);
// Static Methods
RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(installFrameProcessorBindings);
@end @end

View File

@ -79,38 +79,6 @@ final class CameraViewManager: RCTViewManager {
component.focus(point: CGPoint(x: x.doubleValue, y: y.doubleValue), promise: promise) component.focus(point: CGPoint(x: x.doubleValue, y: y.doubleValue), promise: promise)
} }
@objc
final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
withPromise(resolve: resolve, reject: reject) {
let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(),
mediaType: .video,
position: .unspecified)
return discoverySession.devices.map {
return [
"id": $0.uniqueID,
"devices": $0.physicalDevices.map(\.deviceType.descriptor),
"position": $0.position.descriptor,
"name": $0.localizedName,
"hasFlash": $0.hasFlash,
"hasTorch": $0.hasTorch,
"minZoom": $0.minAvailableVideoZoomFactor,
"neutralZoom": $0.neutralZoomFactor,
"maxZoom": $0.maxAvailableVideoZoomFactor,
"isMultiCam": $0.isMultiCam,
"supportsDepthCapture": false, // TODO: supportsDepthCapture
"supportsRawCapture": false, // TODO: supportsRawCapture
"supportsLowLightBoost": $0.isLowLightBoostSupported,
"supportsFocus": $0.isFocusPointOfInterestSupported,
"hardwareLevel": "full",
"sensorOrientation": "portrait", // TODO: Sensor Orientation?
"formats": $0.formats.map { format -> [String: Any] in
format.toDictionary()
},
]
}
}
}
@objc @objc
final func getCameraPermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) { final func getCameraPermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
withPromise(resolve: resolve, reject: reject) { withPromise(resolve: resolve, reject: reject) {
@ -150,17 +118,4 @@ final class CameraViewManager: RCTViewManager {
return bridge.uiManager.view(forReactTag: tag) as! CameraView return bridge.uiManager.view(forReactTag: tag) as! CameraView
// swiftlint:enable force_cast // swiftlint:enable force_cast
} }
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
deviceTypes.append(.builtInTripleCamera)
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
return deviceTypes
}
} }

View File

@ -17,7 +17,7 @@ extension AVCaptureDevice.Position {
case .front: case .front:
return "front" return "front"
case .unspecified: case .unspecified:
return "unspecified" return "external"
@unknown default: @unknown default:
fatalError("AVCaptureDevice.Position has unknown state.") fatalError("AVCaptureDevice.Position has unknown state.")
} }

View File

@ -11,6 +11,8 @@
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; }; B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; }; B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; }; B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
B8446E4D2ABA147C00E56077 /* CameraDevicesManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */; };
B8446E502ABA14C900E56077 /* CameraDevicesManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; }; B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; }; B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; }; B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; };
@ -85,6 +87,8 @@
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; }; B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; }; B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; }; B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraDevicesManager.swift; sourceTree = "<group>"; };
B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraDevicesManager.m; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; }; B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; }; B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; }; B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
@ -181,6 +185,8 @@
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */, B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
B887515F25E0102000DB86D6 /* CameraViewManager.m */, B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */, B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */,
B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */, B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
B83D5EE629377117000AFD2F /* PreviewView.swift */, B83D5EE629377117000AFD2F /* PreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */, B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
@ -407,11 +413,13 @@
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */, B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */, B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */, B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */,
B8446E502ABA14C900E56077 /* CameraDevicesManager.m in Sources */,
B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */, B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */,
B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */, B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */, B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */, B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */,
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */, B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
B8446E4D2ABA147C00E56077 /* CameraDevicesManager.swift in Sources */,
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */, B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */, B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */, B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */,

View File

@ -1,7 +1,7 @@
#!/bin/bash #!/bin/bash
if which ktlint >/dev/null; then if which ktlint >/dev/null; then
cd android && ktlint -F ./**/*.kt* cd android && ktlint --color --relative --editorconfig=./.editorconfig -F ./**/*.kt*
else else
echo "warning: KTLint not installed, download from https://github.com/pinterest/ktlint" echo "warning: KTLint not installed, download from https://github.com/pinterest/ktlint"
fi fi

View File

@ -9,6 +9,8 @@ import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
import type { Point } from './Point'; import type { Point } from './Point';
import type { RecordVideoOptions, VideoFile } from './VideoFile'; import type { RecordVideoOptions, VideoFile } from './VideoFile';
import { VisionCameraProxy } from './FrameProcessorPlugins'; import { VisionCameraProxy } from './FrameProcessorPlugins';
import { CameraDevices } from './CameraDevices';
import type { EmitterSubscription } from 'react-native';
//#region Types //#region Types
export type CameraPermissionStatus = 'granted' | 'not-determined' | 'denied' | 'restricted'; export type CameraPermissionStatus = 'granted' | 'not-determined' | 'denied' | 'restricted';
@ -37,7 +39,7 @@ type RefType = React.Component<NativeCameraViewProps> & Readonly<NativeMethods>;
* *
* The `<Camera>` component's most important (and therefore _required_) properties are: * The `<Camera>` component's most important (and therefore _required_) properties are:
* *
* * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevices | useCameraDevices()} hook, or manually by using the {@linkcode Camera.getAvailableCameraDevices Camera.getAvailableCameraDevices()} function. * * {@linkcode CameraProps.device | device}: Specifies the {@linkcode CameraDevice} to use. Get a {@linkcode CameraDevice} by using the {@linkcode useCameraDevice | useCameraDevice()} hook, or manually by using the {@linkcode CameraDevices.getAvailableCameraDevices CameraDevices.getAvailableCameraDevices()} function.
* * {@linkcode CameraProps.isActive | isActive}: A boolean value that specifies whether the Camera should actively stream video frames or not. This can be compared to a Video component, where `isActive` specifies whether the video is paused or not. If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again. * * {@linkcode CameraProps.isActive | isActive}: A boolean value that specifies whether the Camera should actively stream video frames or not. This can be compared to a Video component, where `isActive` specifies whether the video is paused or not. If you fully unmount the `<Camera>` component instead of using `isActive={false}`, the Camera will take a bit longer to start again.
* *
* @example * @example
@ -116,12 +118,6 @@ export class Camera extends React.PureComponent<CameraProps> {
/** /**
* Start a new video recording. * Start a new video recording.
* *
* Records in the following formats:
* * **iOS**: QuickTime (`.mov`)
* * **Android**: MPEG4 (`.mp4`)
*
* @blocking This function is synchronized/blocking.
*
* @throws {@linkcode CameraCaptureError} When any kind of error occured while starting the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error * @throws {@linkcode CameraCaptureError} When any kind of error occured while starting the video recording. Use the {@linkcode CameraCaptureError.code | code} property to get the actual error
* *
* @example * @example
@ -144,8 +140,8 @@ export class Camera extends React.PureComponent<CameraProps> {
if (error != null) return onRecordingError(error); if (error != null) return onRecordingError(error);
if (video != null) return onRecordingFinished(video); if (video != null) return onRecordingFinished(video);
}; };
// TODO: Use TurboModules to either make this a sync invokation, or make it async.
try { try {
// TODO: Use TurboModules to make this awaitable.
CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback); CameraModule.startRecording(this.handle, passThroughOptions, onRecordCallback);
} catch (e) { } catch (e) {
throw tryParseNativeCameraError(e); throw tryParseNativeCameraError(e);
@ -231,8 +227,8 @@ export class Camera extends React.PureComponent<CameraProps> {
/** /**
* Focus the camera to a specific point in the coordinate system. * Focus the camera to a specific point in the coordinate system.
* @param {Point} point The point to focus to. This should be relative to the Camera view's coordinate system, * @param {Point} point The point to focus to. This should be relative
* and expressed in Pixel on iOS and Points on Android. * to the Camera view's coordinate system and is expressed in points.
* * `(0, 0)` means **top left**. * * `(0, 0)` means **top left**.
* * `(CameraView.width, CameraView.height)` means **bottom right**. * * `(CameraView.width, CameraView.height)` means **bottom right**.
* *
@ -257,28 +253,32 @@ export class Camera extends React.PureComponent<CameraProps> {
//#endregion //#endregion
//#region Static Functions (NativeModule) //#region Static Functions (NativeModule)
/** /**
* Get a list of all available camera devices on the current phone. * Get a list of all available camera devices on the current phone.
* *
* @throws {@linkcode CameraRuntimeError} When any kind of error occurred while getting all available camera devices. Use the {@linkcode CameraRuntimeError.code | code} property to get the actual error * If you use Hooks, use the `useCameraDevices()` hook instead.
*
* * For Camera Devices attached to the phone, it is safe to assume that this will never change.
* * For external Camera Devices (USB cameras, Mac continuity cameras, etc.) the available Camera Devices could change over time when the external Camera device gets plugged in or plugged out, so use {@link addCameraDevicesChangedListener | addCameraDevicesChangedListener(...)} to listen for such changes.
*
* @example * @example
* ```ts * ```ts
* const devices = await Camera.getAvailableCameraDevices() * const devices = Camera.getAvailableCameraDevices()
* const filtered = devices.filter((d) => matchesMyExpectations(d)) * const backCameras = devices.filter((d) => d.position === "back")
* const sorted = devices.sort(sortDevicesByAmountOfCameras) * const frontCameras = devices.filter((d) => d.position === "front")
* return {
* back: sorted.find((d) => d.position === "back"),
* front: sorted.find((d) => d.position === "front")
* }
* ``` * ```
*/ */
public static async getAvailableCameraDevices(): Promise<CameraDevice[]> { public static getAvailableCameraDevices(): CameraDevice[] {
try { return CameraDevices.getAvailableCameraDevices();
return await CameraModule.getAvailableCameraDevices();
} catch (e) {
throw tryParseNativeCameraError(e);
} }
/**
* Adds a listener that gets called every time the Camera Devices change, for example
* when an external Camera Device (USB or continuity Camera) gets plugged in or plugged out.
*
* If you use Hooks, use the `useCameraDevices()` hook instead.
*/
public static addCameraDevicesChangedListener(listener: (newDevices: CameraDevice[]) => void): EmitterSubscription {
return CameraDevices.addCameraDevicesChangedListener(listener);
} }
/** /**
* Gets the current Camera Permission Status. Check this before mounting the Camera to ensure * Gets the current Camera Permission Status. Check this before mounting the Camera to ensure

View File

@ -1,48 +1,30 @@
import type { CameraPosition } from './CameraPosition';
import { Orientation } from './Orientation'; import { Orientation } from './Orientation';
import type { PixelFormat } from './PixelFormat'; import type { PixelFormat } from './PixelFormat';
/**
* Represents the camera device position.
*
* * `"back"`: Indicates that the device is physically located on the back of the system hardware
* * `"front"`: Indicates that the device is physically located on the front of the system hardware
* * `"external"`: The camera device is an external camera, and has no fixed facing relative to the device's screen.
*/
export type CameraPosition = 'front' | 'back' | 'external';
/** /**
* Identifiers for a physical camera (one that actually exists on the back/front of the device) * Identifiers for a physical camera (one that actually exists on the back/front of the device)
* *
* * `"ultra-wide-angle-camera"`: A built-in camera with a shorter focal length than that of a wide-angle camera. (focal length below 24mm) * * `"ultra-wide-angle-camera"`: A built-in camera with a shorter focal length than that of a wide-angle camera. (focal length below 24mm)
* * `"wide-angle-camera"`: A built-in wide-angle camera. (focal length between 24mm and 35mm) * * `"wide-angle-camera"`: A built-in wide-angle camera. (focal length between 24mm and 35mm)
* * `"telephoto-camera"`: A built-in camera device with a longer focal length than a wide-angle camera. (focal length above 85mm) * * `"telephoto-camera"`: A built-in camera device with a longer focal length than a wide-angle camera. (focal length above 85mm)
*
* Some Camera devices consist of multiple physical devices. They can be interpreted as _logical devices_, for example:
*
* * `"ultra-wide-angle-camera"` + `"wide-angle-camera"` = **dual wide-angle camera**.
* * `"wide-angle-camera"` + `"telephoto-camera"` = **dual camera**.
* * `"ultra-wide-angle-camera"` + `"wide-angle-camera"` + `"telephoto-camera"` = **triple camera**.
*/ */
export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-camera' | 'telephoto-camera'; export type PhysicalCameraDeviceType = 'ultra-wide-angle-camera' | 'wide-angle-camera' | 'telephoto-camera';
/**
* Identifiers for a logical camera (Combinations of multiple physical cameras to create a single logical camera).
*
* * `"dual-camera"`: A combination of wide-angle and telephoto cameras that creates a capture device.
* * `"dual-wide-camera"`: A device that consists of two cameras of fixed focal length, one ultrawide angle and one wide angle.
* * `"triple-camera"`: A device that consists of three cameras of fixed focal length, one ultrawide angle, one wide angle, and one telephoto.
*/
export type LogicalCameraDeviceType = 'dual-camera' | 'dual-wide-camera' | 'triple-camera';
/**
* Parses an array of physical device types into a single {@linkcode PhysicalCameraDeviceType} or {@linkcode LogicalCameraDeviceType}, depending what matches.
* @method
*/
export const parsePhysicalDeviceTypes = (
physicalDeviceTypes: PhysicalCameraDeviceType[],
): PhysicalCameraDeviceType | LogicalCameraDeviceType => {
if (physicalDeviceTypes.length === 1) {
// @ts-expect-error for very obvious reasons
return physicalDeviceTypes[0];
}
const hasWide = physicalDeviceTypes.includes('wide-angle-camera');
const hasUltra = physicalDeviceTypes.includes('ultra-wide-angle-camera');
const hasTele = physicalDeviceTypes.includes('telephoto-camera');
if (hasTele && hasWide && hasUltra) return 'triple-camera';
if (hasWide && hasUltra) return 'dual-wide-camera';
if (hasWide && hasTele) return 'dual-camera';
throw new Error(`Invalid physical device type combination! ${physicalDeviceTypes.join(' + ')}`);
};
/** /**
* Indicates a format's autofocus system. * Indicates a format's autofocus system.
* *

View File

@ -0,0 +1,25 @@
import { NativeModules, NativeEventEmitter } from 'react-native';
import { CameraDevice } from './CameraDevice';

// Native module that exposes the currently available Camera Devices as a synchronous
// constant and emits a 'CameraDevicesChanged' event whenever that list changes
// (e.g. an external camera gets plugged in or out).
const CameraDevicesManager = NativeModules.CameraDevices as {
  getConstants: () => {
    availableCameraDevices: CameraDevice[];
  };
};

// Seed the cached device list once from the native constants so that all later
// reads are synchronous (no Promise round-trip to native).
const constants = CameraDevicesManager.getConstants();
let devices = constants.availableCameraDevices;

const DEVICES_CHANGED_NAME = 'CameraDevicesChanged';
// NativeEventEmitter's typings don't accept this module shape directly, hence the cast.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const eventEmitter = new NativeEventEmitter(CameraDevicesManager as any);
// Internal listener registered at module load: keeps the cached `devices` list in sync
// so getAvailableCameraDevices() always reflects the latest native state.
eventEmitter.addListener(DEVICES_CHANGED_NAME, (newDevices: CameraDevice[]) => {
  devices = newDevices;
});

export const CameraDevices = {
  // Returns the cached device list (synchronous; kept fresh by the listener above).
  getAvailableCameraDevices: () => devices,
  // Subscribes to device-list changes. Call `.remove()` on the returned
  // subscription to unsubscribe.
  addCameraDevicesChangedListener: (callback: (newDevices: CameraDevice[]) => void) => {
    return eventEmitter.addListener(DEVICES_CHANGED_NAME, callback);
  },
};

View File

@ -1,13 +0,0 @@
/**
* Represents the camera device position.
*
* * `"back"`: Indicates that the device is physically located on the back of the system hardware
* * `"front"`: Indicates that the device is physically located on the front of the system hardware
*
* #### iOS only
* * `"unspecified"`: Indicates that the device's position relative to the system hardware is unspecified
*
* #### Android only
* * `"external"`: The camera device is an external camera, and has no fixed facing relative to the device's screen. (Android only)
*/
export type CameraPosition = 'front' | 'back' | 'unspecified' | 'external';

View File

@ -0,0 +1,43 @@
export interface Filter<T> {
  /**
   * The target value for this specific requirement
   */
  target: T;
  /**
   * The priority of this requirement.
   * Filters with higher priority can take precedence over filters with lower priority.
   *
   * For example, if we have two formats:
   * ```json
   * [
   *   {
   *     "videoWidth": 3840,
   *     "videoHeight": 2160,
   *     "maxFps": 30,
   *     ...
   *   },
   *   {
   *     "videoWidth": 1920,
   *     "videoHeight": 1080,
   *     "maxFps": 60,
   *     ...
   *   }
   * ]
   * ```
   * And your filter looks like this:
   * ```json
   * {
   *   "fps": { "target": 60, "priority": 1 },
   *   "videoSize": { "target": { "width": 4000, "height": 2000 }, "priority": 3 }
   * }
   * ```
   * The 4k format will be chosen since the `videoSize` filter has a higher priority (3) than the `fps` filter (1).
   *
   * To choose the 60 FPS format instead, use a higher priority for the `fps` filter:
   * ```json
   * {
   *   "fps": { "target": 60, "priority": 2 },
   *   "videoSize": { "target": { "width": 4000, "height": 2000 }, "priority": 1 }
   * }
   * ```
   */
  priority: number;
}

View File

@ -0,0 +1,62 @@
import { CameraDevice, CameraPosition, PhysicalCameraDeviceType } from '../CameraDevice';
import { CameraRuntimeError } from '../CameraError';
export interface DeviceFilter {
  /**
   * The desired physical devices your camera device should have.
   *
   * Many modern phones have multiple Camera devices on one side and can combine those physical camera devices to one logical camera device.
   * For example, the iPhone 11 has two physical camera devices, the `ultra-wide-angle-camera` ("fish-eye") and the normal `wide-angle-camera`. You can either use one of those devices individually, or use a combined logical camera device which can smoothly switch over between the two physical cameras depending on the current `zoom` level.
   * When the user is at 0.5x-1x zoom, the `ultra-wide-angle-camera` can be used to offer a fish-eye zoom-out effect, and anything above 1x will smoothly switch over to the `wide-angle-camera`.
   *
   * **Note:** Devices with fewer physical devices (`['wide-angle-camera']`) are usually faster to start-up than more complex
   * devices (`['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera']`), but don't offer zoom switch-over capabilities.
   *
   * @example
   * ```ts
   * // This device is simpler, so it starts up faster.
   * getCameraDevice(devices, 'back', { physicalDevices: ['wide-angle-camera'] })
   * // This device is more complex, so it starts up slower, but you can switch between devices on 0.5x, 1x and 2x zoom.
   * getCameraDevice(devices, 'back', { physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'] })
   * ```
   */
  physicalDevices?: PhysicalCameraDeviceType[];
}
/**
 * Get the best matching Camera device that satisfies your requirements using a sorting filter.
 * @param devices All available Camera Devices this function will use for filtering. To get devices, use `Camera.getAvailableCameraDevices()`.
 * @param position The position the Camera device should have (`'back'`, `'front'` or `'external'`).
 * @param filter The filter you want to use. The device that matches your filter the closest will be returned.
 * @returns The device that matches your filter the closest.
 * @throws {@linkcode CameraRuntimeError} If no Camera device exists at the given position.
 */
export function getCameraDevice(devices: CameraDevice[], position: CameraPosition, filter: DeviceFilter = {}): CameraDevice {
  const filtered = devices.filter((d) => d.position === position);

  const sortedDevices = filtered.sort((left, right) => {
    let leftPoints = 0;
    let rightPoints = 0;

    // prefer higher hardware-level
    if (left.hardwareLevel === 'full') leftPoints += 4;
    if (right.hardwareLevel === 'full') rightPoints += 4;

    // compare devices. two possible scenarios:
    // 1. user wants all cameras ([ultra-wide, wide, tele]) to zoom. prefer those devices that have all 3 cameras.
    // 2. user wants only one ([wide]) for faster performance. prefer those devices that only have one camera, if they have more, we rank them lower.
    if (filter.physicalDevices != null) {
      for (const device of left.devices) {
        if (filter.physicalDevices.includes(device)) leftPoints += 1;
        else leftPoints -= 1;
      }
      for (const device of right.devices) {
        if (filter.physicalDevices.includes(device)) rightPoints += 1;
        else rightPoints -= 1;
      }
    }

    // FIX: sort descending (highest score first) so the best-matching device ends up at
    // index 0. The previous `leftPoints - rightPoints` sorted ascending, which made
    // `sortedDevices[0]` the *worst* match. This also matches getCameraFormat's comparator.
    return rightPoints - leftPoints;
  });

  const device = sortedDevices[0];
  if (device == null) throw new CameraRuntimeError('device/invalid-device', 'No Camera Device could be found!');
  return device;
}

View File

@ -0,0 +1,153 @@
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from '../CameraDevice';
import { CameraRuntimeError } from '../CameraError';
import { PixelFormat } from '../PixelFormat';
import { Filter } from './Filter';
// A simple 2D dimension, used as the target of the resolution filters below.
interface Size {
  width: number;
  height: number;
}

export interface FormatFilter {
  /**
   * The target resolution of the video (and frame processor) output pipeline.
   * If no format supports the given resolution, the format closest to this value will be used.
   */
  videoResolution?: Filter<Size>;
  /**
   * The target resolution of the photo output pipeline.
   * If no format supports the given resolution, the format closest to this value will be used.
   */
  photoResolution?: Filter<Size>;
  /**
   * The target aspect ratio of the video (and preview) output, expressed as a factor: `width / height`.
   *
   * In most cases, you want this to be as close to the screen's aspect ratio as possible (usually ~9:16).
   *
   * @example
   * ```ts
   * const screen = Dimensions.get('screen')
   * videoAspectRatio: { target: screen.width / screen.height, priority: 1 }
   * ```
   */
  videoAspectRatio?: Filter<number>;
  /**
   * The target aspect ratio of the photo output, expressed as a factor: `width / height`.
   *
   * In most cases, you want this to be the same as `videoAspectRatio`, which you often want
   * to be as close to the screen's aspect ratio as possible (usually ~9:16)
   *
   * @example
   * ```ts
   * const screen = Dimensions.get('screen')
   * photoAspectRatio: { target: screen.width / screen.height, priority: 1 }
   * ```
   */
  photoAspectRatio?: Filter<number>;
  /**
   * The target FPS you want to record video at.
   * If the FPS requirements can not be met, the format closest to this value will be used.
   */
  fps?: Filter<number>;
  /**
   * The target video stabilization mode you want to use.
   * If no format supports the target video stabilization mode, the best other matching format will be used.
   */
  videoStabilizationMode?: Filter<VideoStabilizationMode>;
  /**
   * The target pixel format you want to use.
   * If no format supports the target pixel format, the best other matching format will be used.
   */
  pixelFormat?: Filter<PixelFormat>;
}
/**
 * Get the best matching Camera format for the given device that satisfies your requirements using a sorting filter. By default, formats are sorted by highest to lowest resolution.
 * @param device The Camera Device you're currently using
 * @param filter The filter you want to use. The format that matches your filter the closest will be returned
 * @returns The format that matches your filter the closest.
 * @throws {@linkcode CameraRuntimeError} If the given device has no formats at all.
 */
export function getCameraFormat(device: CameraDevice, filter: FormatFilter): CameraDeviceFormat {
  // Copy first — Array.prototype.sort sorts in place and we must not mutate `device.formats`.
  const copy = [...device.formats];
  const sortedFormats = copy.sort((left, right) => {
    let leftPoints = 0;
    let rightPoints = 0;

    const leftVideoResolution = left.videoWidth * left.videoHeight;
    const rightVideoResolution = right.videoWidth * right.videoHeight;
    if (filter.videoResolution != null) {
      // Find video resolution closest to the filter (ignoring orientation)
      const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height;
      const leftDiff = Math.abs(leftVideoResolution - targetResolution);
      const rightDiff = Math.abs(rightVideoResolution - targetResolution);
      if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority;
    } else {
      // No filter is set, so just prefer higher resolutions
      if (leftVideoResolution > rightVideoResolution) leftPoints++;
      else if (rightVideoResolution > leftVideoResolution) rightPoints++;
    }

    const leftPhotoResolution = left.photoWidth * left.photoHeight;
    const rightPhotoResolution = right.photoWidth * right.photoHeight;
    if (filter.photoResolution != null) {
      // Find closest photo resolution to the filter (ignoring orientation)
      const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height;
      const leftDiff = Math.abs(leftPhotoResolution - targetResolution);
      const rightDiff = Math.abs(rightPhotoResolution - targetResolution);
      if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority;
    } else {
      // No filter is set, so just prefer higher resolutions
      if (leftPhotoResolution > rightPhotoResolution) leftPoints++;
      else if (rightPhotoResolution > leftPhotoResolution) rightPoints++;
    }

    // Find closest aspect ratio (video)
    if (filter.videoAspectRatio != null) {
      // FIX: each side's aspect ratio must be computed from its *own* dimensions.
      // (The previous code divided `left.videoWidth` by `right.videoHeight`.)
      const leftAspect = left.videoWidth / left.videoHeight;
      const rightAspect = right.videoWidth / right.videoHeight;
      const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target);
      const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target);
      if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority;
    }

    // Find closest aspect ratio (photo)
    if (filter.photoAspectRatio != null) {
      // FIX: same copy-paste bug as above — use `left.photoHeight`, not `right.photoHeight`.
      const leftAspect = left.photoWidth / left.photoHeight;
      const rightAspect = right.photoWidth / right.photoHeight;
      const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target);
      const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target);
      if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority;
    }

    // Find closest max FPS
    if (filter.fps != null) {
      const leftDiff = Math.abs(left.maxFps - filter.fps.target);
      const rightDiff = Math.abs(right.maxFps - filter.fps.target);
      if (leftDiff < rightDiff) leftPoints += filter.fps.priority;
      else if (rightDiff < leftDiff) rightPoints += filter.fps.priority;
    }

    // Find video stabilization mode
    if (filter.videoStabilizationMode != null) {
      if (left.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++;
      if (right.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++;
    }

    // Find pixel format
    if (filter.pixelFormat != null) {
      if (left.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++;
      if (right.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++;
    }

    // Sort descending: highest score first, so the best match is at index 0.
    return rightPoints - leftPoints;
  });

  const format = sortedFormats[0];
  if (format == null)
    throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`);
  return format;
}

View File

@ -0,0 +1,29 @@
import { useMemo } from 'react';
import { CameraDevice, CameraPosition } from '../CameraDevice';
import { getCameraDevice, DeviceFilter } from '../devices/getCameraDevice';
import { useCameraDevices } from './useCameraDevices';
/**
 * Get the best matching Camera device that best satisfies your requirements using a sorting filter.
 * @param position The position of the Camera device relative to the phone.
 * @param filter The filter you want to use. The Camera device that matches your filter the closest will be returned
 * @returns The Camera device that matches your filter the closest.
 * @example
 * ```ts
 * const [position, setPosition] = useState<CameraPosition>('back')
 * const device = useCameraDevice(position, {
 *   physicalDevices: ['wide-angle-camera']
 * })
 * ```
 */
export function useCameraDevice(position: CameraPosition, filter?: DeviceFilter): CameraDevice | undefined {
  const devices = useCameraDevices();

  // Serialize the filter so a structurally-identical object created on every
  // render does not invalidate the memoized device selection.
  const filterKey = JSON.stringify(filter);
  // eslint-disable-next-line react-hooks/exhaustive-deps
  return useMemo(() => getCameraDevice(devices, position, filter), [devices, position, filterKey]);
}

View File

@ -1,78 +1,23 @@
import { useEffect, useState } from 'react'; import { useEffect, useState } from 'react';
import type { CameraPosition } from '../CameraPosition'; import type { CameraDevice } from '../CameraDevice';
import { sortDevices } from '../utils/FormatFilter'; import { CameraDevices } from '../CameraDevices';
import { Camera } from '../Camera';
import { CameraDevice, LogicalCameraDeviceType, parsePhysicalDeviceTypes, PhysicalCameraDeviceType } from '../CameraDevice';
export type CameraDevices = {
[key in CameraPosition]: CameraDevice | undefined;
};
const DefaultCameraDevices: CameraDevices = {
back: undefined,
external: undefined,
front: undefined,
unspecified: undefined,
};
/** /**
* Gets the best available {@linkcode CameraDevice}. Devices with more cameras are preferred. * Get all available Camera Devices this phone has.
* *
* @returns The best matching {@linkcode CameraDevice}. * Camera Devices attached to this phone (`back` or `front`) are always available,
* @throws {@linkcode CameraRuntimeError} if no device was found. * while `external` devices might be plugged in or out at any point,
* @example * so the result of this function might update over time.
* ```tsx
* const device = useCameraDevice()
* // ...
* return <Camera device={device} />
* ```
*/ */
export function useCameraDevices(): CameraDevices; export function useCameraDevices(): CameraDevice[] {
const [devices, setDevices] = useState(() => CameraDevices.getAvailableCameraDevices());
/**
* Gets a {@linkcode CameraDevice} for the requested device type.
*
* @param {PhysicalCameraDeviceType | LogicalCameraDeviceType} deviceType Specifies a device type which will be used as a device filter.
* @returns A {@linkcode CameraDevice} for the requested device type.
* @throws {@linkcode CameraRuntimeError} if no device was found.
* @example
* ```tsx
* const device = useCameraDevice('wide-angle-camera')
* // ...
* return <Camera device={device} />
* ```
*/
export function useCameraDevices(deviceType: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevices;
export function useCameraDevices(deviceType?: PhysicalCameraDeviceType | LogicalCameraDeviceType): CameraDevices {
const [cameraDevices, setCameraDevices] = useState<CameraDevices>(DefaultCameraDevices);
useEffect(() => { useEffect(() => {
let isMounted = true; const listener = CameraDevices.addCameraDevicesChangedListener((newDevices) => {
setDevices(newDevices);
const loadDevice = async (): Promise<void> => {
let devices = await Camera.getAvailableCameraDevices();
if (!isMounted) return;
devices = devices.sort(sortDevices);
if (deviceType != null) {
devices = devices.filter((d) => {
const parsedType = parsePhysicalDeviceTypes(d.devices);
return parsedType === deviceType;
}); });
} return () => listener.remove();
setCameraDevices({ }, []);
back: devices.find((d) => d.position === 'back'),
external: devices.find((d) => d.position === 'external'),
front: devices.find((d) => d.position === 'front'),
unspecified: devices.find((d) => d.position === 'unspecified'),
});
};
loadDevice();
return () => { return devices;
isMounted = false;
};
}, [deviceType]);
return cameraDevices;
} }

View File

@ -1,16 +1,27 @@
import { useMemo } from 'react'; import { useMemo } from 'react';
import type { CameraDevice, CameraDeviceFormat } from '../CameraDevice'; import { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
import { sortFormats } from '../utils/FormatFilter'; import { FormatFilter, getCameraFormat } from '../devices/getCameraFormat';
/** /**
* Returns the best format for the given camera device. * Get the best matching Camera format for the given device that satisfies your requirements using a sorting filter. By default, formats are sorted by highest to lowest resolution.
* * @param device The Camera Device you're currently using
* This function tries to choose a format with the highest possible photo-capture resolution and best matching aspect ratio. * @param filter The filter you want to use. The format that matches your filter the closest will be returned
* * @returns The format that matches your filter the closest.
* @param {CameraDevice} device The Camera Device * @example
* * ```ts
* @returns The best matching format for the given camera device, or `undefined` if the camera device is `undefined`. * const device = useCameraDevice(...)
* const format = useCameraFormat(device, {
* videoResolution: { target: { width: 3840, height: 2160 }, priority: 2 },
* fps: { target: 60, priority: 1 }
* })
* ```
*/ */
export function useCameraFormat(device?: CameraDevice): CameraDeviceFormat | undefined { export function useCameraFormat(device: CameraDevice | undefined, filter: FormatFilter): CameraDeviceFormat | undefined {
return useMemo(() => device?.formats.sort(sortFormats)[0], [device?.formats]); const format = useMemo(() => {
if (device == null) return undefined;
return getCameraFormat(device, filter);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [device, JSON.stringify(filter)]);
return format;
} }

View File

@ -1,19 +1,16 @@
export * from './Camera'; export * from './Camera';
export * from './CameraDevice'; export * from './CameraDevice';
export * from './CameraError'; export * from './CameraError';
export * from './CameraPosition';
export * from './CameraProps';
export { Frame } from './Frame'; export { Frame } from './Frame';
export * from './FrameProcessorPlugins'; export * from './FrameProcessorPlugins';
export * from './CameraProps'; export * from './CameraProps';
export * from './PhotoFile'; export * from './PhotoFile';
export * from './PixelFormat'; export * from './PixelFormat';
export * from './Point'; export * from './Point';
export * from './TemporaryFile';
export * from './VideoFile'; export * from './VideoFile';
export * from './hooks/useCameraDevices'; export * from './hooks/useCameraDevices';
export * from './hooks/useCameraDevice';
export * from './hooks/useCameraFormat'; export * from './hooks/useCameraFormat';
export * from './devices/getCameraFormat';
export * from './hooks/useFrameProcessor'; export * from './hooks/useFrameProcessor';
export * from './utils/FormatFilter';

View File

@ -1,93 +0,0 @@
import { Dimensions } from 'react-native';
import type { CameraDevice, CameraDeviceFormat } from '../CameraDevice';
/**
* Compares two devices by the following criteria:
* * `wide-angle-camera`s are ranked higher than others
* * Devices with more physical cameras are ranked higher than ones with less. (e.g. "Triple Camera" > "Wide-Angle Camera")
*
* > Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*
* @example
* ```ts
* const devices = camera.devices.sort(sortDevices)
* const bestDevice = devices[0]
* ```
* @method
*/
export const sortDevices = (left: CameraDevice, right: CameraDevice): number => {
let leftPoints = 0;
let rightPoints = 0;
const leftHasWideAngle = left.devices.includes('wide-angle-camera');
const rightHasWideAngle = right.devices.includes('wide-angle-camera');
if (leftHasWideAngle) leftPoints += 2;
if (rightHasWideAngle) rightPoints += 2;
if (left.isMultiCam) leftPoints += 2;
if (right.isMultiCam) rightPoints += 2;
if (left.hardwareLevel === 'full') leftPoints += 3;
if (right.hardwareLevel === 'full') rightPoints += 3;
if (left.hardwareLevel === 'limited') leftPoints += 1;
if (right.hardwareLevel === 'limited') rightPoints += 1;
if (left.hasFlash) leftPoints += 1;
if (right.hasFlash) rightPoints += 1;
const leftMaxResolution = left.formats.reduce(
(prev, curr) => Math.max(prev, curr.videoHeight * curr.videoWidth + curr.photoHeight * curr.photoWidth),
0,
);
const rightMaxResolution = right.formats.reduce(
(prev, curr) => Math.max(prev, curr.videoHeight * curr.videoWidth + curr.photoHeight * curr.photoWidth),
0,
);
if (leftMaxResolution > rightMaxResolution) leftPoints += 3;
if (rightMaxResolution > leftMaxResolution) rightPoints += 3;
// telephoto cameras often have very poor quality.
const leftHasTelephoto = left.devices.includes('telephoto-camera');
const rightHasTelephoto = right.devices.includes('telephoto-camera');
if (leftHasTelephoto) leftPoints -= 2;
if (rightHasTelephoto) rightPoints -= 2;
if (left.devices.length > right.devices.length) leftPoints += 1;
if (right.devices.length > left.devices.length) rightPoints += 1;
return rightPoints - leftPoints;
};
const SCREEN_SIZE = {
width: Dimensions.get('window').width,
height: Dimensions.get('window').height,
};
const SCREEN_ASPECT_RATIO = SCREEN_SIZE.width / SCREEN_SIZE.height;
/**
* Sort formats by resolution and aspect ratio difference (to the Screen size).
*
* > Note that this makes the `sort()` function descending, so the first element (`[0]`) is the "best" device.
*/
export const sortFormats = (left: CameraDeviceFormat, right: CameraDeviceFormat): number => {
let leftPoints = 0,
rightPoints = 0;
// we downscale the points so much that we are in smaller number ranges for future calculations
// e.g. for 4k (4096), this adds 8 points.
leftPoints += Math.round(left.photoWidth / 500);
rightPoints += Math.round(right.photoWidth / 500);
// e.g. for 4k (4096), this adds 8 points.
leftPoints += Math.round(left.videoWidth / 500);
rightPoints += Math.round(right.videoWidth / 500);
// we downscale the points here as well, so if left has 16:9 and right has 21:9, this roughly
// adds 5 points. If the difference is smaller, e.g. 16:9 vs 17:9, this roughly adds a little
// bit over 1 point, just enough to overrule the FPS below.
const leftAspectRatioDiff = left.photoHeight / left.photoWidth - SCREEN_ASPECT_RATIO;
const rightAspectRatioDiff = right.photoHeight / right.photoWidth - SCREEN_ASPECT_RATIO;
leftPoints -= Math.abs(leftAspectRatioDiff) * 10;
rightPoints -= Math.abs(rightAspectRatioDiff) * 10;
return rightPoints - leftPoints;
};