feat: New JS API for useCameraDevice and useCameraFormat and much faster getAvailableCameraDevices() (#1784)

* Update podfile
* Update useCameraFormat.ts
* Update API
* Delete FormatFilter.md
* Format CameraViewManager.m ObjC style
* Make `getAvailableCameraDevices` synchronous/blocking
* Create some docs
* fix: Fix HardwareLevel types
* fix: Use new device/format API
* Use 60 FPS format as an example
* Replace `Camera.getAvailableCameraDevices` with new `CameraDevices` API/Module
* Fix Lint
* KTLint options
* Use continuation indent of 8
* Use 2 spaces for indent
* Update .editorconfig
* Format code
* Update .editorconfig
* Format more
* Update VideoStabilizationMode.kt
* fix: Expose `CameraDevicesManager` to ObjC
* Update CameraPage.tsx
* fix: `requiresMainQueueSetup() -> false`
* Always prefer higher resolution
* Update CameraDevicesManager.swift
* Update CameraPage.tsx
* Also filter pixelFormat
* fix: Add AVFoundation import
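For context, a rough sketch of how the new hooks are meant to be consumed from JS. The hook names come from the commit title; the exact filter/option shapes and props shown here are assumptions, not taken from this diff.

// Illustrative sketch only — option shapes and props are assumed.
import React from 'react'
import { Camera, useCameraDevice, useCameraFormat } from 'react-native-vision-camera'

export function CameraScreen() {
  // Pick a physical camera device (e.g. the back camera).
  const device = useCameraDevice('back')
  // Pick a format from that device, e.g. one supporting 60 FPS
  // ("Use 60 FPS format as an example" above).
  const format = useCameraFormat(device, [{ fps: 60 }])

  if (device == null) return null
  return <Camera style={{ flex: 1 }} device={device} format={format} fps={60} isActive={true} />
}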
@@ -1,6 +1,15 @@
[*.{kt,kts}]
indent_size=2
indent_style=space
indent_size=2
continuation_indent_size=4
insert_final_newline=true
max_line_length=off
disabled_rules=no-wildcard-imports
max_line_length=140
ktlint_code_style=android_studio
ktlint_standard=enabled
ktlint_experimental=enabled
ktlint_standard_filename=disabled # dont require PascalCase filenames
ktlint_standard_no-wildcard-imports=disabled # allow .* imports
ktlint_function_signature_rule_force_multiline_when_parameter_count_greater_or_equal_than=5
ktlint_function_signature_body_expression_wrapping=multiline
ij_kotlin_allow_trailing_comma_on_call_site=false
ij_kotlin_allow_trailing_comma=false
@@ -0,0 +1,84 @@
package com.mrousavy.camera

import android.content.Context
import android.hardware.camera2.CameraManager
import android.util.Log
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.bridge.ReactContextBaseJavaModule
import com.facebook.react.bridge.ReactMethod
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.modules.core.DeviceEventManagerModule
import com.mrousavy.camera.core.CameraDeviceDetails

class CameraDevicesManager(private val reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
  companion object {
    private const val TAG = "CameraDevices"
  }
  private val cameraManager = reactContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager

  private val callback = object : CameraManager.AvailabilityCallback() {
    private var devices = cameraManager.cameraIdList.toMutableList()

    // Check if device is still physically connected (even if onCameraUnavailable() is called)
    private fun isDeviceConnected(cameraId: String): Boolean =
      try {
        cameraManager.getCameraCharacteristics(cameraId)
        true
      } catch (_: Throwable) {
        false
      }

    override fun onCameraAvailable(cameraId: String) {
      Log.i(TAG, "Camera #$cameraId: Available!")
      if (!devices.contains(cameraId)) {
        devices.add(cameraId)
        sendAvailableDevicesChangedEvent()
      }
    }

    override fun onCameraUnavailable(cameraId: String) {
      Log.i(TAG, "Camera #$cameraId: Unavailable!")
      if (devices.contains(cameraId) && !isDeviceConnected(cameraId)) {
        devices.remove(cameraId)
        sendAvailableDevicesChangedEvent()
      }
    }
  }

  override fun getName(): String = TAG

  override fun initialize() {
    cameraManager.registerAvailabilityCallback(callback, null)
  }

  override fun invalidate() {
    cameraManager.unregisterAvailabilityCallback(callback)
    super.invalidate()
  }

  private fun getDevicesJson(): ReadableArray {
    val devices = Arguments.createArray()
    cameraManager.cameraIdList.forEach { cameraId ->
      val device = CameraDeviceDetails(cameraManager, cameraId)
      devices.pushMap(device.toMap())
    }
    return devices
  }

  fun sendAvailableDevicesChangedEvent() {
    val eventEmitter = reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)
    eventEmitter.emit("CameraDevicesChanged", getDevicesJson())
  }

  override fun hasConstants(): Boolean = true

  override fun getConstants(): MutableMap<String, Any> = mutableMapOf("availableCameraDevices" to getDevicesJson())

  // Required for NativeEventEmitter, this is just a dummy implementation:
  @ReactMethod
  fun addListener(eventName: String) {}

  @ReactMethod
  fun removeListeners(count: Int) {}
}
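The module above exports the device list synchronously as a module constant and pushes updates through a DeviceEventManager event, which is what makes the new getAvailableCameraDevices() fast. A minimal sketch of how the JS side could consume it (the module name, constant key and event name come from the Kotlin code above; the CameraDevice shape and everything else is assumed, not the library's actual internals):

// Sketch only — not the library's actual internal code.
import { NativeEventEmitter, NativeModules } from 'react-native'

interface CameraDevice { id: string }  // assumed shape

const CameraDevices = NativeModules.CameraDevices
const eventEmitter = new NativeEventEmitter(CameraDevices)

// The devices are available synchronously as a constant — no Promise round-trip.
let devices: CameraDevice[] = CameraDevices.availableCameraDevices

// Keep the list up to date when cameras are attached or removed.
eventEmitter.addListener('CameraDevicesChanged', (newDevices: CameraDevice[]) => {
  devices = newDevices
})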
@@ -6,11 +6,11 @@ import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager

class CameraPackage : ReactPackage {
  override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> {
    return listOf(CameraViewModule(reactContext))
  }
  override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> =
    listOf(
      CameraViewModule(reactContext),
      CameraDevicesManager(reactContext)
    )

  override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> {
    return listOf(CameraViewManager())
  }
  override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> = listOf(CameraViewManager())
}
@@ -2,10 +2,10 @@ package com.mrousavy.camera

import android.os.Handler
import android.os.HandlerThread
import java.util.concurrent.Executor
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.android.asCoroutineDispatcher
import kotlinx.coroutines.asExecutor
import java.util.concurrent.Executor

class CameraQueues {
  companion object {
@@ -32,4 +32,3 @@ class CameraQueues {
  }
}
}
@@ -5,10 +5,10 @@ import android.annotation.SuppressLint
import android.content.pm.PackageManager
import androidx.core.content.ContextCompat
import com.facebook.react.bridge.*
import com.mrousavy.camera.core.RecordingSession
import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoCodec
import com.mrousavy.camera.parsers.VideoFileType
import com.mrousavy.camera.core.RecordingSession
import com.mrousavy.camera.utils.makeErrorMap
import java.util.*
@@ -15,10 +15,9 @@ import com.mrousavy.camera.core.CameraSession
|
||||
import com.mrousavy.camera.parsers.Flash
|
||||
import com.mrousavy.camera.parsers.QualityPrioritization
|
||||
import com.mrousavy.camera.utils.*
|
||||
import kotlinx.coroutines.*
|
||||
import java.io.File
|
||||
import java.io.FileOutputStream
|
||||
import java.io.OutputStream
|
||||
import kotlinx.coroutines.*
|
||||
|
||||
private const val TAG = "CameraView.takePhoto"
|
||||
|
||||
@@ -36,12 +35,14 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap {
|
||||
val flashMode = Flash.fromUnionValue(flash)
|
||||
val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization)
|
||||
|
||||
val photo = cameraSession.takePhoto(qualityPrioritizationMode,
|
||||
flashMode,
|
||||
enableShutterSound,
|
||||
enableAutoRedEyeReduction,
|
||||
enableAutoStabilization,
|
||||
outputOrientation)
|
||||
val photo = cameraSession.takePhoto(
|
||||
qualityPrioritizationMode,
|
||||
flashMode,
|
||||
enableShutterSound,
|
||||
enableAutoRedEyeReduction,
|
||||
enableAutoStabilization,
|
||||
outputOrientation
|
||||
)
|
||||
|
||||
photo.use {
|
||||
Log.i(TAG, "Successfully captured ${photo.image.width} x ${photo.image.height} photo!")
|
||||
@@ -83,10 +84,12 @@ private fun writePhotoToFile(photo: CameraSession.CapturedPhoto, file: File) {
|
||||
}
|
||||
}
|
||||
|
||||
private suspend fun savePhotoToFile(context: Context,
|
||||
cameraCharacteristics: CameraCharacteristics,
|
||||
photo: CameraSession.CapturedPhoto): String {
|
||||
return withContext(Dispatchers.IO) {
|
||||
private suspend fun savePhotoToFile(
|
||||
context: Context,
|
||||
cameraCharacteristics: CameraCharacteristics,
|
||||
photo: CameraSession.CapturedPhoto
|
||||
): String =
|
||||
withContext(Dispatchers.IO) {
|
||||
when (photo.format) {
|
||||
// When the format is JPEG or DEPTH JPEG we can simply save the bytes as-is
|
||||
ImageFormat.JPEG, ImageFormat.DEPTH_JPEG -> {
|
||||
@@ -111,8 +114,8 @@ private suspend fun savePhotoToFile(context: Context,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun createFile(context: Context, extension: String): File {
|
||||
return File.createTempFile("mrousavy", extension, context.cacheDir).apply { deleteOnExit() }
|
||||
}
|
||||
private fun createFile(context: Context, extension: String): File =
|
||||
File.createTempFile("mrousavy", extension, context.cacheDir).apply {
|
||||
deleteOnExit()
|
||||
}
|
||||
|
@@ -15,6 +15,7 @@ import androidx.core.content.ContextCompat
|
||||
import com.facebook.react.bridge.ReadableMap
|
||||
import com.mrousavy.camera.core.CameraSession
|
||||
import com.mrousavy.camera.core.PreviewView
|
||||
import com.mrousavy.camera.core.outputs.CameraOutputs
|
||||
import com.mrousavy.camera.extensions.containsAny
|
||||
import com.mrousavy.camera.extensions.installHierarchyFitter
|
||||
import com.mrousavy.camera.frameprocessor.FrameProcessor
|
||||
@@ -22,7 +23,6 @@ import com.mrousavy.camera.parsers.Orientation
|
||||
import com.mrousavy.camera.parsers.PixelFormat
|
||||
import com.mrousavy.camera.parsers.Torch
|
||||
import com.mrousavy.camera.parsers.VideoStabilizationMode
|
||||
import com.mrousavy.camera.core.outputs.CameraOutputs
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.launch
|
||||
@@ -42,7 +42,8 @@ class CameraView(context: Context) : FrameLayout(context) {
|
||||
const val TAG = "CameraView"
|
||||
|
||||
private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId")
|
||||
private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "photo", "video", "enableFrameProcessor", "pixelFormat")
|
||||
private val propsThatRequireSessionReconfiguration =
|
||||
arrayListOf("cameraId", "format", "photo", "video", "enableFrameProcessor", "pixelFormat")
|
||||
private val propsThatRequireFormatReconfiguration = arrayListOf("fps", "hdr", "videoStabilizationMode", "lowLightBoost")
|
||||
}
|
||||
|
||||
@@ -52,18 +53,21 @@ class CameraView(context: Context) : FrameLayout(context) {
|
||||
var enableDepthData = false
|
||||
var enableHighQualityPhotos: Boolean? = null
|
||||
var enablePortraitEffectsMatteDelivery = false
|
||||
|
||||
// use-cases
|
||||
var photo: Boolean? = null
|
||||
var video: Boolean? = null
|
||||
var audio: Boolean? = null
|
||||
var enableFrameProcessor = false
|
||||
var pixelFormat: PixelFormat = PixelFormat.NATIVE
|
||||
|
||||
// props that require format reconfiguring
|
||||
var format: ReadableMap? = null
|
||||
var fps: Int? = null
|
||||
var videoStabilizationMode: VideoStabilizationMode? = null
|
||||
var hdr: Boolean? = null // nullable bool
|
||||
var lowLightBoost: Boolean? = null // nullable bool
|
||||
|
||||
// other props
|
||||
var isActive = false
|
||||
var torch: Torch = Torch.OFF
|
||||
@@ -129,7 +133,7 @@ class CameraView(context: Context) : FrameLayout(context) {
|
||||
Log.i(TAG, "Props changed: $changedProps")
|
||||
try {
|
||||
val shouldReconfigurePreview = changedProps.containsAny(propsThatRequirePreviewReconfiguration)
|
||||
val shouldReconfigureSession = shouldReconfigurePreview || changedProps.containsAny(propsThatRequireSessionReconfiguration)
|
||||
val shouldReconfigureSession = shouldReconfigurePreview || changedProps.containsAny(propsThatRequireSessionReconfiguration)
|
||||
val shouldReconfigureFormat = shouldReconfigureSession || changedProps.containsAny(propsThatRequireFormatReconfiguration)
|
||||
val shouldReconfigureZoom = shouldReconfigureSession || changedProps.contains("zoom")
|
||||
val shouldReconfigureTorch = shouldReconfigureSession || changedProps.contains("torch")
|
||||
@@ -182,10 +186,14 @@ class CameraView(context: Context) : FrameLayout(context) {
|
||||
val previewOutput = CameraOutputs.PreviewOutput(previewSurface)
|
||||
val photoOutput = if (photo == true) {
|
||||
CameraOutputs.PhotoOutput(targetPhotoSize)
|
||||
} else null
|
||||
} else {
|
||||
null
|
||||
}
|
||||
val videoOutput = if (video == true || enableFrameProcessor) {
|
||||
CameraOutputs.VideoOutput(targetVideoSize, video == true, enableFrameProcessor, pixelFormat.toImageFormat())
|
||||
} else null
|
||||
} else {
|
||||
null
|
||||
}
|
||||
|
||||
cameraSession.configureSession(cameraId, previewOutput, photoOutput, videoOutput)
|
||||
} catch (e: Throwable) {
|
||||
@@ -215,13 +223,16 @@ class CameraView(context: Context) : FrameLayout(context) {
|
||||
@SuppressLint("ClickableViewAccessibility")
|
||||
private fun updateZoomGesture() {
|
||||
if (enableZoomGesture) {
|
||||
val scaleGestureDetector = ScaleGestureDetector(context, object: ScaleGestureDetector.SimpleOnScaleGestureListener() {
|
||||
override fun onScale(detector: ScaleGestureDetector): Boolean {
|
||||
zoom *= detector.scaleFactor
|
||||
cameraSession.setZoom(zoom)
|
||||
return true
|
||||
val scaleGestureDetector = ScaleGestureDetector(
|
||||
context,
|
||||
object : ScaleGestureDetector.SimpleOnScaleGestureListener() {
|
||||
override fun onScale(detector: ScaleGestureDetector): Boolean {
|
||||
zoom *= detector.scaleFactor
|
||||
cameraSession.setZoom(zoom)
|
||||
return true
|
||||
}
|
||||
}
|
||||
})
|
||||
)
|
||||
setOnTouchListener { _, event ->
|
||||
scaleGestureDetector.onTouchEvent(event)
|
||||
}
|
||||
|
@@ -12,9 +12,7 @@ import com.mrousavy.camera.parsers.VideoStabilizationMode
|
||||
|
||||
@Suppress("unused")
|
||||
class CameraViewManager : ViewGroupManager<CameraView>() {
|
||||
public override fun createViewInstance(context: ThemedReactContext): CameraView {
|
||||
return CameraView(context)
|
||||
}
|
||||
public override fun createViewInstance(context: ThemedReactContext): CameraView = CameraView(context)
|
||||
|
||||
override fun onAfterUpdateTransaction(view: CameraView) {
|
||||
super.onAfterUpdateTransaction(view)
|
||||
@@ -23,101 +21,110 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
|
||||
cameraViewTransactions.remove(view)
|
||||
}
|
||||
|
||||
override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any>? {
|
||||
return MapBuilder.builder<String, Any>()
|
||||
override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any>? =
|
||||
MapBuilder.builder<String, Any>()
|
||||
.put("cameraViewReady", MapBuilder.of("registrationName", "onViewReady"))
|
||||
.put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized"))
|
||||
.put("cameraError", MapBuilder.of("registrationName", "onError"))
|
||||
.build()
|
||||
}
|
||||
|
||||
override fun getName(): String {
|
||||
return TAG
|
||||
}
|
||||
override fun getName(): String = TAG
|
||||
|
||||
@ReactProp(name = "cameraId")
|
||||
fun setCameraId(view: CameraView, cameraId: String) {
|
||||
if (view.cameraId != cameraId)
|
||||
if (view.cameraId != cameraId) {
|
||||
addChangedPropToTransaction(view, "cameraId")
|
||||
}
|
||||
view.cameraId = cameraId
|
||||
}
|
||||
|
||||
@ReactProp(name = "photo")
|
||||
fun setPhoto(view: CameraView, photo: Boolean?) {
|
||||
if (view.photo != photo)
|
||||
if (view.photo != photo) {
|
||||
addChangedPropToTransaction(view, "photo")
|
||||
}
|
||||
view.photo = photo
|
||||
}
|
||||
|
||||
@ReactProp(name = "video")
|
||||
fun setVideo(view: CameraView, video: Boolean?) {
|
||||
if (view.video != video)
|
||||
if (view.video != video) {
|
||||
addChangedPropToTransaction(view, "video")
|
||||
}
|
||||
view.video = video
|
||||
}
|
||||
|
||||
@ReactProp(name = "audio")
|
||||
fun setAudio(view: CameraView, audio: Boolean?) {
|
||||
if (view.audio != audio)
|
||||
if (view.audio != audio) {
|
||||
addChangedPropToTransaction(view, "audio")
|
||||
}
|
||||
view.audio = audio
|
||||
}
|
||||
|
||||
@ReactProp(name = "enableFrameProcessor")
|
||||
fun setEnableFrameProcessor(view: CameraView, enableFrameProcessor: Boolean) {
|
||||
if (view.enableFrameProcessor != enableFrameProcessor)
|
||||
if (view.enableFrameProcessor != enableFrameProcessor) {
|
||||
addChangedPropToTransaction(view, "enableFrameProcessor")
|
||||
}
|
||||
view.enableFrameProcessor = enableFrameProcessor
|
||||
}
|
||||
|
||||
@ReactProp(name = "pixelFormat")
|
||||
fun setPixelFormat(view: CameraView, pixelFormat: String?) {
|
||||
val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat)
|
||||
if (view.pixelFormat != newPixelFormat)
|
||||
if (view.pixelFormat != newPixelFormat) {
|
||||
addChangedPropToTransaction(view, "pixelFormat")
|
||||
}
|
||||
view.pixelFormat = newPixelFormat ?: PixelFormat.NATIVE
|
||||
}
|
||||
|
||||
@ReactProp(name = "enableDepthData")
|
||||
fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
|
||||
if (view.enableDepthData != enableDepthData)
|
||||
if (view.enableDepthData != enableDepthData) {
|
||||
addChangedPropToTransaction(view, "enableDepthData")
|
||||
}
|
||||
view.enableDepthData = enableDepthData
|
||||
}
|
||||
|
||||
@ReactProp(name = "enableZoomGesture")
|
||||
fun setEnableZoomGesture(view: CameraView, enableZoomGesture: Boolean) {
|
||||
if (view.enableZoomGesture != enableZoomGesture)
|
||||
if (view.enableZoomGesture != enableZoomGesture) {
|
||||
addChangedPropToTransaction(view, "enableZoomGesture")
|
||||
}
|
||||
view.enableZoomGesture = enableZoomGesture
|
||||
}
|
||||
|
||||
@ReactProp(name = "videoStabilizationMode")
|
||||
fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) {
|
||||
val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode)
|
||||
if (view.videoStabilizationMode != newMode)
|
||||
if (view.videoStabilizationMode != newMode) {
|
||||
addChangedPropToTransaction(view, "videoStabilizationMode")
|
||||
}
|
||||
view.videoStabilizationMode = newMode
|
||||
}
|
||||
|
||||
@ReactProp(name = "enableHighQualityPhotos")
|
||||
fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) {
|
||||
if (view.enableHighQualityPhotos != enableHighQualityPhotos)
|
||||
if (view.enableHighQualityPhotos != enableHighQualityPhotos) {
|
||||
addChangedPropToTransaction(view, "enableHighQualityPhotos")
|
||||
}
|
||||
view.enableHighQualityPhotos = enableHighQualityPhotos
|
||||
}
|
||||
|
||||
@ReactProp(name = "enablePortraitEffectsMatteDelivery")
|
||||
fun setEnablePortraitEffectsMatteDelivery(view: CameraView, enablePortraitEffectsMatteDelivery: Boolean) {
|
||||
if (view.enablePortraitEffectsMatteDelivery != enablePortraitEffectsMatteDelivery)
|
||||
if (view.enablePortraitEffectsMatteDelivery != enablePortraitEffectsMatteDelivery) {
|
||||
addChangedPropToTransaction(view, "enablePortraitEffectsMatteDelivery")
|
||||
}
|
||||
view.enablePortraitEffectsMatteDelivery = enablePortraitEffectsMatteDelivery
|
||||
}
|
||||
|
||||
@ReactProp(name = "format")
|
||||
fun setFormat(view: CameraView, format: ReadableMap?) {
|
||||
if (view.format != format)
|
||||
if (view.format != format) {
|
||||
addChangedPropToTransaction(view, "format")
|
||||
}
|
||||
view.format = format
|
||||
}
|
||||
|
||||
@@ -126,53 +133,60 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
|
||||
// of type "Int?" the react bridge throws an error.
|
||||
@ReactProp(name = "fps", defaultInt = -1)
|
||||
fun setFps(view: CameraView, fps: Int) {
|
||||
if (view.fps != fps)
|
||||
if (view.fps != fps) {
|
||||
addChangedPropToTransaction(view, "fps")
|
||||
}
|
||||
view.fps = if (fps > 0) fps else null
|
||||
}
|
||||
|
||||
@ReactProp(name = "hdr")
|
||||
fun setHdr(view: CameraView, hdr: Boolean?) {
|
||||
if (view.hdr != hdr)
|
||||
if (view.hdr != hdr) {
|
||||
addChangedPropToTransaction(view, "hdr")
|
||||
}
|
||||
view.hdr = hdr
|
||||
}
|
||||
|
||||
@ReactProp(name = "lowLightBoost")
|
||||
fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) {
|
||||
if (view.lowLightBoost != lowLightBoost)
|
||||
if (view.lowLightBoost != lowLightBoost) {
|
||||
addChangedPropToTransaction(view, "lowLightBoost")
|
||||
}
|
||||
view.lowLightBoost = lowLightBoost
|
||||
}
|
||||
|
||||
@ReactProp(name = "isActive")
|
||||
fun setIsActive(view: CameraView, isActive: Boolean) {
|
||||
if (view.isActive != isActive)
|
||||
if (view.isActive != isActive) {
|
||||
addChangedPropToTransaction(view, "isActive")
|
||||
}
|
||||
view.isActive = isActive
|
||||
}
|
||||
|
||||
@ReactProp(name = "torch")
|
||||
fun setTorch(view: CameraView, torch: String) {
|
||||
val newMode = Torch.fromUnionValue(torch)
|
||||
if (view.torch != newMode)
|
||||
if (view.torch != newMode) {
|
||||
addChangedPropToTransaction(view, "torch")
|
||||
}
|
||||
view.torch = newMode
|
||||
}
|
||||
|
||||
@ReactProp(name = "zoom")
|
||||
fun setZoom(view: CameraView, zoom: Double) {
|
||||
val zoomFloat = zoom.toFloat()
|
||||
if (view.zoom != zoomFloat)
|
||||
if (view.zoom != zoomFloat) {
|
||||
addChangedPropToTransaction(view, "zoom")
|
||||
}
|
||||
view.zoom = zoomFloat
|
||||
}
|
||||
|
||||
@ReactProp(name = "orientation")
|
||||
fun setOrientation(view: CameraView, orientation: String?) {
|
||||
val newMode = Orientation.fromUnionValue(orientation)
|
||||
if (view.orientation != newMode)
|
||||
if (view.orientation != newMode) {
|
||||
addChangedPropToTransaction(view, "orientation")
|
||||
}
|
||||
view.orientation = newMode
|
||||
}
|
||||
|
||||
|
@@ -1,9 +1,7 @@
|
||||
package com.mrousavy.camera
|
||||
|
||||
import android.Manifest
|
||||
import android.content.Context
|
||||
import android.content.pm.PackageManager
|
||||
import android.hardware.camera2.CameraManager
|
||||
import android.util.Log
|
||||
import androidx.core.content.ContextCompat
|
||||
import com.facebook.react.bridge.*
|
||||
@@ -11,22 +9,21 @@ import com.facebook.react.module.annotations.ReactModule
|
||||
import com.facebook.react.modules.core.PermissionAwareActivity
|
||||
import com.facebook.react.modules.core.PermissionListener
|
||||
import com.facebook.react.uimanager.UIManagerHelper
|
||||
import com.mrousavy.camera.core.CameraDeviceDetails
|
||||
import com.mrousavy.camera.frameprocessor.VisionCameraInstaller
|
||||
import com.mrousavy.camera.frameprocessor.VisionCameraProxy
|
||||
import com.mrousavy.camera.parsers.*
|
||||
import com.mrousavy.camera.utils.*
|
||||
import kotlinx.coroutines.*
|
||||
import kotlin.coroutines.resume
|
||||
import kotlin.coroutines.resumeWithException
|
||||
import kotlin.coroutines.suspendCoroutine
|
||||
import kotlinx.coroutines.*
|
||||
|
||||
@ReactModule(name = CameraViewModule.TAG)
|
||||
@Suppress("unused")
|
||||
class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJavaModule(reactContext) {
|
||||
class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
|
||||
companion object {
|
||||
const val TAG = "CameraView"
|
||||
var RequestCode = 10
|
||||
var sharedRequestCode = 10
|
||||
}
|
||||
|
||||
private val coroutineScope = CoroutineScope(Dispatchers.Default) // TODO: or Dispatchers.Main?
|
||||
@@ -38,25 +35,32 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
|
||||
}
|
||||
}
|
||||
|
||||
override fun getName(): String {
|
||||
return TAG
|
||||
}
|
||||
override fun getName(): String = TAG
|
||||
|
||||
private suspend fun findCameraView(viewId: Int): CameraView {
|
||||
return suspendCoroutine { continuation ->
|
||||
private suspend fun findCameraView(viewId: Int): CameraView =
|
||||
suspendCoroutine { continuation ->
|
||||
UiThreadUtil.runOnUiThread {
|
||||
Log.d(TAG, "Finding view $viewId...")
|
||||
val view = if (reactApplicationContext != null) UIManagerHelper.getUIManager(reactApplicationContext, viewId)?.resolveView(viewId) as CameraView? else null
|
||||
Log.d(TAG, if (reactApplicationContext != null) "Found view $viewId!" else "Couldn't find view $viewId!")
|
||||
if (view != null) continuation.resume(view)
|
||||
else continuation.resumeWithException(ViewNotFoundError(viewId))
|
||||
val view = if (reactApplicationContext != null) {
|
||||
UIManagerHelper.getUIManager(
|
||||
reactApplicationContext,
|
||||
viewId
|
||||
)?.resolveView(viewId) as CameraView?
|
||||
} else {
|
||||
null
|
||||
}
|
||||
Log.d(TAG, if (reactApplicationContext != null) "Found view $viewId!" else "Couldn't find view $viewId!")
|
||||
if (view != null) {
|
||||
continuation.resume(view)
|
||||
} else {
|
||||
continuation.resumeWithException(ViewNotFoundError(viewId))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ReactMethod(isBlockingSynchronousMethod = true)
|
||||
fun installFrameProcessorBindings(): Boolean {
|
||||
return try {
|
||||
fun installFrameProcessorBindings(): Boolean =
|
||||
try {
|
||||
val proxy = VisionCameraProxy(reactApplicationContext)
|
||||
VisionCameraInstaller.install(proxy)
|
||||
true
|
||||
@@ -64,7 +68,6 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
|
||||
Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
@ReactMethod
|
||||
fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
|
||||
@@ -87,7 +90,8 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
|
||||
val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
|
||||
onRecordCallback(null, map)
|
||||
} catch (error: Throwable) {
|
||||
val map = makeErrorMap("capture/unknown", "An unknown error occurred while trying to start a video recording! ${error.message}", error)
|
||||
val map =
|
||||
makeErrorMap("capture/unknown", "An unknown error occurred while trying to start a video recording! ${error.message}", error)
|
||||
onRecordCallback(null, map)
|
||||
}
|
||||
}
|
||||
@@ -137,22 +141,6 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
|
||||
}
|
||||
}
|
||||
|
||||
@ReactMethod
|
||||
fun getAvailableCameraDevices(promise: Promise) {
|
||||
coroutineScope.launch {
|
||||
withPromise(promise) {
|
||||
val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager
|
||||
|
||||
val devices = Arguments.createArray()
|
||||
manager.cameraIdList.forEach { cameraId ->
|
||||
val device = CameraDeviceDetails(manager, cameraId)
|
||||
devices.pushMap(device.toMap())
|
||||
}
|
||||
promise.resolve(devices)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun canRequestPermission(permission: String): Boolean {
|
||||
val activity = currentActivity as? PermissionAwareActivity
|
||||
return activity?.shouldShowRequestPermissionRationale(permission) ?: false
|
||||
@@ -182,7 +170,7 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
|
||||
fun requestCameraPermission(promise: Promise) {
|
||||
val activity = reactApplicationContext.currentActivity
|
||||
if (activity is PermissionAwareActivity) {
|
||||
val currentRequestCode = RequestCode++
|
||||
val currentRequestCode = sharedRequestCode++
|
||||
val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
|
||||
if (requestCode == currentRequestCode) {
|
||||
val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED
|
||||
@@ -202,7 +190,7 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
|
||||
fun requestMicrophonePermission(promise: Promise) {
|
||||
val activity = reactApplicationContext.currentActivity
|
||||
if (activity is PermissionAwareActivity) {
|
||||
val currentRequestCode = RequestCode++
|
||||
val currentRequestCode = sharedRequestCode++
|
||||
val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
|
||||
if (requestCode == currentRequestCode) {
|
||||
val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED
|
||||
|
@@ -1,7 +1,7 @@
|
||||
package com.mrousavy.camera
|
||||
|
||||
import com.mrousavy.camera.parsers.CameraDeviceError
|
||||
import com.mrousavy.camera.core.outputs.CameraOutputs
|
||||
import com.mrousavy.camera.parsers.CameraDeviceError
|
||||
|
||||
abstract class CameraError(
|
||||
// example: "permission"
|
||||
@@ -16,30 +16,53 @@ abstract class CameraError(
|
||||
val CameraError.code: String
|
||||
get() = "$domain/$id"
|
||||
|
||||
class MicrophonePermissionError : CameraError("permission", "microphone-permission-denied", "The Microphone permission was denied! If you want to record Video without sound, pass `audio={false}`.")
|
||||
class MicrophonePermissionError :
|
||||
CameraError(
|
||||
"permission",
|
||||
"microphone-permission-denied",
|
||||
"The Microphone permission was denied! If you want to record Video without sound, pass `audio={false}`."
|
||||
)
|
||||
class CameraPermissionError : CameraError("permission", "camera-permission-denied", "The Camera permission was denied!")
|
||||
|
||||
class InvalidTypeScriptUnionError(unionName: String, unionValue: String) : CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
|
||||
class InvalidTypeScriptUnionError(unionName: String, unionValue: String) :
|
||||
CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
|
||||
|
||||
class NoCameraDeviceError : CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
|
||||
class PixelFormatNotSupportedError(format: String) : CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
|
||||
class NoCameraDeviceError :
|
||||
CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
|
||||
class PixelFormatNotSupportedError(format: String) :
|
||||
CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
|
||||
|
||||
class CameraNotReadyError : CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
|
||||
class CameraCannotBeOpenedError(cameraId: String, error: CameraDeviceError) : CameraError("session", "camera-cannot-be-opened", "The given Camera device (id: $cameraId) could not be opened! Error: $error")
|
||||
class CameraSessionCannotBeConfiguredError(cameraId: String, outputs: CameraOutputs) : CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera $cameraId! Outputs: $outputs")
|
||||
class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error")
|
||||
class CameraNotReadyError :
|
||||
CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
|
||||
class CameraCannotBeOpenedError(cameraId: String, error: CameraDeviceError) :
|
||||
CameraError("session", "camera-cannot-be-opened", "The given Camera device (id: $cameraId) could not be opened! Error: $error")
|
||||
class CameraSessionCannotBeConfiguredError(cameraId: String, outputs: CameraOutputs) :
|
||||
CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera $cameraId! Outputs: $outputs")
|
||||
class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) :
|
||||
CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error")
|
||||
|
||||
class VideoNotEnabledError : CameraError("capture", "video-not-enabled", "Video capture is disabled! Pass `video={true}` to enable video recordings.")
|
||||
class PhotoNotEnabledError : CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.")
|
||||
class CaptureAbortedError(wasImageCaptured: Boolean) : CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured")
|
||||
class UnknownCaptureError(wasImageCaptured: Boolean) : CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured")
|
||||
class VideoNotEnabledError :
|
||||
CameraError("capture", "video-not-enabled", "Video capture is disabled! Pass `video={true}` to enable video recordings.")
|
||||
class PhotoNotEnabledError :
|
||||
CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.")
|
||||
class CaptureAbortedError(wasImageCaptured: Boolean) :
|
||||
CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured")
|
||||
class UnknownCaptureError(wasImageCaptured: Boolean) :
|
||||
CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured")
|
||||
|
||||
class RecorderError(name: String, extra: Int) : CameraError("capture", "recorder-error", "An error occured while recording a video! $name $extra")
|
||||
class RecorderError(name: String, extra: Int) :
|
||||
CameraError("capture", "recorder-error", "An error occured while recording a video! $name $extra")
|
||||
|
||||
class NoRecordingInProgressError : CameraError("capture", "no-recording-in-progress", "There was no active video recording in progress! Did you call stopRecording() twice?")
|
||||
class RecordingInProgressError : CameraError("capture", "recording-in-progress", "There is already an active video recording in progress! Did you call startRecording() twice?")
|
||||
class NoRecordingInProgressError :
|
||||
CameraError("capture", "no-recording-in-progress", "There was no active video recording in progress! Did you call stopRecording() twice?")
|
||||
class RecordingInProgressError :
|
||||
CameraError(
|
||||
"capture",
|
||||
"recording-in-progress",
|
||||
"There is already an active video recording in progress! Did you call startRecording() twice?"
|
||||
)
|
||||
|
||||
class ViewNotFoundError(viewId: Int) : CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")
|
||||
class ViewNotFoundError(viewId: Int) :
|
||||
CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")
|
||||
|
||||
class UnknownCameraError(cause: Throwable?) : CameraError("unknown", "unknown", cause?.message ?: "An unknown camera error occured.", cause)
|
||||
|
||||
|
@@ -14,10 +14,10 @@ import com.facebook.react.bridge.ReadableMap
|
||||
import com.mrousavy.camera.extensions.bigger
|
||||
import com.mrousavy.camera.extensions.getPhotoSizes
|
||||
import com.mrousavy.camera.extensions.getVideoSizes
|
||||
import com.mrousavy.camera.parsers.PixelFormat
|
||||
import com.mrousavy.camera.parsers.HardwareLevel
|
||||
import com.mrousavy.camera.parsers.LensFacing
|
||||
import com.mrousavy.camera.parsers.Orientation
|
||||
import com.mrousavy.camera.parsers.PixelFormat
|
||||
import com.mrousavy.camera.parsers.VideoStabilizationMode
|
||||
import kotlin.math.PI
|
||||
import kotlin.math.atan
|
||||
@@ -29,50 +29,64 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
|
||||
private val extensions = getSupportedExtensions()
|
||||
|
||||
// device characteristics
|
||||
private val isMultiCam = capabilities.contains(11 /* TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA */)
|
||||
private val supportsDepthCapture = capabilities.contains(8 /* TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT */)
|
||||
private val isMultiCam = capabilities.contains(11) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA
|
||||
private val supportsDepthCapture = capabilities.contains(8) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT
|
||||
private val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)
|
||||
private val supportsLowLightBoost = extensions.contains(4 /* TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT */)
|
||||
private val supportsLowLightBoost = extensions.contains(4) // TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT
|
||||
private val lensFacing = LensFacing.fromCameraCharacteristics(characteristics)
|
||||
private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false
|
||||
private val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f /* 35mm default */)
|
||||
private val focalLengths =
|
||||
characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)
|
||||
// 35mm is the film standard sensor size
|
||||
?: floatArrayOf(35f)
|
||||
private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
|
||||
private val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
|
||||
private val name = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION)
|
||||
else null) ?: "$lensFacing (${cameraId})"
|
||||
private val name = (
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
|
||||
characteristics.get(CameraCharacteristics.INFO_VERSION)
|
||||
} else {
|
||||
null
|
||||
}
|
||||
) ?: "$lensFacing ($cameraId)"
|
||||
|
||||
// "formats" (all possible configurations for this device)
|
||||
private val zoomRange = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
|
||||
else null) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f)
|
||||
private val zoomRange = (
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
|
||||
characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
|
||||
} else {
|
||||
null
|
||||
}
|
||||
) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f)
|
||||
private val minZoom = zoomRange.lower.toDouble()
|
||||
private val maxZoom = zoomRange.upper.toDouble()
|
||||
|
||||
private val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
|
||||
private val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0)
|
||||
private val digitalStabilizationModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0)
|
||||
private val opticalStabilizationModes = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0)
|
||||
private val supportsPhotoHdr = extensions.contains(3 /* TODO: CameraExtensionCharacteristics.EXTENSION_HDR */)
|
||||
private val digitalStabilizationModes =
|
||||
characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0)
|
||||
private val opticalStabilizationModes =
|
||||
characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0)
|
||||
private val supportsPhotoHdr = extensions.contains(3) // TODO: CameraExtensionCharacteristics.EXTENSION_HDR
|
||||
private val supportsVideoHdr = getHasVideoHdr()
|
||||
|
||||
private val videoFormat = ImageFormat.YUV_420_888
|
||||
|
||||
// get extensions (HDR, Night Mode, ..)
|
||||
private fun getSupportedExtensions(): List<Int> {
|
||||
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
private fun getSupportedExtensions(): List<Int> =
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
val extensions = cameraManager.getCameraExtensionCharacteristics(cameraId)
|
||||
extensions.supportedExtensions
|
||||
} else {
|
||||
emptyList()
|
||||
}
|
||||
}
|
||||
|
||||
private fun getHasVideoHdr(): Boolean {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
|
||||
if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
|
||||
val availableProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
|
||||
?: DynamicRangeProfiles(LongArray(0))
|
||||
return availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HLG10)
|
||||
|| availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HDR10)
|
||||
return availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HLG10) ||
|
||||
availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HDR10)
|
||||
}
|
||||
}
|
||||
return false
|
||||
@@ -117,16 +131,10 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
|
||||
return deviceTypes
|
||||
}
|
||||
|
||||
private fun getFieldOfView(): Double {
|
||||
return 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
|
||||
}
|
||||
private fun getFieldOfView(): Double = 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
|
||||
|
||||
private fun getVideoSizes(): List<Size> {
|
||||
return characteristics.getVideoSizes(cameraId, videoFormat)
|
||||
}
|
||||
private fun getPhotoSizes(): List<Size> {
|
||||
return characteristics.getPhotoSizes(ImageFormat.JPEG)
|
||||
}
|
||||
private fun getVideoSizes(): List<Size> = characteristics.getVideoSizes(cameraId, videoFormat)
|
||||
private fun getPhotoSizes(): List<Size> = characteristics.getPhotoSizes(ImageFormat.JPEG)
|
||||
|
||||
private fun getFormats(): ReadableArray {
|
||||
val array = Arguments.createArray()
|
||||
|
@@ -25,6 +25,7 @@ import com.mrousavy.camera.PhotoNotEnabledError
|
||||
import com.mrousavy.camera.RecorderError
|
||||
import com.mrousavy.camera.RecordingInProgressError
|
||||
import com.mrousavy.camera.VideoNotEnabledError
|
||||
import com.mrousavy.camera.core.outputs.CameraOutputs
|
||||
import com.mrousavy.camera.extensions.capture
|
||||
import com.mrousavy.camera.extensions.createCaptureSession
|
||||
import com.mrousavy.camera.extensions.createPhotoCaptureRequest
|
||||
@@ -37,19 +38,23 @@ import com.mrousavy.camera.parsers.QualityPrioritization
|
||||
import com.mrousavy.camera.parsers.VideoCodec
|
||||
import com.mrousavy.camera.parsers.VideoFileType
|
||||
import com.mrousavy.camera.parsers.VideoStabilizationMode
|
||||
import com.mrousavy.camera.core.outputs.CameraOutputs
|
||||
import java.io.Closeable
|
||||
import java.util.concurrent.CancellationException
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.launch
|
||||
import kotlinx.coroutines.sync.Mutex
|
||||
import kotlinx.coroutines.sync.withLock
|
||||
import java.io.Closeable
|
||||
import java.util.concurrent.CancellationException
|
||||
import kotlin.coroutines.CoroutineContext
|
||||
|
||||
class CameraSession(private val context: Context,
|
||||
private val cameraManager: CameraManager,
|
||||
private val onInitialized: () -> Unit,
|
||||
private val onError: (e: Throwable) -> Unit): CoroutineScope, Closeable, CameraOutputs.Callback, CameraManager.AvailabilityCallback() {
|
||||
class CameraSession(
|
||||
private val context: Context,
|
||||
private val cameraManager: CameraManager,
|
||||
private val onInitialized: () -> Unit,
|
||||
private val onError: (e: Throwable) -> Unit
|
||||
) : CameraManager.AvailabilityCallback(),
|
||||
CoroutineScope,
|
||||
Closeable,
|
||||
CameraOutputs.Callback {
|
||||
companion object {
|
||||
private const val TAG = "CameraSession"
|
||||
|
||||
@@ -57,11 +62,13 @@ class CameraSession(private val context: Context,
|
||||
private val CAN_SET_FPS = !Build.MANUFACTURER.equals("samsung", true)
|
||||
}
|
||||
|
||||
data class CapturedPhoto(val image: Image,
|
||||
val metadata: TotalCaptureResult,
|
||||
val orientation: Orientation,
|
||||
val isMirrored: Boolean,
|
||||
val format: Int): Closeable {
|
||||
data class CapturedPhoto(
|
||||
val image: Image,
|
||||
val metadata: TotalCaptureResult,
|
||||
val orientation: Orientation,
|
||||
val isMirrored: Boolean,
|
||||
val format: Int
|
||||
) : Closeable {
|
||||
override fun close() {
|
||||
image.close()
|
||||
}
|
||||
@@ -92,6 +99,7 @@ class CameraSession(private val context: Context,
|
||||
private val mutex = Mutex()
|
||||
private var isRunning = false
|
||||
private var enableTorch = false
|
||||
|
||||
// Video Outputs
|
||||
private var recording: RecordingSession? = null
|
||||
set(value) {
|
||||
@@ -127,18 +135,22 @@ class CameraSession(private val context: Context,
|
||||
return Orientation.fromRotationDegrees(sensorRotation)
|
||||
}
|
||||
|
||||
fun configureSession(cameraId: String,
|
||||
preview: CameraOutputs.PreviewOutput? = null,
|
||||
photo: CameraOutputs.PhotoOutput? = null,
|
||||
video: CameraOutputs.VideoOutput? = null) {
|
||||
fun configureSession(
|
||||
cameraId: String,
|
||||
preview: CameraOutputs.PreviewOutput? = null,
|
||||
photo: CameraOutputs.PhotoOutput? = null,
|
||||
video: CameraOutputs.VideoOutput? = null
|
||||
) {
|
||||
Log.i(TAG, "Configuring Session for Camera $cameraId...")
|
||||
val outputs = CameraOutputs(cameraId,
|
||||
val outputs = CameraOutputs(
|
||||
cameraId,
|
||||
cameraManager,
|
||||
preview,
|
||||
photo,
|
||||
video,
|
||||
hdr == true,
|
||||
this)
|
||||
this
|
||||
)
|
||||
if (this.cameraId == cameraId && this.outputs == outputs && isActive == isRunning) {
|
||||
Log.i(TAG, "Nothing changed in configuration, canceling..")
|
||||
}
|
||||
@@ -156,10 +168,12 @@ class CameraSession(private val context: Context,
|
||||
}
|
||||
}
|
||||
|
||||
fun configureFormat(fps: Int? = null,
|
||||
videoStabilizationMode: VideoStabilizationMode? = null,
|
||||
hdr: Boolean? = null,
|
||||
lowLightBoost: Boolean? = null) {
|
||||
fun configureFormat(
|
||||
fps: Int? = null,
|
||||
videoStabilizationMode: VideoStabilizationMode? = null,
|
||||
hdr: Boolean? = null,
|
||||
lowLightBoost: Boolean? = null
|
||||
) {
|
||||
Log.i(TAG, "Setting Format (fps: $fps | videoStabilization: $videoStabilizationMode | hdr: $hdr | lowLightBoost: $lowLightBoost)...")
|
||||
this.fps = fps
|
||||
this.videoStabilizationMode = videoStabilizationMode
|
||||
@@ -170,18 +184,23 @@ class CameraSession(private val context: Context,
|
||||
val currentOutputs = outputs
|
||||
if (currentOutputs != null && currentOutputs.enableHdr != hdr) {
|
||||
// Update existing HDR for Outputs
|
||||
this.outputs = CameraOutputs(currentOutputs.cameraId,
|
||||
this.outputs = CameraOutputs(
|
||||
currentOutputs.cameraId,
|
||||
cameraManager,
|
||||
currentOutputs.preview,
|
||||
currentOutputs.photo,
|
||||
currentOutputs.video,
|
||||
hdr,
|
||||
this)
|
||||
this
|
||||
)
|
||||
needsReconfiguration = true
|
||||
}
|
||||
launch {
|
||||
if (needsReconfiguration) startRunning()
|
||||
else updateRepeatingRequest()
|
||||
if (needsReconfiguration) {
|
||||
startRunning()
|
||||
} else {
|
||||
updateRepeatingRequest()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -208,12 +227,14 @@ class CameraSession(private val context: Context,
|
||||
videoPipeline.setFrameProcessorOutput(this.frameProcessor)
|
||||
}
|
||||
|
||||
suspend fun takePhoto(qualityPrioritization: QualityPrioritization,
|
||||
flashMode: Flash,
|
||||
enableShutterSound: Boolean,
|
||||
enableRedEyeReduction: Boolean,
|
||||
enableAutoStabilization: Boolean,
|
||||
outputOrientation: Orientation): CapturedPhoto {
|
||||
suspend fun takePhoto(
|
||||
qualityPrioritization: QualityPrioritization,
|
||||
flashMode: Flash,
|
||||
enableShutterSound: Boolean,
|
||||
enableRedEyeReduction: Boolean,
|
||||
enableAutoStabilization: Boolean,
|
||||
outputOrientation: Orientation
|
||||
): CapturedPhoto {
|
||||
val captureSession = captureSession ?: throw CameraNotReadyError()
|
||||
val outputs = outputs ?: throw CameraNotReadyError()
|
||||
|
||||
@@ -223,14 +244,16 @@ class CameraSession(private val context: Context,
|
||||
|
||||
val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id)
|
||||
val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics)
|
||||
val captureRequest = captureSession.device.createPhotoCaptureRequest(cameraManager,
|
||||
photoOutput.surface,
|
||||
zoom,
|
||||
qualityPrioritization,
|
||||
flashMode,
|
||||
enableRedEyeReduction,
|
||||
enableAutoStabilization,
|
||||
orientation)
|
||||
val captureRequest = captureSession.device.createPhotoCaptureRequest(
|
||||
cameraManager,
|
||||
photoOutput.surface,
|
||||
zoom,
|
||||
qualityPrioritization,
|
||||
flashMode,
|
||||
enableRedEyeReduction,
|
||||
enableAutoStabilization,
|
||||
orientation
|
||||
)
|
||||
Log.i(TAG, "Photo capture 1/3 - starting capture...")
|
||||
val result = captureSession.capture(captureRequest, enableShutterSound)
|
||||
val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!!
|
||||
@@ -252,11 +275,13 @@ class CameraSession(private val context: Context,
|
||||
photoOutputSynchronizer.set(image.timestamp, image)
|
||||
}
|
||||
|
||||
suspend fun startRecording(enableAudio: Boolean,
|
||||
codec: VideoCodec,
|
||||
fileType: VideoFileType,
|
||||
callback: (video: RecordingSession.Video) -> Unit,
|
||||
onError: (error: RecorderError) -> Unit) {
|
||||
suspend fun startRecording(
|
||||
enableAudio: Boolean,
|
||||
codec: VideoCodec,
|
||||
fileType: VideoFileType,
|
||||
callback: (video: RecordingSession.Video) -> Unit,
|
||||
onError: (error: RecorderError) -> Unit
|
||||
) {
|
||||
mutex.withLock {
|
||||
if (recording != null) throw RecordingInProgressError()
|
||||
val outputs = outputs ?: throw CameraNotReadyError()
|
||||
@@ -396,9 +421,7 @@ class CameraSession(private val context: Context,
|
||||
// Caches the result of outputs.hashCode() of the last getCaptureSession call
|
||||
private var lastOutputsHashCode: Int? = null
|
||||
|
||||
private suspend fun getCaptureSession(cameraDevice: CameraDevice,
|
||||
outputs: CameraOutputs,
|
||||
onClosed: () -> Unit): CameraCaptureSession {
|
||||
private suspend fun getCaptureSession(cameraDevice: CameraDevice, outputs: CameraOutputs, onClosed: () -> Unit): CameraCaptureSession {
|
||||
val currentSession = captureSession
|
||||
if (currentSession?.device == cameraDevice && outputs.hashCode() == lastOutputsHashCode) {
|
||||
// We already opened a CameraCaptureSession on this device
|
||||
@@ -426,11 +449,13 @@ class CameraSession(private val context: Context,
|
||||
return session
|
||||
}
|
||||
|
||||
private fun getPreviewCaptureRequest(fps: Int? = null,
|
||||
videoStabilizationMode: VideoStabilizationMode? = null,
|
||||
lowLightBoost: Boolean? = null,
|
||||
hdr: Boolean? = null,
|
||||
torch: Boolean? = null): CaptureRequest {
|
||||
private fun getPreviewCaptureRequest(
|
||||
fps: Int? = null,
|
||||
videoStabilizationMode: VideoStabilizationMode? = null,
|
||||
lowLightBoost: Boolean? = null,
|
||||
hdr: Boolean? = null,
|
||||
torch: Boolean? = null
|
||||
): CaptureRequest {
|
||||
val captureRequest = previewRequest ?: throw CameraNotReadyError()
|
||||
|
||||
// FPS
|
||||
@@ -442,9 +467,16 @@ class CameraSession(private val context: Context,
|
||||
captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, videoStabilizationMode?.toOpticalStabilizationMode())
|
||||
|
||||
// Night/HDR Mode
|
||||
val sceneMode = if (hdr == true) CaptureRequest.CONTROL_SCENE_MODE_HDR else if (lowLightBoost == true) CaptureRequest.CONTROL_SCENE_MODE_NIGHT else null
|
||||
val sceneMode = if (hdr ==
|
||||
true
|
||||
) {
|
||||
CaptureRequest.CONTROL_SCENE_MODE_HDR
|
||||
} else if (lowLightBoost == true) CaptureRequest.CONTROL_SCENE_MODE_NIGHT else null
|
||||
captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, sceneMode)
|
||||
captureRequest.set(CaptureRequest.CONTROL_MODE, if (sceneMode != null) CaptureRequest.CONTROL_MODE_USE_SCENE_MODE else CaptureRequest.CONTROL_MODE_AUTO)
|
||||
captureRequest.set(
|
||||
CaptureRequest.CONTROL_MODE,
|
||||
if (sceneMode != null) CaptureRequest.CONTROL_MODE_USE_SCENE_MODE else CaptureRequest.CONTROL_MODE_AUTO
|
||||
)
|
||||
|
||||
// Zoom
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
|
||||
|
@@ -1,4 +1,4 @@
package com.mrousavy.camera.core;
package com.mrousavy.camera.core

import android.media.Image
import kotlinx.coroutines.CompletableDeferred
@@ -12,10 +12,12 @@ import com.mrousavy.camera.extensions.getPreviewSize
|
||||
import kotlin.math.roundToInt
|
||||
|
||||
@SuppressLint("ViewConstructor")
|
||||
class PreviewView(context: Context,
|
||||
cameraManager: CameraManager,
|
||||
cameraId: String,
|
||||
private val onSurfaceChanged: (surface: Surface?) -> Unit): SurfaceView(context) {
|
||||
class PreviewView(
|
||||
context: Context,
|
||||
cameraManager: CameraManager,
|
||||
cameraId: String,
|
||||
private val onSurfaceChanged: (surface: Surface?) -> Unit
|
||||
) : SurfaceView(context) {
|
||||
private val targetSize: Size
|
||||
private val aspectRatio: Float
|
||||
get() = targetSize.width.toFloat() / targetSize.height.toFloat()
|
||||
@@ -26,7 +28,7 @@ class PreviewView(context: Context,
|
||||
|
||||
Log.i(TAG, "Using Preview Size ${targetSize.width} x ${targetSize.height}.")
|
||||
holder.setFixedSize(targetSize.width, targetSize.height)
|
||||
holder.addCallback(object: SurfaceHolder.Callback {
|
||||
holder.addCallback(object : SurfaceHolder.Callback {
|
||||
override fun surfaceCreated(holder: SurfaceHolder) {
|
||||
Log.i(TAG, "Surface created! ${holder.surface}")
|
||||
onSurfaceChanged(holder.surface)
|
||||
|
@@ -14,17 +14,20 @@ import com.mrousavy.camera.parsers.VideoCodec
|
||||
import com.mrousavy.camera.parsers.VideoFileType
|
||||
import java.io.File
|
||||
|
||||
class RecordingSession(context: Context,
|
||||
val size: Size,
|
||||
private val enableAudio: Boolean,
|
||||
private val fps: Int? = null,
|
||||
private val codec: VideoCodec = VideoCodec.H264,
|
||||
private val orientation: Orientation,
|
||||
private val fileType: VideoFileType = VideoFileType.MP4,
|
||||
private val callback: (video: Video) -> Unit,
|
||||
private val onError: (error: RecorderError) -> Unit) {
|
||||
class RecordingSession(
|
||||
context: Context,
|
||||
val size: Size,
|
||||
private val enableAudio: Boolean,
|
||||
private val fps: Int? = null,
|
||||
private val codec: VideoCodec = VideoCodec.H264,
|
||||
private val orientation: Orientation,
|
||||
private val fileType: VideoFileType = VideoFileType.MP4,
|
||||
private val callback: (video: Video) -> Unit,
|
||||
private val onError: (error: RecorderError) -> Unit
|
||||
) {
|
||||
companion object {
|
||||
private const val TAG = "RecordingSession"
|
||||
|
||||
// bits per second
|
||||
private const val VIDEO_BIT_RATE = 10_000_000
|
||||
private const val AUDIO_SAMPLING_RATE = 44_100
|
||||
@@ -67,7 +70,7 @@ class RecordingSession(context: Context,
|
||||
recorder.setAudioChannels(AUDIO_CHANNELS)
|
||||
}
|
||||
recorder.setInputSurface(surface)
|
||||
//recorder.setOrientationHint(orientation.toDegrees())
|
||||
// recorder.setOrientationHint(orientation.toDegrees())
|
||||
|
||||
recorder.setOnErrorListener { _, what, extra ->
|
||||
Log.e(TAG, "MediaRecorder Error: $what ($extra)")
|
||||
|
@@ -3,7 +3,6 @@ package com.mrousavy.camera.core
import android.graphics.ImageFormat
import android.media.ImageReader
import android.media.ImageWriter
import android.media.MediaRecorder
import android.util.Log
import android.view.Surface
import com.mrousavy.camera.CameraQueues
@@ -13,10 +12,9 @@ import com.mrousavy.camera.parsers.Orientation
import java.io.Closeable

@Suppress("JoinDeclarationAndAssignment")
class VideoPipeline(val width: Int,
                    val height: Int,
                    val format: Int = ImageFormat.PRIVATE,
                    private val isMirrored: Boolean = false): ImageReader.OnImageAvailableListener, Closeable {
class VideoPipeline(val width: Int, val height: Int, val format: Int = ImageFormat.PRIVATE, private val isMirrored: Boolean = false) :
  ImageReader.OnImageAvailableListener,
  Closeable {
  companion object {
    private const val MAX_IMAGES = 3
    private const val TAG = "VideoPipeline"
@@ -9,32 +9,35 @@ import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.core.VideoPipeline
import com.mrousavy.camera.extensions.closestToOrMax
import com.mrousavy.camera.extensions.getPhotoSizes
import com.mrousavy.camera.extensions.getPreviewSize
import com.mrousavy.camera.extensions.getVideoSizes
import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable

class CameraOutputs(val cameraId: String,
                    cameraManager: CameraManager,
                    val preview: PreviewOutput? = null,
                    val photo: PhotoOutput? = null,
                    val video: VideoOutput? = null,
                    val enableHdr: Boolean? = false,
                    val callback: Callback): Closeable {
class CameraOutputs(
  val cameraId: String,
  cameraManager: CameraManager,
  val preview: PreviewOutput? = null,
  val photo: PhotoOutput? = null,
  val video: VideoOutput? = null,
  val enableHdr: Boolean? = false,
  val callback: Callback
) : Closeable {
  companion object {
    private const val TAG = "CameraOutputs"
    const val PHOTO_OUTPUT_BUFFER_SIZE = 3
  }

  data class PreviewOutput(val surface: Surface)
  data class PhotoOutput(val targetSize: Size? = null,
                         val format: Int = ImageFormat.JPEG)
  data class VideoOutput(val targetSize: Size? = null,
                         val enableRecording: Boolean = false,
                         val enableFrameProcessor: Boolean? = false,
                         val format: Int = ImageFormat.PRIVATE)
  data class PhotoOutput(val targetSize: Size? = null, val format: Int = ImageFormat.JPEG)
  data class VideoOutput(
    val targetSize: Size? = null,
    val enableRecording: Boolean = false,
    val enableFrameProcessor: Boolean? = false,
    val format: Int = ImageFormat.PRIVATE
  )

  interface Callback {
    fun onPhotoCaptured(image: Image)
@@ -58,14 +61,14 @@ class CameraOutputs(val cameraId: String,

  override fun equals(other: Any?): Boolean {
    if (other !is CameraOutputs) return false
    return this.cameraId == other.cameraId
      && this.preview?.surface == other.preview?.surface
      && this.photo?.targetSize == other.photo?.targetSize
      && this.photo?.format == other.photo?.format
      && this.video?.enableRecording == other.video?.enableRecording
      && this.video?.targetSize == other.video?.targetSize
      && this.video?.format == other.video?.format
      && this.enableHdr == other.enableHdr
    return this.cameraId == other.cameraId &&
      this.preview?.surface == other.preview?.surface &&
      this.photo?.targetSize == other.photo?.targetSize &&
      this.photo?.format == other.photo?.format &&
      this.video?.enableRecording == other.video?.enableRecording &&
      this.video?.targetSize == other.video?.targetSize &&
      this.video?.format == other.video?.format &&
      this.enableHdr == other.enableHdr
  }

  override fun hashCode(): Int {
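For context while reviewing, here is a minimal hypothetical call site for the reworked CameraOutputs constructor and its nested output types as declared in the hunks above (not part of this commit; `cameraManager`, `previewSurface` and `callback` are assumed to exist in the caller):

// Sketch only, not part of the diff: constructing CameraOutputs with its nested output types.
// `cameraManager`, `previewSurface` and `callback` are assumed to be provided by the caller.
val outputs = CameraOutputs(
  cameraId = "0",
  cameraManager = cameraManager,
  preview = CameraOutputs.PreviewOutput(previewSurface),
  photo = CameraOutputs.PhotoOutput(targetSize = Size(4032, 3024), format = ImageFormat.JPEG),
  video = CameraOutputs.VideoOutput(enableRecording = true, enableFrameProcessor = false, format = ImageFormat.PRIVATE),
  enableHdr = false,
  callback = callback
)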
@@ -5,15 +5,18 @@ import android.util.Log
import android.util.Size
import java.io.Closeable

class ImageReaderOutput(private val imageReader: ImageReader,
                        outputType: OutputType,
                        dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(imageReader.surface, Size(imageReader.width, imageReader.height), outputType, dynamicRangeProfile) {
class ImageReaderOutput(private val imageReader: ImageReader, outputType: OutputType, dynamicRangeProfile: Long? = null) :
  SurfaceOutput(
    imageReader.surface,
    Size(imageReader.width, imageReader.height),
    outputType,
    dynamicRangeProfile
  ),
  Closeable {
  override fun close() {
    Log.i(TAG, "Closing ${imageReader.width}x${imageReader.height} $outputType ImageReader..")
    imageReader.close()
  }

  override fun toString(): String {
    return "$outputType (${imageReader.width} x ${imageReader.height} in format #${imageReader.imageFormat})"
  }
  override fun toString(): String = "$outputType (${imageReader.width} x ${imageReader.height} in format #${imageReader.imageFormat})"
}
@@ -10,11 +10,13 @@ import android.view.Surface
import androidx.annotation.RequiresApi
import java.io.Closeable

open class SurfaceOutput(val surface: Surface,
                         val size: Size,
                         val outputType: OutputType,
                         private val dynamicRangeProfile: Long? = null,
                         private val closeSurfaceOnEnd: Boolean = false): Closeable {
open class SurfaceOutput(
  val surface: Surface,
  val size: Size,
  val outputType: OutputType,
  private val dynamicRangeProfile: Long? = null,
  private val closeSurfaceOnEnd: Boolean = false
) : Closeable {
  companion object {
    const val TAG = "SurfaceOutput"

@@ -47,9 +49,7 @@ open class SurfaceOutput(val surface: Surface,
    return result
  }

  override fun toString(): String {
    return "$outputType (${size.width} x ${size.height})"
  }
  override fun toString(): String = "$outputType (${size.width} x ${size.height})"

  override fun close() {
    if (closeSurfaceOnEnd) {
@@ -64,13 +64,12 @@ open class SurfaceOutput(val surface: Surface,
    VIDEO_AND_PREVIEW;

    @RequiresApi(Build.VERSION_CODES.TIRAMISU)
    fun toOutputType(): Int {
      return when(this) {
    fun toOutputType(): Int =
      when (this) {
        PHOTO -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE
        VIDEO -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD
        PREVIEW -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW
        VIDEO_AND_PREVIEW -> CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL
      }
    }
  }
}
@@ -5,15 +5,18 @@ import android.util.Size
import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable

class VideoPipelineOutput(val videoPipeline: VideoPipeline,
                          outputType: OutputType,
                          dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(videoPipeline.surface, Size(videoPipeline.width, videoPipeline.height), outputType, dynamicRangeProfile) {
class VideoPipelineOutput(val videoPipeline: VideoPipeline, outputType: OutputType, dynamicRangeProfile: Long? = null) :
  SurfaceOutput(
    videoPipeline.surface,
    Size(videoPipeline.width, videoPipeline.height),
    outputType,
    dynamicRangeProfile
  ),
  Closeable {
  override fun close() {
    Log.i(TAG, "Closing ${videoPipeline.width}x${videoPipeline.height} Video Pipeline..")
    videoPipeline.close()
  }

  override fun toString(): String {
    return "$outputType (${videoPipeline.width} x ${videoPipeline.height} in format #${videoPipeline.format})"
  }
  override fun toString(): String = "$outputType (${videoPipeline.width} x ${videoPipeline.height} in format #${videoPipeline.format})"
}
@@ -12,42 +12,37 @@ import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine

suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult {
  return suspendCoroutine { continuation ->
    this.capture(captureRequest, object: CameraCaptureSession.CaptureCallback() {
      override fun onCaptureCompleted(
        session: CameraCaptureSession,
        request: CaptureRequest,
        result: TotalCaptureResult
      ) {
        super.onCaptureCompleted(session, request, result)
suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult =
  suspendCoroutine { continuation ->
    this.capture(
      captureRequest,
      object : CameraCaptureSession.CaptureCallback() {
        override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
          super.onCaptureCompleted(session, request, result)

          continuation.resume(result)
        }

        override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) {
          super.onCaptureStarted(session, request, timestamp, frameNumber)

          if (enableShutterSound) {
            val mediaActionSound = MediaActionSound()
            mediaActionSound.play(MediaActionSound.SHUTTER_CLICK)
        continuation.resume(result)
      }
      }

      override fun onCaptureFailed(
        session: CameraCaptureSession,
        request: CaptureRequest,
        failure: CaptureFailure
      ) {
        super.onCaptureFailed(session, request, failure)
        val wasImageCaptured = failure.wasImageCaptured()
        val error = when (failure.reason) {
          CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured)
          CaptureFailure.REASON_FLUSHED -> CaptureAbortedError(wasImageCaptured)
          else -> UnknownCaptureError(wasImageCaptured)
        override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) {
          super.onCaptureStarted(session, request, timestamp, frameNumber)

          if (enableShutterSound) {
            val mediaActionSound = MediaActionSound()
            mediaActionSound.play(MediaActionSound.SHUTTER_CLICK)
          }
        }
        continuation.resumeWithException(error)
      }
    }, CameraQueues.cameraQueue.handler)

        override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
          super.onCaptureFailed(session, request, failure)
          val wasImageCaptured = failure.wasImageCaptured()
          val error = when (failure.reason) {
            CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured)
            CaptureFailure.REASON_FLUSHED -> CaptureAbortedError(wasImageCaptured)
            else -> UnknownCaptureError(wasImageCaptured)
          }
          continuation.resumeWithException(error)
        }
      },
      CameraQueues.cameraQueue.handler
    )
  }
}
@@ -8,29 +8,33 @@ import android.hardware.camera2.params.OutputConfiguration
import android.hardware.camera2.params.SessionConfiguration
import android.os.Build
import android.util.Log
import androidx.annotation.RequiresApi
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.CameraSessionCannotBeConfiguredError
import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlinx.coroutines.suspendCancellableCoroutine

private const val TAG = "CreateCaptureSession"
private var sessionId = 1000

suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
                                              outputs: CameraOutputs,
                                              onClosed: (session: CameraCaptureSession) -> Unit,
                                              queue: CameraQueues.CameraQueue): CameraCaptureSession {
  return suspendCancellableCoroutine { continuation ->
suspend fun CameraDevice.createCaptureSession(
  cameraManager: CameraManager,
  outputs: CameraOutputs,
  onClosed: (session: CameraCaptureSession) -> Unit,
  queue: CameraQueues.CameraQueue
): CameraCaptureSession =
  suspendCancellableCoroutine { continuation ->
    val characteristics = cameraManager.getCameraCharacteristics(id)
    val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!
    val sessionId = sessionId++
    Log.i(TAG, "Camera $id: Creating Capture Session #$sessionId... " +
      "Hardware Level: $hardwareLevel} | Outputs: $outputs")
    Log.i(
      TAG,
      "Camera $id: Creating Capture Session #$sessionId... " +
        "Hardware Level: $hardwareLevel} | Outputs: $outputs"
    )

    val callback = object: CameraCaptureSession.StateCallback() {
    val callback = object : CameraCaptureSession.StateCallback() {
      override fun onConfigured(session: CameraCaptureSession) {
        Log.i(TAG, "Camera $id: Capture Session #$sessionId configured!")
        continuation.resume(session)
@@ -78,4 +82,3 @@ suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
    this.createCaptureSessionByOutputConfigurations(outputConfigurations, callback, queue.handler)
    }
  }
}
@@ -23,14 +23,16 @@ private fun supportsSnapshotCapture(cameraCharacteristics: CameraCharacteristics
  return true
}

fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager,
                                           surface: Surface,
                                           zoom: Float,
                                           qualityPrioritization: QualityPrioritization,
                                           flashMode: Flash,
                                           enableRedEyeReduction: Boolean,
                                           enableAutoStabilization: Boolean,
                                           orientation: Orientation): CaptureRequest {
fun CameraDevice.createPhotoCaptureRequest(
  cameraManager: CameraManager,
  surface: Surface,
  zoom: Float,
  qualityPrioritization: QualityPrioritization,
  flashMode: Flash,
  enableRedEyeReduction: Boolean,
  enableAutoStabilization: Boolean,
  orientation: Orientation
): CaptureRequest {
  val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id)

  val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) {
@@ -9,20 +9,22 @@ import com.mrousavy.camera.CameraCannotBeOpenedError
import com.mrousavy.camera.CameraDisconnectedError
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.parsers.CameraDeviceError
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlinx.coroutines.suspendCancellableCoroutine

private const val TAG = "CameraManager"

@SuppressLint("MissingPermission")
suspend fun CameraManager.openCamera(cameraId: String,
                                     onDisconnected: (camera: CameraDevice, reason: Throwable) -> Unit,
                                     queue: CameraQueues.CameraQueue): CameraDevice {
  return suspendCancellableCoroutine { continuation ->
suspend fun CameraManager.openCamera(
  cameraId: String,
  onDisconnected: (camera: CameraDevice, reason: Throwable) -> Unit,
  queue: CameraQueues.CameraQueue
): CameraDevice =
  suspendCancellableCoroutine { continuation ->
    Log.i(TAG, "Camera $cameraId: Opening...")

    val callback = object: CameraDevice.StateCallback() {
    val callback = object : CameraDevice.StateCallback() {
      override fun onOpened(camera: CameraDevice) {
        Log.i(TAG, "Camera $cameraId: Opened!")
        continuation.resume(camera)
@@ -56,4 +58,3 @@ suspend fun CameraManager.openCamera(cameraId: String,
    this.openCamera(cameraId, callback, queue.handler)
    }
  }
}
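To make the new coroutine-based extension signatures easier to review, here is a hypothetical call site (not part of this commit) chaining openCamera, createCaptureSession and capture as declared in the hunks above; `outputs` (CameraOutputs) and `request` (CaptureRequest) are assumed to be built elsewhere:

// Sketch only, not part of the diff: how the suspend extensions above compose.
// `outputs` and `request` are assumed to be constructed by the caller.
suspend fun openAndCapture(
  cameraManager: CameraManager,
  cameraId: String,
  outputs: CameraOutputs,
  request: CaptureRequest
): TotalCaptureResult {
  val device = cameraManager.openCamera(cameraId, onDisconnected = { _, reason ->
    Log.w("Example", "Camera $cameraId disconnected: $reason")
  }, queue = CameraQueues.cameraQueue)
  val session = device.createCaptureSession(cameraManager, outputs, onClosed = { closed ->
    Log.i("Example", "Session $closed closed")
  }, queue = CameraQueues.cameraQueue)
  return session.capture(request, enableShutterSound = false)
}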
@@ -1,5 +1,3 @@
package com.mrousavy.camera.extensions

fun <T> List<T>.containsAny(elements: List<T>): Boolean {
  return elements.any { element -> this.contains(element) }
}
fun <T> List<T>.containsAny(elements: List<T>): Boolean = elements.any { element -> this.contains(element) }
@@ -7,23 +7,21 @@ import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min

fun List<Size>.closestToOrMax(size: Size?): Size {
  return if (size != null) {
fun List<Size>.closestToOrMax(size: Size?): Size =
  if (size != null) {
    this.minBy { abs(it.width - size.width) + abs(it.height - size.height) }
  } else {
    this.maxBy { it.width * it.height }
  }
}

fun Size.rotated(surfaceRotation: Int): Size {
  return when (surfaceRotation) {
fun Size.rotated(surfaceRotation: Int): Size =
  when (surfaceRotation) {
    Surface.ROTATION_0 -> Size(width, height)
    Surface.ROTATION_90 -> Size(height, width)
    Surface.ROTATION_180 -> Size(width, height)
    Surface.ROTATION_270 -> Size(height, width)
    else -> Size(width, height)
  }
}

val Size.bigger: Int
  get() = max(width, height)
@@ -35,7 +33,4 @@ val SizeF.bigger: Float
val SizeF.smaller: Float
  get() = min(this.width, this.height)

operator fun Size.compareTo(other: Size): Int {
  return (this.width * this.height).compareTo(other.width * other.height)
}

operator fun Size.compareTo(other: Size): Int = (this.width * this.height).compareTo(other.width * other.height)
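A brief hypothetical example (not part of this commit) of how the Size helpers above behave:

// Sketch only, not part of the diff: behaviour of the Size extensions above.
val sizes = listOf(Size(1280, 720), Size(1920, 1080), Size(3840, 2160))
val closest = sizes.closestToOrMax(Size(2000, 1500))  // 1920x1080 (smallest width/height distance)
val largest = sizes.closestToOrMax(null)              // 3840x2160 (maximum pixel area)
val isBigger = Size(3840, 2160) > Size(1920, 1080)    // true, compareTo() compares pixel area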
@@ -26,6 +26,7 @@ class VisionCameraProxy(context: ReactApplicationContext) {
      }
    }
  }

  @DoNotStrip
  @Keep
  private var mHybridData: HybridData
@@ -45,7 +46,7 @@ class VisionCameraProxy(context: ReactApplicationContext) {
    Log.d(TAG, "Finding view $viewId...")
    val ctx = mContext.get()
    val view = if (ctx != null) UIManagerHelper.getUIManager(ctx, viewId)?.resolveView(viewId) as CameraView? else null
    Log.d(TAG, if (view != null) "Found view $viewId!" else "Couldn't find view $viewId!")
    Log.d(TAG, if (view != null) "Found view $viewId!" else "Couldn't find view $viewId!")
    return view ?: throw ViewNotFoundError(viewId)
  }

@@ -69,12 +70,9 @@ class VisionCameraProxy(context: ReactApplicationContext) {

  @DoNotStrip
  @Keep
  fun getFrameProcessorPlugin(name: String, options: Map<String, Any>): FrameProcessorPlugin {
    return FrameProcessorPluginRegistry.getPlugin(name, options)
  }
  fun getFrameProcessorPlugin(name: String, options: Map<String, Any>): FrameProcessorPlugin =
    FrameProcessorPluginRegistry.getPlugin(name, options)

  // private C++ funcs
  private external fun initHybrid(jsContext: Long,
                                  jsCallInvokerHolder: CallInvokerHolderImpl,
                                  scheduler: VisionCameraScheduler): HybridData
  private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData
}
@@ -2,7 +2,7 @@ package com.mrousavy.camera.parsers

import android.hardware.camera2.CameraDevice

enum class CameraDeviceError(override val unionValue: String): JSUnionValue {
enum class CameraDeviceError(override val unionValue: String) : JSUnionValue {
  CAMERA_ALREADY_IN_USE("camera-already-in-use"),
  TOO_MANY_OPEN_CAMERAS("too-many-open-cameras"),
  CAMERA_IS_DISABLED_BY_ANDROID("camera-is-disabled-by-android"),
@@ -11,8 +11,8 @@ enum class CameraDeviceError(override val unionValue: String): JSUnionValue {
  DISCONNECTED("camera-has-been-disconnected");

  companion object {
    fun fromCameraDeviceError(cameraDeviceError: Int): CameraDeviceError {
      return when (cameraDeviceError) {
    fun fromCameraDeviceError(cameraDeviceError: Int): CameraDeviceError =
      when (cameraDeviceError) {
        CameraDevice.StateCallback.ERROR_CAMERA_IN_USE -> CAMERA_ALREADY_IN_USE
        CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE -> TOO_MANY_OPEN_CAMERAS
        CameraDevice.StateCallback.ERROR_CAMERA_DISABLED -> CAMERA_IS_DISABLED_BY_ANDROID
@@ -20,6 +20,5 @@ enum class CameraDeviceError(override val unionValue: String): JSUnionValue {
        CameraDevice.StateCallback.ERROR_CAMERA_SERVICE -> UNKNOWN_FATAL_CAMERA_SERVICE_ERROR
        else -> UNKNOWN_CAMERA_DEVICE_ERROR
      }
    }
  }
}
@@ -2,19 +2,18 @@ package com.mrousavy.camera.parsers

import com.mrousavy.camera.InvalidTypeScriptUnionError

enum class Flash(override val unionValue: String): JSUnionValue {
enum class Flash(override val unionValue: String) : JSUnionValue {
  OFF("off"),
  ON("on"),
  AUTO("auto");

  companion object: JSUnionValue.Companion<Flash> {
    override fun fromUnionValue(unionValue: String?): Flash {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<Flash> {
    override fun fromUnionValue(unionValue: String?): Flash =
      when (unionValue) {
        "off" -> OFF
        "on" -> ON
        "auto" -> AUTO
        else -> throw InvalidTypeScriptUnionError("flash", unionValue ?: "(null)")
      }
    }
  }
}
@@ -2,16 +2,16 @@ package com.mrousavy.camera.parsers

import android.hardware.camera2.CameraCharacteristics

enum class HardwareLevel(override val unionValue: String): JSUnionValue {
enum class HardwareLevel(override val unionValue: String) : JSUnionValue {
  LEGACY("legacy"),
  LIMITED("limited"),
  EXTERNAL("external"),
  EXTERNAL("limited"),
  FULL("full"),
  LEVEL_3("level-3");
  LEVEL_3("full");

  companion object {
    fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel {
      return when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) {
    fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel =
      when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) {
        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY -> LEGACY
        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED -> LIMITED
        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL -> EXTERNAL
@@ -19,6 +19,5 @@ enum class HardwareLevel(override val unionValue: String): JSUnionValue {
        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3 -> LEVEL_3
        else -> LEGACY
      }
    }
  }
}
@@ -2,19 +2,18 @@ package com.mrousavy.camera.parsers

import android.hardware.camera2.CameraCharacteristics

enum class LensFacing(override val unionValue: String): JSUnionValue {
enum class LensFacing(override val unionValue: String) : JSUnionValue {
  BACK("back"),
  FRONT("front"),
  EXTERNAL("external");

  companion object {
    fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): LensFacing {
      return when (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)!!) {
    fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): LensFacing =
      when (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)!!) {
        CameraCharacteristics.LENS_FACING_BACK -> BACK
        CameraCharacteristics.LENS_FACING_FRONT -> FRONT
        CameraCharacteristics.LENS_FACING_EXTERNAL -> EXTERNAL
        else -> EXTERNAL
      }
    }
  }
}
@@ -2,20 +2,19 @@ package com.mrousavy.camera.parsers

import android.hardware.camera2.CameraCharacteristics

enum class Orientation(override val unionValue: String): JSUnionValue {
enum class Orientation(override val unionValue: String) : JSUnionValue {
  PORTRAIT("portrait"),
  LANDSCAPE_RIGHT("landscape-right"),
  PORTRAIT_UPSIDE_DOWN("portrait-upside-down"),
  LANDSCAPE_LEFT("landscape-left");

  fun toDegrees(): Int {
    return when(this) {
  fun toDegrees(): Int =
    when (this) {
      PORTRAIT -> 0
      LANDSCAPE_RIGHT -> 90
      PORTRAIT_UPSIDE_DOWN -> 180
      LANDSCAPE_LEFT -> 270
    }
  }

  fun toSensorRelativeOrientation(cameraCharacteristics: CameraCharacteristics): Orientation {
    val sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
@@ -33,24 +32,22 @@ enum class Orientation(override val unionValue: String): JSUnionValue {
    return fromRotationDegrees(newRotationDegrees)
  }

  companion object: JSUnionValue.Companion<Orientation> {
    override fun fromUnionValue(unionValue: String?): Orientation? {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<Orientation> {
    override fun fromUnionValue(unionValue: String?): Orientation? =
      when (unionValue) {
        "portrait" -> PORTRAIT
        "landscape-right" -> LANDSCAPE_RIGHT
        "portrait-upside-down" -> PORTRAIT_UPSIDE_DOWN
        "landscape-left" -> LANDSCAPE_LEFT
        else -> null
      }
    }

    fun fromRotationDegrees(rotationDegrees: Int): Orientation {
      return when (rotationDegrees) {
    fun fromRotationDegrees(rotationDegrees: Int): Orientation =
      when (rotationDegrees) {
        in 45..135 -> LANDSCAPE_RIGHT
        in 135..225 -> PORTRAIT_UPSIDE_DOWN
        in 225..315 -> LANDSCAPE_LEFT
        else -> PORTRAIT
      }
    }
  }
}
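As a quick reality check on the JSUnionValue parser enums above, a hypothetical example (not part of this commit):

// Sketch only, not part of the diff: exercising the parser enums above.
val orientation = Orientation.fromRotationDegrees(92)   // LANDSCAPE_RIGHT, since 92 falls in 45..135
val degrees = orientation.toDegrees()                   // 90
val flash = Flash.fromUnionValue("auto")                // AUTO, parsed from the JS union value
val fallback = Orientation.fromUnionValue("diagonal")   // null, unknown union values are rejected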
@@ -2,18 +2,17 @@ package com.mrousavy.camera.parsers

import android.content.pm.PackageManager

enum class PermissionStatus(override val unionValue: String): JSUnionValue {
enum class PermissionStatus(override val unionValue: String) : JSUnionValue {
  DENIED("denied"),
  NOT_DETERMINED("not-determined"),
  GRANTED("granted");

  companion object {
    fun fromPermissionStatus(status: Int): PermissionStatus {
      return when (status) {
    fun fromPermissionStatus(status: Int): PermissionStatus =
      when (status) {
        PackageManager.PERMISSION_DENIED -> DENIED
        PackageManager.PERMISSION_GRANTED -> GRANTED
        else -> NOT_DETERMINED
      }
    }
  }
}
@@ -4,7 +4,7 @@ import android.graphics.ImageFormat
import com.mrousavy.camera.PixelFormatNotSupportedError

@Suppress("FoldInitializerAndIfToElvis")
enum class PixelFormat(override val unionValue: String): JSUnionValue {
enum class PixelFormat(override val unionValue: String) : JSUnionValue {
  YUV("yuv"),
  RGB("rgb"),
  DNG("dng"),
@@ -25,19 +25,18 @@ enum class PixelFormat(override val unionValue: String): JSUnionValue {
    return result
  }

  companion object: JSUnionValue.Companion<PixelFormat> {
    fun fromImageFormat(imageFormat: Int): PixelFormat {
      return when (imageFormat) {
  companion object : JSUnionValue.Companion<PixelFormat> {
    fun fromImageFormat(imageFormat: Int): PixelFormat =
      when (imageFormat) {
        ImageFormat.YUV_420_888 -> YUV
        ImageFormat.JPEG, ImageFormat.DEPTH_JPEG -> RGB
        ImageFormat.RAW_SENSOR -> DNG
        ImageFormat.PRIVATE -> NATIVE
        else -> UNKNOWN
      }
    }

    override fun fromUnionValue(unionValue: String?): PixelFormat? {
      return when (unionValue) {
    override fun fromUnionValue(unionValue: String?): PixelFormat? =
      when (unionValue) {
        "yuv" -> YUV
        "rgb" -> RGB
        "dng" -> DNG
@@ -45,6 +44,5 @@ enum class PixelFormat(override val unionValue: String): JSUnionValue {
        "unknown" -> UNKNOWN
        else -> null
      }
    }
  }
}
@@ -1,18 +1,17 @@
package com.mrousavy.camera.parsers

enum class QualityPrioritization(override val unionValue: String): JSUnionValue {
enum class QualityPrioritization(override val unionValue: String) : JSUnionValue {
  SPEED("speed"),
  BALANCED("balanced"),
  QUALITY("quality");

  companion object: JSUnionValue.Companion<QualityPrioritization> {
    override fun fromUnionValue(unionValue: String?): QualityPrioritization {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<QualityPrioritization> {
    override fun fromUnionValue(unionValue: String?): QualityPrioritization =
      when (unionValue) {
        "speed" -> SPEED
        "balanced" -> BALANCED
        "quality" -> QUALITY
        else -> BALANCED
      }
    }
  }
}
@@ -1,16 +1,15 @@
package com.mrousavy.camera.parsers

enum class Torch(override val unionValue: String): JSUnionValue {
enum class Torch(override val unionValue: String) : JSUnionValue {
  OFF("off"),
  ON("on");

  companion object: JSUnionValue.Companion<Torch> {
    override fun fromUnionValue(unionValue: String?): Torch {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<Torch> {
    override fun fromUnionValue(unionValue: String?): Torch =
      when (unionValue) {
        "off" -> OFF
        "on" -> ON
        else -> OFF
      }
    }
  }
}
@@ -2,24 +2,22 @@ package com.mrousavy.camera.parsers

import android.media.MediaRecorder

enum class VideoCodec(override val unionValue: String): JSUnionValue {
enum class VideoCodec(override val unionValue: String) : JSUnionValue {
  H264("h264"),
  H265("h265");

  fun toVideoCodec(): Int {
    return when (this) {
  fun toVideoCodec(): Int =
    when (this) {
      H264 -> MediaRecorder.VideoEncoder.H264
      H265 -> MediaRecorder.VideoEncoder.HEVC
    }
  }

  companion object: JSUnionValue.Companion<VideoCodec> {
    override fun fromUnionValue(unionValue: String?): VideoCodec {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<VideoCodec> {
    override fun fromUnionValue(unionValue: String?): VideoCodec =
      when (unionValue) {
        "h264" -> H264
        "h265" -> H265
        else -> H264
      }
    }
  }
}
@@ -2,24 +2,22 @@ package com.mrousavy.camera.parsers

import com.mrousavy.camera.InvalidTypeScriptUnionError

enum class VideoFileType(override val unionValue: String): JSUnionValue {
enum class VideoFileType(override val unionValue: String) : JSUnionValue {
  MOV("mov"),
  MP4("mp4");

  fun toExtension(): String {
    return when (this) {
  fun toExtension(): String =
    when (this) {
      MOV -> ".mov"
      MP4 -> ".mp4"
    }
  }

  companion object: JSUnionValue.Companion<VideoFileType> {
    override fun fromUnionValue(unionValue: String?): VideoFileType {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<VideoFileType> {
    override fun fromUnionValue(unionValue: String?): VideoFileType =
      when (unionValue) {
        "mov" -> MOV
        "mp4" -> MP4
        else -> throw InvalidTypeScriptUnionError("fileType", unionValue ?: "(null)")
      }
    }
  }
}
@@ -6,54 +6,49 @@ import android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_
import android.hardware.camera2.CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_OFF
import android.hardware.camera2.CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON

enum class VideoStabilizationMode(override val unionValue: String): JSUnionValue {
enum class VideoStabilizationMode(override val unionValue: String) : JSUnionValue {
  OFF("off"),
  STANDARD("standard"),
  CINEMATIC("cinematic"),
  CINEMATIC_EXTENDED("cinematic-extended");

  fun toDigitalStabilizationMode(): Int {
    return when (this) {
  fun toDigitalStabilizationMode(): Int =
    when (this) {
      OFF -> CONTROL_VIDEO_STABILIZATION_MODE_OFF
      STANDARD -> CONTROL_VIDEO_STABILIZATION_MODE_ON
      CINEMATIC -> 2 /* CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION */
      CINEMATIC -> 2 // TODO: CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION
      else -> CONTROL_VIDEO_STABILIZATION_MODE_OFF
    }
  }

  fun toOpticalStabilizationMode(): Int {
    return when (this) {
  fun toOpticalStabilizationMode(): Int =
    when (this) {
      OFF -> LENS_OPTICAL_STABILIZATION_MODE_OFF
      CINEMATIC_EXTENDED -> LENS_OPTICAL_STABILIZATION_MODE_ON
      else -> LENS_OPTICAL_STABILIZATION_MODE_OFF
    }
  }

  companion object: JSUnionValue.Companion<VideoStabilizationMode> {
    override fun fromUnionValue(unionValue: String?): VideoStabilizationMode? {
      return when (unionValue) {
  companion object : JSUnionValue.Companion<VideoStabilizationMode> {
    override fun fromUnionValue(unionValue: String?): VideoStabilizationMode? =
      when (unionValue) {
        "off" -> OFF
        "standard" -> STANDARD
        "cinematic" -> CINEMATIC
        "cinematic-extended" -> CINEMATIC_EXTENDED
        else -> null
      }
    }

    fun fromDigitalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode {
      return when (stabiliazionMode) {
    fun fromDigitalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode =
      when (stabiliazionMode) {
        CONTROL_VIDEO_STABILIZATION_MODE_OFF -> OFF
        CONTROL_VIDEO_STABILIZATION_MODE_ON -> STANDARD
        CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION -> CINEMATIC
        else -> OFF
      }
    }
    fun fromOpticalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode {
      return when (stabiliazionMode) {
    fun fromOpticalVideoStabilizationMode(stabiliazionMode: Int): VideoStabilizationMode =
      when (stabiliazionMode) {
        LENS_OPTICAL_STABILIZATION_MODE_OFF -> OFF
        LENS_OPTICAL_STABILIZATION_MODE_ON -> CINEMATIC_EXTENDED
        else -> OFF
      }
    }
  }
}