Add Android

Marc Rousavy 2021-02-19 16:28:14 +01:00
parent 00c8970366
commit 4d42be6436
28 changed files with 1742 additions and 29 deletions

View File

@ -1,17 +1,16 @@
-package com.reactnativevisioncamera
+package com.cuvent.experiences.friends.camera
import com.facebook.react.ReactPackage
import com.facebook.react.bridge.NativeModule
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager
-class VisionCameraPackage : ReactPackage {
+class CameraPackage : ReactPackage {
override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> {
-return listOf(VisionCameraModule(reactContext))
+return listOf(CameraViewModule(reactContext))
}
override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> {
-return emptyList()
+return listOf(CameraViewManager())
}
}

View File

@ -0,0 +1,26 @@
package com.cuvent.experiences.friends.camera
import androidx.camera.core.FocusMeteringAction
import androidx.camera.core.SurfaceOrientedMeteringPointFactory
import com.facebook.react.bridge.ReadableMap
import kotlinx.coroutines.guava.await
import java.util.concurrent.TimeUnit
suspend fun CameraView.focus(pointMap: ReadableMap) {
val cameraControl = camera?.cameraControl ?: throw CameraNotReadyError()
if (!pointMap.hasKey("x") || !pointMap.hasKey("y")) {
throw InvalidTypeScriptUnionError("point", pointMap.toString())
}
val dpi = resources.displayMetrics.density
val x = pointMap.getDouble("x") * dpi
val y = pointMap.getDouble("y") * dpi
val factory = SurfaceOrientedMeteringPointFactory(this.width.toFloat(), this.height.toFloat())
val point = factory.createPoint(x.toFloat(), y.toFloat())
val action = FocusMeteringAction.Builder(point, FocusMeteringAction.FLAG_AF or FocusMeteringAction.FLAG_AE)
.setAutoCancelDuration(5, TimeUnit.SECONDS) // auto-reset after 5 seconds
.build()
cameraControl.startFocusAndMetering(action).await()
}

View File

@ -0,0 +1,71 @@
package com.cuvent.experiences.friends.camera
import android.annotation.SuppressLint
import androidx.camera.core.VideoCapture
import com.cuvent.experiences.friends.camera.utils.makeErrorMap
import com.cuvent.experiences.friends.camera.utils.withSuspendablePromise
import com.facebook.react.bridge.*
import kotlinx.coroutines.*
import java.io.File
data class TemporaryFile(val path: String)
@SuppressLint("RestrictedApi")
suspend fun CameraView.startRecording(options: ReadableMap, onRecordCallback: Callback): TemporaryFile {
if (videoCapture == null) {
throw CameraNotReadyError()
}
if (options.hasKey("flash")) {
val enableFlash = options.getString("flash") == "on"
// overrides current torch mode value to enable flash while recording
camera!!.cameraControl.enableTorch(enableFlash)
}
@Suppress("BlockingMethodInNonBlockingContext") // in withContext we are not blocking. False positive.
val videoFile = withContext(Dispatchers.IO) {
File.createTempFile("video", ".mp4", context.cacheDir).apply { deleteOnExit() }
}
val videoFileOptions = VideoCapture.OutputFileOptions.Builder(videoFile)
videoCapture!!.startRecording(videoFileOptions.build(), recordVideoExecutor, object : VideoCapture.OnVideoSavedCallback {
override fun onVideoSaved(outputFileResults: VideoCapture.OutputFileResults) {
val map = Arguments.createMap()
map.putString("path", videoFile.absolutePath)
// TODO: duration and size
onRecordCallback(map, null)
// reset the torch mode
camera!!.cameraControl.enableTorch(torch == "on")
}
override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
val error = when (videoCaptureError) {
VideoCapture.ERROR_ENCODER -> VideoEncoderError(message, cause)
VideoCapture.ERROR_FILE_IO -> FileIOError(message, cause)
VideoCapture.ERROR_INVALID_CAMERA -> InvalidCameraError(message, cause)
VideoCapture.ERROR_MUXER -> VideoMuxerError(message, cause)
VideoCapture.ERROR_RECORDING_IN_PROGRESS -> RecordingInProgressError(message, cause)
else -> UnknownCameraError(Error(message, cause))
}
val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
onRecordCallback(null, map)
// reset the torch mode
camera!!.cameraControl.enableTorch(torch == "on")
}
})
return TemporaryFile(videoFile.absolutePath)
}
@SuppressLint("RestrictedApi")
fun CameraView.stopRecording() {
if (videoCapture == null) {
throw CameraNotReadyError()
}
videoCapture!!.stopRecording()
// reset torch mode to original value
camera!!.cameraControl.enableTorch(torch == "on")
}

View File

@ -0,0 +1,105 @@
package com.cuvent.experiences.friends.camera
import android.annotation.SuppressLint
import android.hardware.camera2.*
import android.util.Log
import androidx.camera.camera2.interop.Camera2CameraInfo
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageProxy
import androidx.exifinterface.media.ExifInterface
import com.cuvent.experiences.friends.camera.utils.*
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import kotlinx.coroutines.*
import java.io.File
private const val TAG = "CameraView.performance"
@SuppressLint("UnsafeExperimentalUsageError")
suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
val startFunc = System.nanoTime()
Log.d(CameraView.REACT_CLASS, "takePhoto() called")
val imageCapture = imageCapture ?: throw CameraNotReadyError()
if (options.hasKey("photoCodec")) {
// TODO photoCodec
}
if (options.hasKey("qualityPrioritization")) {
// TODO qualityPrioritization
}
if (options.hasKey("flash")) {
val flashMode = options.getString("flash")
imageCapture.flashMode = when (flashMode) {
"on" -> ImageCapture.FLASH_MODE_ON
"off" -> ImageCapture.FLASH_MODE_OFF
"auto" -> ImageCapture.FLASH_MODE_AUTO
else -> throw InvalidTypeScriptUnionError("flash", flashMode ?: "(null)")
}
}
if (options.hasKey("enableAutoRedEyeReduction")) {
// TODO enableAutoRedEyeReduction
}
if (options.hasKey("enableDualCameraFusion")) {
// TODO enableDualCameraFusion
}
if (options.hasKey("enableVirtualDeviceFusion")) {
// TODO enableVirtualDeviceFusion
}
if (options.hasKey("enableAutoStabilization")) {
// TODO enableAutoStabilization
}
if (options.hasKey("enableAutoDistortionCorrection")) {
// TODO enableAutoDistortionCorrection
}
val skipMetadata = if (options.hasKey("skipMetadata")) options.getBoolean("skipMetadata") else false
val camera2Info = Camera2CameraInfo.from(camera!!.cameraInfo)
val lensFacing = camera2Info.getCameraCharacteristic(CameraCharacteristics.LENS_FACING)
// TODO: Flip image if lens is front side
val results = awaitAll(
async(coroutineContext) {
Log.d(CameraView.REACT_CLASS, "Taking picture...")
val startCapture = System.nanoTime()
val pic = imageCapture.takePicture(takePhotoExecutor)
val endCapture = System.nanoTime()
Log.d(TAG, "Finished image capture in ${(endCapture - startCapture) / 1_000_000}ms")
pic
},
async(Dispatchers.IO) {
Log.d(CameraView.REACT_CLASS, "Creating temp file...")
File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
})
val photo = results.first { it is ImageProxy } as ImageProxy
val file = results.first { it is File } as File
val exif: ExifInterface?
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
Log.d(CameraView.REACT_CLASS, "Saving picture to ${file.absolutePath}...")
val startSave = System.nanoTime()
photo.save(file, lensFacing == CameraCharacteristics.LENS_FACING_FRONT)
val endSave = System.nanoTime()
Log.d(TAG, "Finished image saving in ${(endSave - startSave) / 1_000_000}ms")
// TODO: Read Exif from existing in-memory photo buffer instead of file?
exif = if (skipMetadata) null else ExifInterface(file)
}
val map = Arguments.createMap()
map.putString("path", file.absolutePath)
map.putInt("width", photo.width)
map.putInt("height", photo.height)
map.putBoolean("isRawPhoto", photo.isRaw)
val metadata = exif?.buildMetadataMap()
map.putMap("metadata", metadata)
photo.close()
Log.d(CameraView.REACT_CLASS, "Finished taking photo!")
val endFunc = System.nanoTime()
Log.d(TAG, "Finished function execution in ${(endFunc - startFunc) / 1_000_000}ms")
return@coroutineScope map
}
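
Note: the suspending `imageCapture.takePicture(takePhotoExecutor)` call above is not a CameraX API; it is an extension from the utils package that is not shown in this diff (the `photo.save(...)` and `photo.isRaw` helpers are similar utils extensions). A minimal sketch of what such a wrapper could look like, with names and placement assumed, bridging CameraX's callback-based takePicture(Executor, OnImageCapturedCallback) into a coroutine:

package com.cuvent.experiences.friends.camera.utils

import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageCaptureException
import androidx.camera.core.ImageProxy
import java.util.concurrent.Executor
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine

// Sketch only: suspend until CameraX delivers the captured frame (or an error)
// on the given executor, then resume the coroutine with the ImageProxy.
suspend fun ImageCapture.takePicture(executor: Executor): ImageProxy =
  suspendCoroutine { continuation ->
    takePicture(executor, object : ImageCapture.OnImageCapturedCallback() {
      override fun onCaptureSuccess(image: ImageProxy) {
        continuation.resume(image)
      }
      override fun onError(exception: ImageCaptureException) {
        continuation.resumeWithException(exception)
      }
    })
  }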

View File

@ -0,0 +1,42 @@
package com.cuvent.experiences.friends.camera
import android.graphics.Bitmap
import androidx.exifinterface.media.ExifInterface
import com.cuvent.experiences.friends.camera.utils.buildMetadataMap
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.withContext
import java.io.File
import java.io.FileOutputStream
suspend fun CameraView.takeSnapshot(options: ReadableMap): WritableMap = coroutineScope {
val bitmap = this@takeSnapshot.previewView.bitmap ?: throw CameraNotReadyError()
val quality = if (options.hasKey("quality")) options.getInt("quality") else 100
val file: File
val exif: ExifInterface
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
file = File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
FileOutputStream(file).use { stream ->
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, stream)
}
exif = ExifInterface(file)
}
val map = Arguments.createMap()
map.putString("path", file.absolutePath)
map.putInt("width", bitmap.width)
map.putInt("height", bitmap.height)
map.putBoolean("isRawPhoto", false)
val skipMetadata = if (options.hasKey("skipMetadata")) options.getBoolean("skipMetadata") else false
val metadata = if (skipMetadata) null else exif.buildMetadataMap()
map.putMap("metadata", metadata)
return@coroutineScope map
}

View File

@ -0,0 +1,415 @@
package com.cuvent.experiences.friends.camera
import android.Manifest
import android.annotation.SuppressLint
import android.content.Context
import android.content.pm.PackageManager
import android.hardware.camera2.*
import android.util.Log
import android.util.Range
import android.view.*
import android.view.View.OnTouchListener
import android.widget.FrameLayout
import androidx.camera.camera2.interop.Camera2Interop
import androidx.camera.core.*
import androidx.camera.core.impl.*
import androidx.camera.extensions.HdrImageCaptureExtender
import androidx.camera.extensions.HdrPreviewExtender
import androidx.camera.extensions.NightImageCaptureExtender
import androidx.camera.extensions.NightPreviewExtender
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import androidx.lifecycle.*
import com.cuvent.experiences.friends.camera.utils.*
import com.facebook.react.bridge.*
import com.facebook.react.uimanager.events.RCTEventEmitter
import kotlinx.coroutines.*
import java.lang.IllegalArgumentException
import java.util.concurrent.Executors
import kotlin.math.max
import kotlin.math.min
// CaptureRequest.java
// TODO: CONTROL_AE_ANTIBANDING_MODE (60Hz, 50Hz lights)
// TODO: CONTROL_AE_MODE for RedEye reduction
// TODO: CONTROL_AE_TARGET_FPS_RANGE if FPS changes
// TODO: CONTROL_CAPTURE_INTENT for prioritization (Preview, Still Capture, Video recording)
// TODO: CONTROL_EFFECT_MODE for color filters/effects
// TODO: CONTROL_SCENE_MODE contains HDR, do I need that?
// TODO: CONTROL_VIDEO_STABILIZATION_MODE and STATISTICS_OIS_DATA_MODE for stabilization techniques
// TODO: CONTROL_ENABLE_ZSL for Zero Shutter Lag (speed prio)
// TODO: EDGE_MODE not sure what that is
// TODO: JPEG_xxx other JPEG options
// TODO: NOISE_REDUCTION_MODE also maybe ZSL
// TODO: SCALER_CROP_REGION for digital zoom
// TODO: SENSOR_FRAME_DURATION for FPS
//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
//
// CameraView
// TODO: Configurable FPS higher than 30
// TODO: High-speed video recordings (export in CameraViewModule::getAvailableVideoDevices(), and set in CameraView::configurePreview()) (120FPS+)
// TODO: configureSession() Use format (photoWidth/photoHeight)
// TODO: configureSession() enableDepthData
// TODO: configureSession() enableHighResolutionCapture
// TODO: configureSession() enablePortraitEffectsMatteDelivery
// TODO: configureSession() scannableCodes | onCodeScanned
// TODO: configureSession() colorSpace
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
// TODO: videoStabilizationMode
// TODO: Video HDR
// CameraView+TakePhoto
// TODO: takePhoto() depth data
// TODO: takePhoto() raw capture
// TODO: takePhoto() photoCodec ("hevc" | "jpeg" | "raw")
// TODO: takePhoto() qualityPrioritization
// TODO: takePhoto() enableAutoRedEyeReduction
// TODO: takePhoto() enableVirtualDeviceFusion
// TODO: takePhoto() enableAutoStabilization
// TODO: takePhoto() enableAutoDistortionCorrection
// TODO: takePhoto() return with jsi::Value Image reference for faster capture
@SuppressLint("ClickableViewAccessibility") // suppresses the warning that the pinch to zoom gesture is not accessible
class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
// react properties
// props that require reconfiguring
var cameraId: String? = null // this is actually not a react prop directly, but the result of setting device={}
var enableDepthData = false
var enableHighResolutionCapture: Boolean? = null
var enablePortraitEffectsMatteDelivery = false
var scannableCodes: ReadableArray? = null
// props that require format reconfiguring
var format: ReadableMap? = null
var fps: Int? = null
var hdr: Boolean? = null // nullable bool
var colorSpace: String? = null
var lowLightBoost: Boolean? = null // nullable bool
// other props
var isActive = false
var torch = "off"
var zoom = 0.0 // in percent
var enableZoomGesture = false
// private properties
private val reactContext: ReactContext
get() = context as ReactContext
internal val previewView: PreviewView
private val cameraExecutor = Executors.newSingleThreadExecutor()
internal val takePhotoExecutor = Executors.newSingleThreadExecutor()
internal val recordVideoExecutor = Executors.newSingleThreadExecutor()
internal var camera: Camera? = null
internal var imageCapture: ImageCapture? = null
internal var videoCapture: VideoCapture? = null
private val scaleGestureListener: ScaleGestureDetector.SimpleOnScaleGestureListener
private val scaleGestureDetector: ScaleGestureDetector
private val touchEventListener: OnTouchListener
private val lifecycleRegistry: LifecycleRegistry
private var hostLifecycleState: Lifecycle.State
private var minZoom: Float = 1f
private var maxZoom: Float = 1f
init {
previewView = PreviewView(context)
previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
previewView.installHierarchyFitter() // If this is not called correctly, view finder will be black/blank
addView(previewView)
scaleGestureListener = object : ScaleGestureDetector.SimpleOnScaleGestureListener() {
override fun onScale(detector: ScaleGestureDetector): Boolean {
zoom = min(max(((zoom + 1) * detector.scaleFactor) - 1, 0.0), 1.0)
update(arrayListOf("zoom"))
return true
}
}
scaleGestureDetector = ScaleGestureDetector(context, scaleGestureListener)
touchEventListener = OnTouchListener { _, event -> return@OnTouchListener scaleGestureDetector.onTouchEvent(event) }
hostLifecycleState = Lifecycle.State.INITIALIZED
lifecycleRegistry = LifecycleRegistry(this)
reactContext.addLifecycleEventListener(object : LifecycleEventListener {
override fun onHostResume() {
hostLifecycleState = Lifecycle.State.RESUMED
updateLifecycleState()
}
override fun onHostPause() {
hostLifecycleState = Lifecycle.State.CREATED
updateLifecycleState()
}
override fun onHostDestroy() {
hostLifecycleState = Lifecycle.State.DESTROYED
updateLifecycleState()
cameraExecutor.shutdown()
takePhotoExecutor.shutdown()
recordVideoExecutor.shutdown()
}
})
}
override fun getLifecycle(): Lifecycle {
return lifecycleRegistry
}
/**
* Updates the custom Lifecycle to match the host activity's lifecycle, and if it's active we narrow it down to the [isActive] and [isAttachedToWindow] fields.
*/
private fun updateLifecycleState() {
val lifecycleBefore = lifecycleRegistry.currentState
if (hostLifecycleState == Lifecycle.State.RESUMED) {
// Host Lifecycle (Activity) is currently active (RESUMED), so we narrow it down to the view's lifecycle
if (isActive && isAttachedToWindow) {
lifecycleRegistry.currentState = Lifecycle.State.RESUMED
} else {
lifecycleRegistry.currentState = Lifecycle.State.CREATED
}
} else {
// Host Lifecycle (Activity) is currently inactive (STARTED or DESTROYED), so that overrules our view's lifecycle
lifecycleRegistry.currentState = hostLifecycleState
}
Log.d(REACT_CLASS, "Lifecycle went from ${lifecycleBefore.name} -> ${lifecycleRegistry.currentState.name} (isActive: $isActive | isAttachedToWindow: $isAttachedToWindow)")
}
override fun onAttachedToWindow() {
super.onAttachedToWindow()
updateLifecycleState()
}
override fun onDetachedFromWindow() {
super.onDetachedFromWindow()
updateLifecycleState()
}
/**
* Invalidate all React Props and reconfigure the device
*/
fun update(changedProps: ArrayList<String>) = GlobalScope.launch(Dispatchers.Main) {
try {
val shouldReconfigureSession = changedProps.containsAny(propsThatRequireSessionReconfiguration)
val shouldReconfigureZoom = shouldReconfigureSession || changedProps.contains("zoom")
val shouldReconfigureTorch = shouldReconfigureSession || changedProps.contains("torch")
if (changedProps.contains("isActive")) {
updateLifecycleState()
}
if (shouldReconfigureSession) {
configureSession()
}
if (shouldReconfigureZoom) {
val scaled = (zoom.toFloat() * (maxZoom - minZoom)) + minZoom
camera!!.cameraControl.setZoomRatio(scaled)
}
if (shouldReconfigureTorch) {
camera!!.cameraControl.enableTorch(torch == "on")
}
if (changedProps.contains("enableZoomGesture")) {
setOnTouchListener(if (enableZoomGesture) touchEventListener else null)
}
} catch (e: CameraError) {
invokeOnError(e)
}
}
/**
* Configures the camera capture session. This should only be called when the camera device changes.
*/
@SuppressLint("UnsafeExperimentalUsageError", "RestrictedApi")
private suspend fun configureSession() {
try {
Log.d(REACT_CLASS, "Configuring session...")
if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
throw CameraPermissionError()
}
if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
throw MicrophonePermissionError()
}
if (cameraId == null) {
throw NoCameraDeviceError()
}
if (format != null)
Log.d(REACT_CLASS, "Configuring session with Camera ID $cameraId and custom format...")
else
Log.d(REACT_CLASS, "Configuring session with Camera ID $cameraId and default format options...")
// Used to bind the lifecycle of cameras to the lifecycle owner
val cameraProvider = getCameraProvider(context)
val cameraSelector = CameraSelector.Builder().byID(cameraId!!).build()
val rotation = previewView.display.rotation
val aspectRatio = aspectRatio(previewView.width, previewView.height)
val previewBuilder = Preview.Builder()
.setTargetAspectRatio(aspectRatio)
.setTargetRotation(rotation)
val imageCaptureBuilder = ImageCapture.Builder()
.setTargetAspectRatio(aspectRatio)
.setTargetRotation(rotation)
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
val videoCaptureBuilder = VideoCapture.Builder()
.setTargetAspectRatio(aspectRatio)
.setTargetRotation(rotation)
if (format != null) {
// User has selected a custom format={}. Use that
val format = DeviceFormat(format!!)
// The format (exported in CameraViewModule) specifies the resolution in ROTATION_90 (horizontal)
val rotationRelativeToFormat = rotation - 1 // subtract one, so that ROTATION_90 becomes ROTATION_0 and so on
fps?.let { fps ->
if (format.frameRateRanges.any { it.contains(fps) }) {
// Camera supports the given FPS (frame rate range)
val frameDuration = ((1.0 / fps.toDouble()) * 1_000_000_000).toLong()
Log.d(REACT_CLASS, "Setting AE_TARGET_FPS_RANGE to $fps-$fps, and SENSOR_FRAME_DURATION to $frameDuration")
Camera2Interop.Extender(previewBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
.setCaptureRequestOption(CaptureRequest.SENSOR_FRAME_DURATION, frameDuration)
Camera2Interop.Extender(videoCaptureBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
.setCaptureRequestOption(CaptureRequest.SENSOR_FRAME_DURATION, frameDuration)
} else {
throw FpsNotContainedInFormatError(fps)
}
}
hdr?.let { hdr ->
// Enable HDR scene mode if set
if (hdr) {
val imageExtension = HdrImageCaptureExtender.create(imageCaptureBuilder)
val previewExtension = HdrPreviewExtender.create(previewBuilder)
val isExtensionAvailable = imageExtension.isExtensionAvailable(cameraSelector) &&
previewExtension.isExtensionAvailable(cameraSelector)
if (isExtensionAvailable) {
Log.d(REACT_CLASS, "Enabling native HDR extension...")
imageExtension.enableExtension(cameraSelector)
previewExtension.enableExtension(cameraSelector)
} else {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
Log.d(REACT_CLASS, "Native HDR extension not available, falling back to CONTROL_SCENE_MODE...")
Camera2Interop.Extender(previewBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR)
Camera2Interop.Extender(imageCaptureBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR)
Camera2Interop.Extender(videoCaptureBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR)
} else {
Log.d(REACT_CLASS, "Native HDR extension and CONTROL_SCENE_MODE_HDR not available!")
throw UnsupportedOSError("hdr", "true", "5.1 (Lollipop/22)")
}
}
}
}
lowLightBoost?.let { lowLightBoost ->
if (lowLightBoost) {
val imageExtension = NightImageCaptureExtender.create(imageCaptureBuilder)
val previewExtension = NightPreviewExtender.create(previewBuilder)
val isExtensionAvailable = imageExtension.isExtensionAvailable(cameraSelector) &&
previewExtension.isExtensionAvailable(cameraSelector)
if (isExtensionAvailable) {
Log.d(REACT_CLASS, "Enabling native night-mode extension...")
imageExtension.enableExtension(cameraSelector)
previewExtension.enableExtension(cameraSelector)
} else {
Log.d(REACT_CLASS, "Native night-mode extension not available, falling back to CONTROL_SCENE_MODE...")
Camera2Interop.Extender(previewBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT)
Camera2Interop.Extender(imageCaptureBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT)
Camera2Interop.Extender(videoCaptureBuilder)
.setCaptureRequestOption(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT)
}
}
}
// TODO: qualityPrioritization for ImageCapture
imageCaptureBuilder.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
val photoResolution = format.photoSize.rotated(rotationRelativeToFormat)
// TODO: imageCaptureBuilder.setTargetResolution(photoResolution)
Log.d(REACT_CLASS, "Using Photo Capture resolution $photoResolution")
fps?.let { fps ->
Log.d(REACT_CLASS, "Setting video recording FPS to $fps")
videoCaptureBuilder.setVideoFrameRate(fps)
}
}
val preview = previewBuilder.build()
imageCapture = imageCaptureBuilder.build()
videoCapture = videoCaptureBuilder.build()
// Unbind use cases before rebinding
cameraProvider.unbindAll()
// Bind use cases to camera
camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageCapture!!, videoCapture!!)
preview.setSurfaceProvider(previewView.surfaceProvider)
minZoom = camera!!.cameraInfo.zoomState.value?.minZoomRatio ?: 1f
maxZoom = camera!!.cameraInfo.zoomState.value?.maxZoomRatio ?: 1f
Log.d(REACT_CLASS, "Session configured! Camera: ${camera!!}")
invokeOnInitialized()
} catch(exc: Throwable) {
throw when (exc) {
is CameraError -> exc
is IllegalArgumentException -> InvalidCameraDeviceError(exc)
else -> UnknownCameraError(exc)
}
}
}
fun getAvailablePhotoCodecs(): WritableArray {
// TODO
return Arguments.createArray()
}
fun getAvailableVideoCodecs(): WritableArray {
// TODO
return Arguments.createArray()
}
override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) {
super.onLayout(changed, left, top, right, bottom)
Log.i(REACT_CLASS, "onLayout($changed, $left, $top, $right, $bottom) was called! (Width: $width, Height: $height)")
}
private fun invokeOnInitialized() {
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraInitialized", null)
}
private fun invokeOnError(error: CameraError) {
val event = Arguments.createMap()
event.putString("code", error.code)
event.putString("message", error.message)
error.cause?.let { cause ->
event.putMap("cause", errorToMap(cause))
}
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", event)
}
private fun errorToMap(error: Throwable): WritableMap {
val map = Arguments.createMap()
map.putString("message", error.message)
map.putString("stacktrace", error.stackTraceToString())
error.cause?.let { cause ->
map.putMap("cause", errorToMap(cause))
}
return map
}
companion object {
const val REACT_CLASS = "CameraView"
private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost")
}
}
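
Note: configureSession() awaits a `getCameraProvider(context)` helper, and the preview view's `installHierarchyFitter()` comes from the same utils package; neither is included in this diff. Assuming the provider helper simply wraps CameraX's ListenableFuture with the kotlinx-coroutines-guava `await()` already used for focus above, a minimal sketch could look like this:

package com.cuvent.experiences.friends.camera.utils

import android.content.Context
import androidx.camera.lifecycle.ProcessCameraProvider
import kotlinx.coroutines.guava.await

// Sketch only: suspend until the process-wide CameraX provider is ready.
suspend fun getCameraProvider(context: Context): ProcessCameraProvider =
  ProcessCameraProvider.getInstance(context).await()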

View File

@ -0,0 +1,156 @@
package com.cuvent.experiences.friends.camera
import android.util.Log
import com.facebook.react.bridge.ReactContext
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.common.MapBuilder
import com.facebook.react.uimanager.SimpleViewManager
import com.facebook.react.uimanager.ThemedReactContext
import com.facebook.react.uimanager.annotations.ReactProp
import java.lang.ref.WeakReference
class CameraViewManager : SimpleViewManager<CameraView>() {
private fun addChangedPropToTransaction(view: CameraView, changedProp: String) {
if (cameraViewTransactions[view] == null) {
cameraViewTransactions[view] = ArrayList()
}
cameraViewTransactions[view]!!.add(changedProp)
}
@ReactProp(name = "cameraId")
fun setCameraId(view: CameraView, cameraId: String) {
if (view.cameraId != cameraId)
addChangedPropToTransaction(view, "cameraId")
view.cameraId = cameraId
}
@ReactProp(name = "enableDepthData")
fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
if (view.enableDepthData != enableDepthData)
addChangedPropToTransaction(view, "enableDepthData")
view.enableDepthData = enableDepthData
}
@ReactProp(name = "enableHighResolutionCapture")
fun setEnableHighResolutionCapture(view: CameraView, enableHighResolutionCapture: Boolean?) {
if (view.enableHighResolutionCapture != enableHighResolutionCapture)
addChangedPropToTransaction(view, "enableHighResolutionCapture")
view.enableHighResolutionCapture = enableHighResolutionCapture
}
@ReactProp(name = "enablePortraitEffectsMatteDelivery")
fun setEnablePortraitEffectsMatteDelivery(view: CameraView, enablePortraitEffectsMatteDelivery: Boolean) {
if (view.enablePortraitEffectsMatteDelivery != enablePortraitEffectsMatteDelivery)
addChangedPropToTransaction(view, "enablePortraitEffectsMatteDelivery")
view.enablePortraitEffectsMatteDelivery = enablePortraitEffectsMatteDelivery
}
@ReactProp(name = "scannableCodes")
fun setScannableCodes(view: CameraView, scannableCodes: ReadableArray?) {
if (view.scannableCodes != scannableCodes)
addChangedPropToTransaction(view, "scannableCodes")
view.scannableCodes = scannableCodes
}
@ReactProp(name = "format")
fun setFormat(view: CameraView, format: ReadableMap?) {
if (view.format != format)
addChangedPropToTransaction(view, "format")
view.format = format
}
// We're treating -1 as "null" here, because when I make the fps parameter
// of type "Int?" the react bridge throws an error.
@ReactProp(name = "fps", defaultInt = -1)
fun setFps(view: CameraView, fps: Int) {
if (view.fps != fps)
addChangedPropToTransaction(view, "fps")
view.fps = if (fps > 0) fps else null
}
@ReactProp(name = "hdr")
fun setHdr(view: CameraView, hdr: Boolean?) {
if (view.hdr != hdr)
addChangedPropToTransaction(view, "hdr")
view.hdr = hdr
}
@ReactProp(name = "lowLightBoost")
fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) {
if (view.lowLightBoost != lowLightBoost)
addChangedPropToTransaction(view, "lowLightBoost")
view.lowLightBoost = lowLightBoost
}
@ReactProp(name = "colorSpace")
fun setColorSpace(view: CameraView, colorSpace: String?) {
if (view.colorSpace != colorSpace)
addChangedPropToTransaction(view, "colorSpace")
view.colorSpace = colorSpace
}
@ReactProp(name = "isActive")
fun setIsActive(view: CameraView, isActive: Boolean) {
if (view.isActive != isActive)
addChangedPropToTransaction(view, "isActive")
view.isActive = isActive
}
@ReactProp(name = "torch")
fun setTorch(view: CameraView, torch: String) {
if (view.torch != torch)
addChangedPropToTransaction(view, "torch")
// TODO: why THE FUCK is this not being called?
view.torch = torch
}
@ReactProp(name = "zoom")
fun setZoom(view: CameraView, zoom: Double) {
if (view.zoom != zoom)
addChangedPropToTransaction(view, "zoom")
// TODO: why THE FUCK is this not being called?
view.zoom = zoom
}
@ReactProp(name = "enableZoomGesture")
fun setEnableZoomGesture(view: CameraView, enableZoomGesture: Boolean) {
if (view.enableZoomGesture != enableZoomGesture)
addChangedPropToTransaction(view, "enableZoomGesture")
view.enableZoomGesture = enableZoomGesture
}
override fun onAfterUpdateTransaction(view: CameraView) {
super.onAfterUpdateTransaction(view)
val changedProps = cameraViewTransactions[view] ?: ArrayList()
view.update(changedProps)
cameraViewTransactions.remove(view)
}
public override fun createViewInstance(context: ThemedReactContext): CameraView {
return CameraView(context)
}
override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any>? {
return MapBuilder.builder<String, Any>()
.put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized"))
.put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.build()
}
override fun onDropViewInstance(view: CameraView) {
Log.d(REACT_CLASS, "onDropViewInstance() called!")
super.onDropViewInstance(view)
}
override fun getName(): String {
return REACT_CLASS
}
companion object {
const val REACT_CLASS = "CameraView"
val cameraViewTransactions: HashMap<CameraView, ArrayList<String>> = HashMap()
}
}

View File

@ -0,0 +1,312 @@
package com.cuvent.experiences.friends.camera
import android.Manifest
import android.content.Context
import android.content.pm.PackageManager
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.media.ImageReader
import android.media.MediaRecorder
import android.os.Build
import android.util.Log
import androidx.camera.core.CameraSelector
import androidx.camera.core.ImageCapture
import androidx.camera.core.MeteringPoint
import androidx.camera.core.MeteringPointFactory
import androidx.camera.extensions.HdrImageCaptureExtender
import androidx.camera.extensions.NightImageCaptureExtender
import androidx.core.content.ContextCompat
import com.cuvent.experiences.friends.camera.parsers.*
import com.cuvent.experiences.friends.camera.utils.*
import com.facebook.react.bridge.*
import com.facebook.react.modules.core.PermissionAwareActivity
import com.facebook.react.modules.core.PermissionListener
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
companion object {
const val REACT_CLASS = "CameraView"
var RequestCode = 10
fun parsePermissionStatus(status: Int): String {
return when(status) {
PackageManager.PERMISSION_DENIED -> "denied"
PackageManager.PERMISSION_GRANTED -> "authorized"
else -> "not-determined"
}
}
}
override fun getName(): String {
return REACT_CLASS
}
private fun findCameraView(id: Int): CameraView = reactApplicationContext.currentActivity?.findViewById(id) ?: throw ViewNotFoundError(id)
@ReactMethod
fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
GlobalScope.launch(Dispatchers.Main) {
withPromise(promise) {
val view = findCameraView(viewTag)
view.takePhoto(options)
}
}
}
@ReactMethod
fun takeSnapshot(viewTag: Int, options: ReadableMap, promise: Promise) {
GlobalScope.launch(Dispatchers.Main) {
withPromise(promise) {
val view = findCameraView(viewTag)
view.takeSnapshot(options)
}
}
}
// TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that
@ReactMethod(isBlockingSynchronousMethod = true)
fun startRecording(viewTag: Int, options: ReadableMap, onRecordCallback: Callback) {
GlobalScope.launch(Dispatchers.Main) {
val view = findCameraView(viewTag)
view.startRecording(options, onRecordCallback)
}
}
@ReactMethod
fun stopRecording(viewTag: Int, promise: Promise) {
withPromise(promise) {
val view = findCameraView(viewTag)
view.stopRecording()
return@withPromise null
}
}
@ReactMethod
fun focus(viewTag: Int, point: ReadableMap, promise: Promise) {
GlobalScope.launch(Dispatchers.Main) {
withPromise(promise) {
val view = findCameraView(viewTag)
view.focus(point)
return@withPromise null
}
}
}
@ReactMethod
fun getAvailableVideoCodecs(viewTag: Int, promise: Promise) {
withPromise(promise) {
val view = findCameraView(viewTag)
view.getAvailableVideoCodecs()
}
}
@ReactMethod
fun getAvailablePhotoCodecs(viewTag: Int, promise: Promise) {
withPromise(promise) {
val view = findCameraView(viewTag)
view.getAvailablePhotoCodecs()
}
}
// TODO: This uses the Camera2 API to list all characteristics of a camera device and therefore doesn't work with Camera1. Find a way to use CameraX for this
@ReactMethod
fun getAvailableCameraDevices(promise: Promise) {
withPromise(promise) {
val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as? CameraManager
?: throw CameraManagerUnavailableError()
val cameraDevices: WritableArray = Arguments.createArray()
manager.cameraIdList.forEach loop@{ id ->
val cameraSelector = CameraSelector.Builder().byID(id).build()
// TODO: ImageCapture.Builder - I'm not setting the target resolution, does that matter?
val imageCaptureBuilder = ImageCapture.Builder()
val characteristics = manager.getCameraCharacteristics(id)
val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!!
// Check if the device is actually a "basic camera" (i.e. filter out depth-only sensors)
if (!capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE)) {
Log.d(REACT_CLASS, "Skipping Camera #${id} because it does not support backward compatible capabilities..")
return@loop
}
val isMultiCam = Build.VERSION.SDK_INT >= Build.VERSION_CODES.P &&
capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)
val deviceTypes = characteristics.getDeviceTypes()
val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
val lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING)!!
val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)!!
val maxScalerZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM)!!
val supportsDepthCapture = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)
val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)
val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE)
val stabilizationModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)!! // only digital, no optical
val zoomRange = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R)
characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
else null
val name = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P)
characteristics.get(CameraCharacteristics.INFO_VERSION)
else null
val fpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)!!
val scenes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES)!!
var supportsHdr = false
var supportsLowLightBoost = false
try {
val hdrExtension = HdrImageCaptureExtender.create(imageCaptureBuilder)
supportsHdr = hdrExtension.isExtensionAvailable(cameraSelector)
|| (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP_MR1 && scenes.contains(CameraCharacteristics.CONTROL_SCENE_MODE_HDR))
val nightExtension = NightImageCaptureExtender.create(imageCaptureBuilder)
supportsLowLightBoost = nightExtension.isExtensionAvailable(cameraSelector)
|| scenes.contains(CameraCharacteristics.CONTROL_SCENE_MODE_NIGHT)
} catch (e: Throwable) {
// error on checking availability. falls back to "false"
Log.e(REACT_CLASS, "Failed to check HDR/Night Mode extension availability.", e)
}
val fieldOfView = characteristics.getFieldOfView()
val map = Arguments.createMap()
val formats = Arguments.createArray()
map.putString("id", id)
map.putArray("devices", deviceTypes)
map.putString("position", parseLensFacing(lensFacing))
map.putString("name", name ?: "${parseLensFacing(lensFacing)} ($id)")
map.putBoolean("hasFlash", hasFlash)
map.putBoolean("hasTorch", hasFlash)
map.putBoolean("isMultiCam", isMultiCam)
map.putBoolean("supportsRawCapture", supportsRawCapture)
map.putBoolean("supportsDepthCapture", supportsDepthCapture)
map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)
if (zoomRange != null) {
map.putDouble("minZoom", zoomRange.lower.toDouble())
map.putDouble("maxZoom", zoomRange.upper.toDouble())
} else {
map.putDouble("minZoom", 1.0)
map.putDouble("maxZoom", maxScalerZoom.toDouble())
}
map.putDouble("neutralZoom", characteristics.neutralZoomPercent.toDouble())
val maxImageOutputSize = cameraConfig.getOutputSizes(ImageReader::class.java).maxByOrNull { it.width * it.height }!!
// TODO: Should I really check MediaRecorder::class instead of SurfaceView::class?
// Recording should always be done in the most efficient format, which is the format native to the camera framework
cameraConfig.getOutputSizes(MediaRecorder::class.java).forEach { size ->
val isHighestPhotoQualitySupported = areUltimatelyEqual(size, maxImageOutputSize)
// Get the number of seconds that each frame will take to process
val secondsPerFrame = cameraConfig.getOutputMinFrameDuration(MediaRecorder::class.java, size) / 1_000_000_000.0
val frameRateRanges = Arguments.createArray()
if (secondsPerFrame > 0) {
val fps = (1.0 / secondsPerFrame).toInt()
val frameRateRange = Arguments.createMap()
frameRateRange.putInt("minFrameRate", 1)
frameRateRange.putInt("maxFrameRate", fps)
frameRateRanges.pushMap(frameRateRange)
}
fpsRanges.forEach { range ->
val frameRateRange = Arguments.createMap()
frameRateRange.putInt("minFrameRate", range.lower)
frameRateRange.putInt("maxFrameRate", range.upper)
frameRateRanges.pushMap(frameRateRange)
}
// TODO Revisit getAvailableCameraDevices (colorSpaces, more than YUV?)
val colorSpaces = Arguments.createArray()
colorSpaces.pushString("yuv")
// TODO Revisit getAvailableCameraDevices (more accurate video stabilization modes)
val videoStabilizationModes = Arguments.createArray()
if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_OFF))
videoStabilizationModes.pushString("off")
if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
videoStabilizationModes.pushString("auto")
videoStabilizationModes.pushString("standard")
}
val format = Arguments.createMap()
format.putDouble("photoHeight", size.height.toDouble())
format.putDouble("photoWidth", size.width.toDouble())
format.putDouble("videoHeight", size.height.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoHeight == photoHeight?)
format.putDouble("videoWidth", size.width.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoWidth == photoWidth?)
format.putBoolean("isHighestPhotoQualitySupported", isHighestPhotoQualitySupported)
format.putInt("maxISO", isoRange?.upper)
format.putInt("minISO", isoRange?.lower)
format.putDouble("fieldOfView", fieldOfView) // TODO: Revisit getAvailableCameraDevices (is fieldOfView accurate?)
format.putDouble("maxZoom", (zoomRange?.upper ?: maxScalerZoom).toDouble())
format.putArray("colorSpaces", colorSpaces)
format.putBoolean("supportsVideoHDR", false) // TODO: supportsVideoHDR
format.putBoolean("supportsPhotoHDR", supportsHdr)
format.putArray("frameRateRanges", frameRateRanges)
format.putString("autoFocusSystem", "none") // TODO: Revisit getAvailableCameraDevices (autoFocusSystem) (CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES or CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION)
format.putArray("videoStabilizationModes", videoStabilizationModes)
formats.pushMap(format)
}
map.putArray("formats", formats)
cameraDevices.pushMap(map)
}
return@withPromise cameraDevices
}
}
@ReactMethod
fun getCameraPermissionStatus(promise: Promise) {
val status = ContextCompat.checkSelfPermission(reactApplicationContext, Manifest.permission.CAMERA)
promise.resolve(parsePermissionStatus(status))
}
@ReactMethod
fun getMicrophonePermissionStatus(promise: Promise) {
val status = ContextCompat.checkSelfPermission(reactApplicationContext, Manifest.permission.RECORD_AUDIO)
promise.resolve(parsePermissionStatus(status))
}
@ReactMethod
fun requestCameraPermission(promise: Promise) {
val activity = reactApplicationContext.currentActivity
if (activity is PermissionAwareActivity) {
val currentRequestCode = RequestCode
RequestCode++
val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
if (requestCode == currentRequestCode) {
val permissionStatus = grantResults[0]
promise.resolve(parsePermissionStatus(permissionStatus))
return@PermissionListener true
}
return@PermissionListener false
}
activity.requestPermissions(arrayOf(Manifest.permission.CAMERA), currentRequestCode, listener)
} else {
promise.reject("NO_ACTIVITY", "No PermissionAwareActivity was found! Make sure the app has launched before calling this function.")
}
}
@ReactMethod
fun requestMicrophonePermission(promise: Promise) {
val activity = reactApplicationContext.currentActivity
if (activity is PermissionAwareActivity) {
val currentRequestCode = RequestCode
RequestCode++
val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
if (requestCode == currentRequestCode) {
val permissionStatus = grantResults[0]
promise.resolve(parsePermissionStatus(permissionStatus))
return@PermissionListener true
}
return@PermissionListener false
}
activity.requestPermissions(arrayOf(Manifest.permission.RECORD_AUDIO), currentRequestCode, listener)
} else {
promise.reject("NO_ACTIVITY", "No PermissionAwareActivity was found! Make sure the app has launched before calling this function.")
}
}
}
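
Note: every @ReactMethod above funnels its result through `withPromise`, which is imported from the utils package and not part of this diff. A plausible minimal sketch, assuming it resolves the promise with the block's result and rejects with the `domain/id` code defined by CameraError:

package com.cuvent.experiences.friends.camera.utils

import com.cuvent.experiences.friends.camera.CameraError
import com.cuvent.experiences.friends.camera.UnknownCameraError
import com.cuvent.experiences.friends.camera.code
import com.facebook.react.bridge.Promise

// Sketch only: run the block, resolve the promise with its result, and reject
// with a "domain/id" error code if anything throws. Declared inline so suspend
// functions can be called from the lambda when used inside a coroutine.
inline fun withPromise(promise: Promise, closure: () -> Any?) {
  try {
    promise.resolve(closure())
  } catch (e: Throwable) {
    val error = if (e is CameraError) e else UnknownCameraError(e)
    promise.reject(error.code, error.message, e)
  }
}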

View File

@ -0,0 +1,61 @@
package com.cuvent.experiences.friends.camera
import android.graphics.ImageFormat
abstract class CameraError(
/**
* The domain of the error. Error domains are used to group errors.
*
* Example: "permission"
*/
val domain: String,
/**
* The id of the error. Errors are uniquely identified under a given domain.
*
* Example: "microphone-permission-denied"
*/
val id: String,
/**
* A detailed error description of "what went wrong".
*
* Example: "The microphone permission was denied!"
*/
message: String,
/**
* A throwable that caused this error.
*/
cause: Throwable? = null
): Throwable("[$domain/$id] $message", cause)
val CameraError.code: String
get() = "$domain/$id"
class MicrophonePermissionError: CameraError("permission", "microphone-permission-denied", "The Microphone permission was denied!")
class CameraPermissionError: CameraError("permission", "camera-permission-denied", "The Camera permission was denied!")
class InvalidTypeScriptUnionError(unionName: String, unionValue: String): CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
class UnsupportedOSError(unionName: String, unionValue: String, supportedOnOS: String): CameraError("parameter", "unsupported-os", "The given value \"$unionValue\" could not be used for $unionName, as it is only available on Android $supportedOnOS and above!")
class NoCameraDeviceError: CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
class InvalidCameraDeviceError(cause: Throwable): CameraError("device", "invalid-device", "The given Camera device could not be found for use-case binding!", cause)
class FpsNotContainedInFormatError(fps: Int): CameraError("format", "invalid-fps", "The given FPS is not valid for the currently selected format. Make sure you select a format whose `frameRateRanges` include $fps FPS!")
class HdrNotContainedInFormatError(usesFallback: Boolean): CameraError("format", "invalid-hdr", "The currently selected format does not support HDR capture! " +
"Make sure you select a format which includes `supportsPhotoHDR`! Using CONTROL_SCENE_MODE_HDR as fallback: $usesFallback")
class LowLightBoostNotContainedInFormatError(usesFallback: Boolean): CameraError("format", "invalid-low-light-boost", "The currently selected format does not support low-light boost (night mode)! " +
"Make sure you select a format which includes `supportsLowLightBoost`. Using CONTROL_SCENE_MODE_NIGHT as fallback: $usesFallback")
class CameraNotReadyError: CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
class InvalidFormatError(format: Int): CameraError("capture", "invalid-photo-format", "The Photo has an invalid format! Expected ${ImageFormat.YUV_420_888}, actual: $format")
class VideoEncoderError(message: String, cause: Throwable? = null): CameraError("capture", "encoder-error", message, cause)
class VideoMuxerError(message: String, cause: Throwable? = null): CameraError("capture", "muxer-error", message, cause)
class RecordingInProgressError(message: String, cause: Throwable? = null): CameraError("capture", "recording-in-progress", message, cause)
class FileIOError(message: String, cause: Throwable? = null): CameraError("capture", "file-io-error", message, cause)
class InvalidCameraError(message: String, cause: Throwable? = null): CameraError("capture", "not-bound-error", message, cause)
class CameraManagerUnavailableError: CameraError("system", "no-camera-manager", "The Camera manager instance was unavailable for the current Application!")
class ViewNotFoundError(viewId: Int): CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")
class UnknownCameraError(cause: Throwable): CameraError("unknown", "unknown", cause.message ?: "An unknown camera error occurred.", cause)
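
For illustration, this is what the definitions above produce at runtime (same package; the values follow directly from the constructor arguments):

fun exampleErrorCodes() {
  val error = FpsNotContainedInFormatError(240)
  println(error.code)    // "format/invalid-fps"
  println(error.message) // "[format/invalid-fps] The given FPS is not valid for ..." (the base class prefixes the code)
}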

View File

@ -0,0 +1,15 @@
package com.cuvent.experiences.friends.camera.parsers
import android.hardware.camera2.CameraCharacteristics
/**
* Parses a Lens Facing int to a string representation usable for the TypeScript types.
*/
fun parseLensFacing(lensFacing: Int?): String? {
return when (lensFacing) {
CameraCharacteristics.LENS_FACING_BACK -> "back"
CameraCharacteristics.LENS_FACING_FRONT -> "front"
CameraCharacteristics.LENS_FACING_EXTERNAL -> "external"
else -> null
}
}

View File

@ -0,0 +1,21 @@
package com.cuvent.experiences.friends.camera.parsers
import android.util.Size
import android.util.SizeF
import kotlin.math.max
import kotlin.math.min
val Size.bigger: Int
get() = max(this.width, this.height)
val Size.smaller: Int
get() = min(this.width, this.height)
val SizeF.bigger: Float
get() = max(this.width, this.height)
val SizeF.smaller: Float
get() = min(this.width, this.height)
fun areUltimatelyEqual(size1: Size, size2: Size): Boolean {
return size1.width * size1.height == size2.width * size2.height
}

View File

@ -0,0 +1,28 @@
package com.cuvent.experiences.friends.camera.utils
import androidx.camera.core.AspectRatio
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min
private const val RATIO_4_3_VALUE = 4.0 / 3.0
private const val RATIO_16_9_VALUE = 16.0 / 9.0
/**
* [androidx.camera.core.ImageAnalysisConfig] requires enum value of
* [androidx.camera.core.AspectRatio]. Currently it has values of 4:3 & 16:9.
*
* Detecting the most suitable ratio for the dimensions provided in @params by comparing the
* absolute difference between the preview ratio and each of the provided values.
*
* @param width - preview width
* @param height - preview height
* @return suitable aspect ratio
*/
fun aspectRatio(width: Int, height: Int): Int {
val previewRatio = max(width, height).toDouble() / min(width, height)
if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
return AspectRatio.RATIO_4_3
}
return AspectRatio.RATIO_16_9
}
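
A quick sanity check of the helper above (example dimensions only, using the file's existing AspectRatio import):

fun exampleAspectRatio() {
  // 1080 x 1920 -> ratio 1920/1080 ≈ 1.78, closest to 16:9
  check(aspectRatio(1080, 1920) == AspectRatio.RATIO_16_9)
  // 1536 x 2048 -> ratio 2048/1536 ≈ 1.33, closest to 4:3
  check(aspectRatio(1536, 2048) == AspectRatio.RATIO_4_3)
}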

View File

@ -0,0 +1,22 @@
package com.cuvent.experiences.friends.camera.utils
import com.facebook.react.bridge.*
private fun makeErrorCauseMap(throwable: Throwable): ReadableMap {
val map = Arguments.createMap()
map.putString("message", throwable.message)
map.putString("stacktrace", throwable.stackTraceToString())
if (throwable.cause != null) {
map.putMap("cause", makeErrorCauseMap(throwable.cause!!))
}
return map
}
fun makeErrorMap(code: String? = null, message: String? = null, throwable: Throwable? = null, userInfo: WritableMap? = null): ReadableMap {
val map = Arguments.createMap()
map.putString("code", code)
map.putString("message", message)
map.putMap("cause", if (throwable != null) makeErrorCauseMap(throwable) else null)
map.putMap("userInfo", userInfo)
return map
}

View File

@ -0,0 +1,82 @@
package com.cuvent.experiences.friends.camera.utils
import android.hardware.camera2.CameraCharacteristics
import android.util.Size
import com.cuvent.experiences.friends.camera.parsers.bigger
import com.cuvent.experiences.friends.camera.parsers.parseLensFacing
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableArray
import kotlin.math.PI
import kotlin.math.atan
// 35mm is 135 film format, a standard in which focal lengths are usually measured
val Size35mm = Size(36, 24)
/**
* Convert a given array of focal lengths to the corresponding TypeScript union type name.
*
* Possible values for single cameras:
* * `"wide-angle-camera"`
* * `"ultra-wide-angle-camera"`
* * `"telephoto-camera"`
*
* Sources for the focal length categories:
* * [Telephoto Lens (wikipedia)](https://en.wikipedia.org/wiki/Telephoto_lens)
* * [Normal Lens (wikipedia)](https://en.wikipedia.org/wiki/Normal_lens)
* * [Wide-Angle Lens (wikipedia)](https://en.wikipedia.org/wiki/Wide-angle_lens)
* * [Ultra-Wide-Angle Lens (wikipedia)](https://en.wikipedia.org/wiki/Ultra_wide_angle_lens)
*/
fun CameraCharacteristics.getDeviceTypes(): ReadableArray {
// TODO: Check if getDeviceType() works correctly, even for logical multi-cameras
val focalLengths = this.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)!!
val sensorSize = this.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
// To get valid focal length standards we have to upscale to the 35mm measurement (film standard)
val cropFactor = Size35mm.bigger / sensorSize.bigger
val deviceTypes = Arguments.createArray()
val containsTelephoto = focalLengths.any { l -> (l * cropFactor) > 35 } // TODO: Telephoto lenses are typically > 85mm; anything between 35mm and 85mm (a "normal" lens) currently also lands in this bucket.
//val containsNormalLens = focalLengths.any { l -> (l * cropFactor) > 35 && (l * cropFactor) <= 55 }
val containsWideAngle = focalLengths.any { l -> (l * cropFactor) >= 24 && (l * cropFactor) <= 35 }
val containsUltraWideAngle = focalLengths.any { l -> (l * cropFactor) < 24 }
if (containsTelephoto)
deviceTypes.pushString("telephoto-camera")
if (containsWideAngle)
deviceTypes.pushString("wide-angle-camera")
if (containsUltraWideAngle)
deviceTypes.pushString("ultra-wide-angle-camera")
return deviceTypes
}
fun CameraCharacteristics.getFieldOfView(): Double {
val focalLengths = this.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)!!
val sensorSize = this.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
return 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
}
fun CameraCharacteristics.supportsFps(fps: Int): Boolean {
return this.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)!!
.any { it.upper >= fps && it.lower <= fps }
}
/**
* Get the value at which the Zoom is at neutral state (wide-angle camera zoom 0) (in percent, between 0.0-1.0)
*
* * On single-camera physical devices this value will always be 0
* * On devices with multiple cameras, e.g. triple-camera, this value will be a value between 0.0 and 1.0, where the field-of-view and zoom looks "neutral"
*/
val CameraCharacteristics.neutralZoomPercent: Float
get() {
val zoomRange = if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R)
this.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
else null
return if (zoomRange != null)
((1.0f - zoomRange.lower) / (zoomRange.upper - zoomRange.lower))
else
0.0f
}
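
A worked example of the math above, using assumed hardware values rather than any real device (and the file's existing kotlin.math imports):

fun exampleLensMath() {
  // Assumed: a 4.8 x 3.6 mm sensor with a single 4.25 mm focal length.
  val cropFactor = 36.0 / 4.8                     // Size35mm.bigger / sensorSize.bigger = 7.5
  val equivalentFocalLength = 4.25 * cropFactor   // ≈ 31.9 mm -> falls in the 24-35 mm "wide-angle-camera" bucket
  val fieldOfView = 2 * atan(4.8 / (4.25 * 2)) * (180 / PI) // ≈ 58.9 degrees
  // neutralZoomPercent with an assumed CONTROL_ZOOM_RATIO_RANGE of [0.6, 10.0]:
  val neutralZoom = (1.0 - 0.6) / (10.0 - 0.6)    // ≈ 0.043
  println("$cropFactor $equivalentFocalLength $fieldOfView $neutralZoom")
}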

View File

@ -0,0 +1,26 @@
package com.cuvent.experiences.friends.camera.utils
import android.annotation.SuppressLint
import androidx.camera.camera2.interop.Camera2CameraInfo
import androidx.camera.core.CameraSelector
import java.lang.IllegalArgumentException
/**
* Create a new [CameraSelector] which selects the camera with the given [cameraId]
*/
@SuppressLint("UnsafeExperimentalUsageError")
fun CameraSelector.Builder.byID(cameraId: String): CameraSelector.Builder {
return this.addCameraFilter { cameras ->
cameras.filter { cameraInfoX ->
try {
val cameraInfo = Camera2CameraInfo.from(cameraInfoX)
return@filter cameraInfo.cameraId == cameraId
} catch (e: IllegalArgumentException) {
// Occurs when the [cameraInfoX] is not castable to a Camera2 Info object.
// We can ignore this error because the [getAvailableCameraDevices()] func only returns Camera2 devices.
return@filter false
}
}
}
}

View File

@ -0,0 +1,32 @@
package com.cuvent.experiences.friends.camera.utils
import android.util.Range
import android.util.Size
import com.facebook.react.bridge.ReadableMap
class DeviceFormat(map: ReadableMap) {
val frameRateRanges: List<Range<Int>>
val photoSize: Size
val videoSize: Size
val maxZoom: Double
init {
frameRateRanges = map.getArray("frameRateRanges")!!.toArrayList().map { range ->
if (range is HashMap<*, *>)
rangeFactory(range["minFrameRate"], range["maxFrameRate"])
else
throw IllegalArgumentException()
}
photoSize = Size(map.getInt("photoWidth"), map.getInt("photoHeight"))
videoSize = Size(map.getInt("videoWidth"), map.getInt("videoHeight"))
maxZoom = map.getDouble("maxZoom")
}
}
fun rangeFactory(minFrameRate: Any?, maxFrameRate: Any?): Range<Int> {
return when(minFrameRate) {
is Int -> Range(minFrameRate, maxFrameRate as Int)
is Double -> Range(minFrameRate.toInt(), (maxFrameRate as Double).toInt())
else -> throw IllegalArgumentException()
}
}

View File

@ -0,0 +1,63 @@
package com.cuvent.experiences.friends.camera.utils
import androidx.exifinterface.media.ExifInterface
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.WritableMap
fun ExifInterface.buildMetadataMap(): WritableMap {
val metadataMap = Arguments.createMap()
metadataMap.putInt("Orientation", this.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL))
val tiffMap = Arguments.createMap()
tiffMap.putInt("ResolutionUnit", this.getAttributeInt(ExifInterface.TAG_RESOLUTION_UNIT, 0))
tiffMap.putString("Software", this.getAttribute(ExifInterface.TAG_SOFTWARE))
tiffMap.putString("Make", this.getAttribute(ExifInterface.TAG_MAKE))
tiffMap.putString("DateTime", this.getAttribute(ExifInterface.TAG_DATETIME))
tiffMap.putDouble("XResolution", this.getAttributeDouble(ExifInterface.TAG_X_RESOLUTION, 0.0))
tiffMap.putString("Model", this.getAttribute(ExifInterface.TAG_MODEL))
tiffMap.putDouble("YResolution", this.getAttributeDouble(ExifInterface.TAG_Y_RESOLUTION, 0.0))
metadataMap.putMap("{TIFF}", tiffMap)
val exifMap = Arguments.createMap()
exifMap.putString("DateTimeOriginal", this.getAttribute(ExifInterface.TAG_DATETIME_ORIGINAL))
exifMap.putDouble("ExposureTime", this.getAttributeDouble(ExifInterface.TAG_EXPOSURE_TIME, 0.0))
exifMap.putDouble("FNumber", this.getAttributeDouble(ExifInterface.TAG_F_NUMBER, 0.0))
val lensSpecificationArray = Arguments.createArray()
this.getAttributeRange(ExifInterface.TAG_LENS_SPECIFICATION)?.forEach { lensSpecificationArray.pushInt(it.toInt()) }
exifMap.putArray("LensSpecification", lensSpecificationArray)
exifMap.putDouble("ExposureBiasValue", this.getAttributeDouble(ExifInterface.TAG_EXPOSURE_BIAS_VALUE, 0.0))
exifMap.putInt("ColorSpace", this.getAttributeInt(ExifInterface.TAG_COLOR_SPACE, ExifInterface.COLOR_SPACE_S_RGB))
exifMap.putInt("FocalLenIn35mmFilm", this.getAttributeInt(ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM, 0))
exifMap.putDouble("BrightnessValue", this.getAttributeDouble(ExifInterface.TAG_BRIGHTNESS_VALUE, 0.0))
exifMap.putInt("ExposureMode", this.getAttributeInt(ExifInterface.TAG_EXPOSURE_MODE, ExifInterface.EXPOSURE_MODE_AUTO.toInt()))
exifMap.putString("LensModel", this.getAttribute(ExifInterface.TAG_LENS_MODEL))
exifMap.putInt("SceneType", this.getAttributeInt(ExifInterface.TAG_SCENE_TYPE, ExifInterface.SCENE_TYPE_DIRECTLY_PHOTOGRAPHED.toInt()))
exifMap.putInt("PixelXDimension", this.getAttributeInt(ExifInterface.TAG_PIXEL_X_DIMENSION, 0))
exifMap.putDouble("ShutterSpeedValue", this.getAttributeDouble(ExifInterface.TAG_SHUTTER_SPEED_VALUE, 0.0))
exifMap.putInt("SensingMethod", this.getAttributeInt(ExifInterface.TAG_SENSING_METHOD, ExifInterface.SENSOR_TYPE_NOT_DEFINED.toInt()))
val subjectAreaArray = Arguments.createArray()
this.getAttributeRange(ExifInterface.TAG_SUBJECT_AREA)?.forEach { subjectAreaArray.pushInt(it.toInt()) }
exifMap.putArray("SubjectArea", subjectAreaArray)
exifMap.putDouble("ApertureValue", this.getAttributeDouble(ExifInterface.TAG_APERTURE_VALUE, 0.0))
exifMap.putString("SubsecTimeDigitized", this.getAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED))
exifMap.putDouble("FocalLength", this.getAttributeDouble(ExifInterface.TAG_FOCAL_LENGTH, 0.0))
exifMap.putString("LensMake", this.getAttribute(ExifInterface.TAG_LENS_MAKE))
exifMap.putString("SubsecTimeOriginal", this.getAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL))
exifMap.putString("OffsetTimeDigitized", this.getAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED))
exifMap.putInt("PixelYDimension", this.getAttributeInt(ExifInterface.TAG_PIXEL_Y_DIMENSION, 0))
val isoSpeedRatingsArray = Arguments.createArray()
this.getAttributeRange(ExifInterface.TAG_PHOTOGRAPHIC_SENSITIVITY)?.forEach { isoSpeedRatingsArray.pushInt(it.toInt()) }
exifMap.putArray("ISOSpeedRatings", isoSpeedRatingsArray)
exifMap.putInt("WhiteBalance", this.getAttributeInt(ExifInterface.TAG_WHITE_BALANCE, 0))
exifMap.putString("DateTimeDigitized", this.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED))
exifMap.putString("OffsetTimeOriginal", this.getAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL))
exifMap.putString("ExifVersion", this.getAttribute(ExifInterface.TAG_EXIF_VERSION))
exifMap.putString("OffsetTime", this.getAttribute(ExifInterface.TAG_OFFSET_TIME))
exifMap.putInt("Flash", this.getAttributeInt(ExifInterface.TAG_FLASH, ExifInterface.FLAG_FLASH_FIRED.toInt()))
exifMap.putInt("ExposureProgram", this.getAttributeInt(ExifInterface.TAG_EXPOSURE_PROGRAM, ExifInterface.EXPOSURE_PROGRAM_NOT_DEFINED.toInt()))
exifMap.putInt("MeteringMode", this.getAttributeInt(ExifInterface.TAG_METERING_MODE, ExifInterface.METERING_MODE_UNKNOWN.toInt()))
metadataMap.putMap("{Exif}", exifMap)
return metadataMap
}
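
// Usage sketch (photoFile stands in for a captured JPEG written elsewhere in this commit):
private fun exampleMetadata(photoFile: java.io.File): WritableMap =
  ExifInterface(photoFile).buildMetadataMap()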

View File

@ -0,0 +1,37 @@
package com.cuvent.experiences.friends.camera.utils
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageCaptureException
import androidx.camera.core.ImageProxy
import java.util.concurrent.Executor
import java.util.concurrent.Executors
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine
suspend inline fun ImageCapture.takePicture(options: ImageCapture.OutputFileOptions, executor: Executor) = suspendCoroutine<ImageCapture.OutputFileResults> { cont ->
this.takePicture(options, executor, object: ImageCapture.OnImageSavedCallback {
override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
cont.resume(outputFileResults)
}
override fun onError(exception: ImageCaptureException) {
cont.resumeWithException(exception)
}
})
}
suspend inline fun ImageCapture.takePicture(executor: Executor) = suspendCoroutine<ImageProxy> { cont ->
this.takePicture(executor, object: ImageCapture.OnImageCapturedCallback() {
override fun onCaptureSuccess(image: ImageProxy) {
super.onCaptureSuccess(image)
cont.resume(image)
}
override fun onError(exception: ImageCaptureException) {
super.onError(exception)
cont.resumeWithException(exception)
}
})
}
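
// Usage sketch from a coroutine; imageCapture and outputFile are placeholders for values configured
// elsewhere, and the executor is created inline only for brevity:
suspend fun exampleTakePicture(imageCapture: ImageCapture, outputFile: java.io.File): ImageCapture.OutputFileResults {
  val options = ImageCapture.OutputFileOptions.Builder(outputFile).build()
  return imageCapture.takePicture(options, Executors.newSingleThreadExecutor())
}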

View File

@ -0,0 +1,12 @@
package com.cuvent.experiences.friends.camera.utils
import android.graphics.ImageFormat
import androidx.camera.core.ImageProxy
val ImageProxy.isRaw: Boolean
get() {
return when (format) {
ImageFormat.RAW_SENSOR, ImageFormat.RAW10, ImageFormat.RAW12, ImageFormat.RAW_PRIVATE -> true
else -> false
}
}

View File

@ -0,0 +1,79 @@
package com.cuvent.experiences.friends.camera.utils
import android.annotation.SuppressLint
import android.graphics.ImageFormat
import androidx.camera.core.ImageProxy
import com.cuvent.experiences.friends.camera.InvalidFormatError
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteBuffer
// Horizontally mirrors a tightly-packed, single-plane image buffer by reversing each row of bytes.
// Note: this only makes sense for 1-byte-per-pixel planes (e.g. the Y plane of a YUV_420_888 image).
fun flip(imageBytes: ByteArray, imageWidth: Int): ByteArray {
  val flipped = ByteArray(imageBytes.size)
  val rows = imageBytes.size / imageWidth
  for (row in 0 until rows) {
    val rowStart = row * imageWidth
    for (col in 0 until imageWidth) {
      flipped[rowStart + col] = imageBytes[rowStart + (imageWidth - 1 - col)]
    }
  }
  // copy any trailing bytes (partial row) unchanged
  val consumed = rows * imageWidth
  System.arraycopy(imageBytes, consumed, flipped, consumed, imageBytes.size - consumed)
  return flipped
}
@SuppressLint("UnsafeExperimentalUsageError")
fun ImageProxy.save(file: File, flipHorizontally: Boolean) {
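  // Note: the flipHorizontally parameter is not applied yet; the flip() helper above is not wired into any branch below.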
when (format) {
// TODO: ImageFormat.RAW_SENSOR
// TODO: ImageFormat.DEPTH_JPEG
ImageFormat.JPEG -> {
val buffer = planes[0].buffer
val bytes = ByteArray(buffer.remaining())
// copy image from buffer to byte array
buffer.get(bytes)
val output = FileOutputStream(file)
output.write(bytes)
output.close()
}
ImageFormat.YUV_420_888 -> {
// "prebuffer" simply contains the meta information about the following planes.
val prebuffer = ByteBuffer.allocate(16)
prebuffer.putInt(width)
.putInt(height)
.putInt(planes[1].pixelStride)
.putInt(planes[1].rowStride)
val output = FileOutputStream(file)
output.write(prebuffer.array()) // write meta information to file
// Now write the actual planes.
var buffer: ByteBuffer
var bytes: ByteArray
for (i in 0..2) {
buffer = planes[i].buffer
bytes = ByteArray(buffer.remaining()) // makes byte array large enough to hold image
buffer.get(bytes) // copies image from buffer to byte array
output.write(bytes) // write the byte array to file
}
output.close()
}
else -> throw InvalidFormatError(format)
}
}
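
// Hypothetical reader for the YUV_420_888 layout written above (not part of this commit). The 16-byte
// header holds width, height, chroma pixel stride and chroma row stride, written big-endian (ByteBuffer's
// default order), followed by the raw Y, U and V plane bytes back to back:
private fun readYuvHeader(file: File): IntArray =
  java.io.DataInputStream(java.io.FileInputStream(file)).use { input ->
    intArrayOf(input.readInt(), input.readInt(), input.readInt(), input.readInt())
  }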

View File

@ -0,0 +1,5 @@
package com.cuvent.experiences.friends.camera.utils
fun <T> List<T>.containsAny(elements: List<T>): Boolean {
return elements.any { element -> this.contains(element) }
}

View File

@ -0,0 +1,15 @@
package com.cuvent.experiences.friends.camera.utils
import android.content.Context
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.core.content.ContextCompat
import kotlin.coroutines.resume
import kotlin.coroutines.suspendCoroutine
suspend fun getCameraProvider(context: Context) = suspendCoroutine<ProcessCameraProvider> { cont ->
val cameraProviderFuture = ProcessCameraProvider.getInstance(context)
cameraProviderFuture.addListener({
cont.resume(cameraProviderFuture.get())
}, ContextCompat.getMainExecutor(context))
}
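
// Usage sketch from a coroutine (binding to a lifecycle with the configured use cases happens elsewhere):
suspend fun exampleGetProvider(context: Context): ProcessCameraProvider {
  val provider = getCameraProvider(context)
  // provider.bindToLifecycle(...) would be called here
  return provider
}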

View File

@ -0,0 +1,18 @@
package com.cuvent.experiences.friends.camera.utils
import android.util.Size
import android.view.Surface
/**
 * Return this [Size] rotated by the given [Surface] rotation constant (width and height are swapped for 90° and 270° rotations).
*/
fun Size.rotated(surfaceRotation: Int): Size {
return when (surfaceRotation) {
Surface.ROTATION_0 -> Size(width, height)
Surface.ROTATION_90 -> Size(height, width)
Surface.ROTATION_180 -> Size(width, height)
Surface.ROTATION_270 -> Size(height, width)
else -> Size(width, height)
}
}
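
// Example with illustrative values: a 1920x1080 size viewed through a 90°-rotated (portrait) surface.
private val examplePortraitSize = Size(1920, 1080).rotated(Surface.ROTATION_90) // -> 1080x1920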

View File

@ -0,0 +1,20 @@
package com.cuvent.experiences.friends.camera.utils
import android.view.View
import android.view.ViewGroup
// React does not trigger onLayout events for dynamically added views (`addView`).
// This fixes that.
// https://github.com/facebook/react-native/issues/17968#issuecomment-633308615
fun ViewGroup.installHierarchyFitter() {
setOnHierarchyChangeListener(object : ViewGroup.OnHierarchyChangeListener {
override fun onChildViewRemoved(parent: View?, child: View?) = Unit
override fun onChildViewAdded(parent: View?, child: View?) {
parent?.measure(
View.MeasureSpec.makeMeasureSpec(measuredWidth, View.MeasureSpec.EXACTLY),
View.MeasureSpec.makeMeasureSpec(measuredHeight, View.MeasureSpec.EXACTLY)
)
parent?.layout(0, 0, parent.measuredWidth, parent.measuredHeight)
}
})
}
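
// Usage sketch; ExamplePreviewContainer is hypothetical and stands in for the ViewGroup that adds
// its preview child dynamically (which is exactly the case this fitter exists for):
private class ExamplePreviewContainer(context: android.content.Context) : android.widget.FrameLayout(context) {
  init {
    installHierarchyFitter() // children added later via addView() will now get measured and laid out
  }
}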

View File

@ -0,0 +1,24 @@
package com.cuvent.experiences.friends.camera.utils
import com.facebook.react.bridge.WritableArray
fun WritableArray.pushInt(value: Int?) {
if (value == null)
this.pushNull()
else
this.pushInt(value)
}
fun WritableArray.pushDouble(value: Double?) {
if (value == null)
this.pushNull()
else
this.pushDouble(value)
}
fun WritableArray.pushBoolean(value: Boolean?) {
if (value == null)
this.pushNull()
else
this.pushBoolean(value)
}

View File

@ -0,0 +1,24 @@
package com.cuvent.experiences.friends.camera.utils
import com.facebook.react.bridge.WritableMap
fun WritableMap.putInt(key: String, value: Int?) {
if (value == null)
this.putNull(key)
else
this.putInt(key, value)
}
fun WritableMap.putDouble(key: String, value: Double?) {
if (value == null)
this.putNull(key)
else
this.putDouble(key, value)
}
fun WritableMap.putBoolean(key: String, value: Boolean?) {
if (value == null)
this.putNull(key)
else
this.putBoolean(key, value)
}

View File

@ -0,0 +1,27 @@
package com.cuvent.experiences.friends.camera.utils
import com.cuvent.experiences.friends.camera.CameraError
import com.cuvent.experiences.friends.camera.UnknownCameraError
import com.facebook.react.bridge.Promise
inline fun withPromise(promise: Promise, closure: () -> Any?) {
try {
val result = closure()
promise.resolve(result)
} catch (e: Throwable) {
e.printStackTrace()
val error = if (e is CameraError) e else UnknownCameraError(e)
promise.reject("${error.domain}/${error.id}", error.message, error.cause)
}
}
inline fun withSuspendablePromise(promise: Promise, closure: () -> Any?) {
try {
val result = closure()
promise.resolve(result)
} catch (e: Throwable) {
e.printStackTrace()
val error = if (e is CameraError) e else UnknownCameraError(e)
promise.reject("${error.domain}/${error.id}", error.message, error.cause)
}
}
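
// Usage sketch from a hypothetical @ReactMethod; the method name and parameters are illustrative:
private fun exampleResolveCameraCount(promise: Promise, cameraIds: Array<String>) {
  withPromise(promise) {
    cameraIds.size // the return value resolves the promise; a thrown CameraError rejects it with its "domain/id" code
  }
}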

View File

@ -1,24 +0,0 @@
package com.reactnativevisioncamera
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.bridge.ReactContextBaseJavaModule
import com.facebook.react.bridge.ReactMethod
import com.facebook.react.bridge.Promise
class VisionCameraModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
override fun getName(): String {
return "VisionCamera"
}
// Example method
// See https://reactnative.dev/docs/native-modules-android
@ReactMethod
fun multiply(a: Int, b: Int, promise: Promise) {
promise.resolve(a * b)
}
}