DevOps: Add KTLint to lint Kotlin code (#6)
* Adds KTLint as a GitHub Action
* Adds KTLint to the Gradle project for IDE integration
* Adds .editorconfig to configure KTLint (android/)
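For local use, a minimal sketch of running the same checks through the Gradle integration (assuming the `org.jlleitschuh.gradle.ktlint` plugin added below exposes its usual `ktlintCheck`/`ktlintFormat` tasks; run from the `android/` folder):

```sh
# Lint all Kotlin sources with KTLint via the Gradle plugin
./gradlew ktlintCheck

# Auto-correct the violations KTLint knows how to fix
./gradlew ktlintFormat
```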
This commit is contained in:
parent 2e60110070
commit 03b9246afe
11 .github/workflows/validate-android.yml (vendored)
@@ -7,10 +7,12 @@ on:
    paths:
      - '.github/workflows/validate-android.yml'
      - 'android/**'
      - '.editorconfig'
  pull_request:
    paths:
      - '.github/workflows/validate-android.yml'
      - 'android/**'
      - '.editorconfig'

jobs:
  lint:
@@ -32,3 +34,12 @@ jobs:
      - uses: yutailang0119/action-android-lint@v1.0.2
        with:
          xml_path: android/build/reports/lint-results.xml
  ktlint:
    name: Kotlin Lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Run KTLint
        uses: mrousavy/action-ktlint@v1.6
        with:
          github_token: ${{ secrets.github_token }}
5 android/.editorconfig (new file)
@@ -0,0 +1,5 @@
[*.{kt,kts}]
indent_size=2
insert_final_newline=true
max_line_length=off
disabled_rules=no-wildcard-imports
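The standalone `ktlint` binary reads the nearest `.editorconfig` for the files it lints, so a quick sanity check of these rules is to run it from inside `android/` — a hedged sketch, and the glob is an assumption about where the Kotlin sources live:

```sh
cd android
# Lints Kotlin sources, honouring android/.editorconfig (2-space indent, no max line length, wildcard imports allowed)
ktlint "src/**/*.kt"
```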
16 android/README.md (new file)
@@ -0,0 +1,16 @@
# android

This folder contains the Android-platform-specific code for react-native-vision-camera.

## Prerequisites

1. Install ktlint
```sh
brew install ktlint
```

## Getting Started

It is recommended that you work on the code using the Example project (`example/android/`), since it always includes the React Native header files and lets you test changes easily.

You can, however, still edit the library project here by opening this folder with Android Studio.
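Following the prerequisites above, violations can also be auto-fixed with the brew-installed binary; `-F` is ktlint's format flag, and the source glob is again an assumption:

```sh
cd android
# Rewrite files in place to satisfy the KTLint rules
ktlint -F "src/**/*.kt"
```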
@@ -15,12 +15,15 @@ buildscript {
    // noinspection DifferentKotlinGradleVersion
    classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    classpath "org.jetbrains.kotlin:kotlin-android-extensions:$kotlin_version"
    // ktlint
    classpath "org.jlleitschuh.gradle:ktlint-gradle:10.0.0"
  }
}

apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
apply plugin: 'org.jlleitschuh.gradle.ktlint'

def getExtOrDefault(name) {
  return rootProject.ext.has(name) ? rootProject.ext.get(name) : project.properties['VisionCamera_' + name]
@@ -6,11 +6,11 @@ import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.uimanager.ViewManager

class CameraPackage : ReactPackage {
  override fun createNativeModules(reactContext: ReactApplicationContext): List<NativeModule> {
    return listOf(CameraViewModule(reactContext))
  }

  override fun createViewManagers(reactContext: ReactApplicationContext): List<ViewManager<*, *>> {
    return listOf(CameraViewManager())
  }
}
@@ -7,20 +7,20 @@ import kotlinx.coroutines.guava.await
import java.util.concurrent.TimeUnit

suspend fun CameraView.focus(pointMap: ReadableMap) {
  val cameraControl = camera?.cameraControl ?: throw CameraNotReadyError()
  if (!pointMap.hasKey("x") || !pointMap.hasKey("y")) {
    throw InvalidTypeScriptUnionError("point", pointMap.toString())
  }

  val dpi = resources.displayMetrics.density
  val x = pointMap.getDouble("x") * dpi
  val y = pointMap.getDouble("y") * dpi

  val factory = SurfaceOrientedMeteringPointFactory(this.width.toFloat(), this.height.toFloat())
  val point = factory.createPoint(x.toFloat(), y.toFloat())
  val action = FocusMeteringAction.Builder(point, FocusMeteringAction.FLAG_AF or FocusMeteringAction.FLAG_AE)
    .setAutoCancelDuration(5, TimeUnit.SECONDS) // auto-reset after 5 seconds
    .build()

  cameraControl.startFocusAndMetering(action).await()
}
@@ -2,8 +2,8 @@ package com.mrousavy.camera
import android.annotation.SuppressLint
import androidx.camera.core.VideoCapture
import com.facebook.react.bridge.*
import com.mrousavy.camera.utils.makeErrorMap
import kotlinx.coroutines.*
import java.io.File

@@ -11,60 +11,62 @@ data class TemporaryFile(val path: String)

@SuppressLint("RestrictedApi")
suspend fun CameraView.startRecording(options: ReadableMap, onRecordCallback: Callback): TemporaryFile {
  if (videoCapture == null) {
    throw CameraNotReadyError()
  }
  if (options.hasKey("flash")) {
    val enableFlash = options.getString("flash") == "on"
    // overrides current torch mode value to enable flash while recording
    camera!!.cameraControl.enableTorch(enableFlash)
  }

  @Suppress("BlockingMethodInNonBlockingContext") // in withContext we are not blocking. False positive.
  val videoFile = withContext(Dispatchers.IO) {
    File.createTempFile("video", ".mp4", context.cacheDir).apply { deleteOnExit() }
  }
  val videoFileOptions = VideoCapture.OutputFileOptions.Builder(videoFile)

  videoCapture!!.startRecording(
    videoFileOptions.build(), recordVideoExecutor,
    object : VideoCapture.OnVideoSavedCallback {
      override fun onVideoSaved(outputFileResults: VideoCapture.OutputFileResults) {
        val map = Arguments.createMap()
        map.putString("path", videoFile.absolutePath)
        // TODO: duration and size
        onRecordCallback(map, null)

        // reset the torch mode
        camera!!.cameraControl.enableTorch(torch == "on")
      }

      override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
        val error = when (videoCaptureError) {
          VideoCapture.ERROR_ENCODER -> VideoEncoderError(message, cause)
          VideoCapture.ERROR_FILE_IO -> FileIOError(message, cause)
          VideoCapture.ERROR_INVALID_CAMERA -> InvalidCameraError(message, cause)
          VideoCapture.ERROR_MUXER -> VideoMuxerError(message, cause)
          VideoCapture.ERROR_RECORDING_IN_PROGRESS -> RecordingInProgressError(message, cause)
          else -> UnknownCameraError(Error(message, cause))
        }
        val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
        onRecordCallback(null, map)
        // reset the torch mode
        camera!!.cameraControl.enableTorch(torch == "on")
      }
    }
  )

  return TemporaryFile(videoFile.absolutePath)
}

@SuppressLint("RestrictedApi")
fun CameraView.stopRecording() {
  if (videoCapture == null) {
    throw CameraNotReadyError()
  }

  videoCapture!!.stopRecording()
  // reset torch mode to original value
  camera!!.cameraControl.enableTorch(torch == "on")
}
@@ -7,10 +7,10 @@ import androidx.camera.camera2.interop.Camera2CameraInfo
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageProxy
import androidx.exifinterface.media.ExifInterface
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
import java.io.File

@@ -18,88 +18,89 @@ private const val TAG = "CameraView.performance"

@SuppressLint("UnsafeExperimentalUsageError")
suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
  val startFunc = System.nanoTime()
  Log.d(CameraView.REACT_CLASS, "takePhoto() called")
  val imageCapture = imageCapture ?: throw CameraNotReadyError()

  if (options.hasKey("photoCodec")) {
    // TODO photoCodec
  }
  if (options.hasKey("qualityPrioritization")) {
    // TODO qualityPrioritization
  }
  if (options.hasKey("flash")) {
    val flashMode = options.getString("flash")
    imageCapture.flashMode = when (flashMode) {
      "on" -> ImageCapture.FLASH_MODE_ON
      "off" -> ImageCapture.FLASH_MODE_OFF
      "auto" -> ImageCapture.FLASH_MODE_AUTO
      else -> throw InvalidTypeScriptUnionError("flash", flashMode ?: "(null)")
    }
  }
  if (options.hasKey("enableAutoRedEyeReduction")) {
    // TODO enableAutoRedEyeReduction
  }
  if (options.hasKey("enableDualCameraFusion")) {
    // TODO enableDualCameraFusion
  }
  if (options.hasKey("enableVirtualDeviceFusion")) {
    // TODO enableVirtualDeviceFusion
  }
  if (options.hasKey("enableAutoStabilization")) {
    // TODO enableAutoStabilization
  }
  if (options.hasKey("enableAutoDistortionCorrection")) {
    // TODO enableAutoDistortionCorrection
  }
  val skipMetadata = if (options.hasKey("skipMetadata")) options.getBoolean("skipMetadata") else false

  val camera2Info = Camera2CameraInfo.from(camera!!.cameraInfo)
  val lensFacing = camera2Info.getCameraCharacteristic(CameraCharacteristics.LENS_FACING)
  // TODO: Flip image if lens is front side

  val results = awaitAll(
    async(coroutineContext) {
      Log.d(CameraView.REACT_CLASS, "Taking picture...")
      val startCapture = System.nanoTime()
      val pic = imageCapture.takePicture(takePhotoExecutor)
      val endCapture = System.nanoTime()
      Log.d(TAG, "Finished image capture in ${(endCapture - startCapture) / 1_000_000}ms")
      pic
    },
    async(Dispatchers.IO) {
      Log.d(CameraView.REACT_CLASS, "Creating temp file...")
      File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
    }
  )
  val photo = results.first { it is ImageProxy } as ImageProxy
  val file = results.first { it is File } as File

  val exif: ExifInterface?
  @Suppress("BlockingMethodInNonBlockingContext")
  withContext(Dispatchers.IO) {
    Log.d(CameraView.REACT_CLASS, "Saving picture to ${file.absolutePath}...")
    val startSave = System.nanoTime()
    photo.save(file, lensFacing == CameraCharacteristics.LENS_FACING_FRONT)
    val endSave = System.nanoTime()
    Log.d(TAG, "Finished image saving in ${(endSave - startSave) / 1_000_000}ms")
    // TODO: Read Exif from existing in-memory photo buffer instead of file?
    exif = if (skipMetadata) null else ExifInterface(file)
  }

  val map = Arguments.createMap()
  map.putString("path", file.absolutePath)
  map.putInt("width", photo.width)
  map.putInt("height", photo.height)
  map.putBoolean("isRawPhoto", photo.isRaw)

  val metadata = exif?.buildMetadataMap()
  map.putMap("metadata", metadata)

  photo.close()

  Log.d(CameraView.REACT_CLASS, "Finished taking photo!")

  val endFunc = System.nanoTime()
  Log.d(TAG, "Finished function execution in ${(endFunc - startFunc) / 1_000_000}ms")
  return@coroutineScope map
}
@@ -2,10 +2,10 @@ package com.mrousavy.camera
import android.graphics.Bitmap
import androidx.exifinterface.media.ExifInterface
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import com.mrousavy.camera.utils.buildMetadataMap
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.withContext

@@ -13,30 +13,30 @@ import java.io.File
import java.io.FileOutputStream

suspend fun CameraView.takeSnapshot(options: ReadableMap): WritableMap = coroutineScope {
  val bitmap = this@takeSnapshot.previewView.bitmap ?: throw CameraNotReadyError()

  val quality = if (options.hasKey("quality")) options.getInt("quality") else 100

  val file: File
  val exif: ExifInterface
  @Suppress("BlockingMethodInNonBlockingContext")
  withContext(Dispatchers.IO) {
    file = File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
    FileOutputStream(file).use { stream ->
      bitmap.compress(Bitmap.CompressFormat.JPEG, quality, stream)
    }
    exif = ExifInterface(file)
  }

  val map = Arguments.createMap()
  map.putString("path", file.absolutePath)
  map.putInt("width", bitmap.width)
  map.putInt("height", bitmap.height)
  map.putBoolean("isRawPhoto", false)

  val skipMetadata = if (options.hasKey("skipMetadata")) options.getBoolean("skipMetadata") else false
  val metadata = if (skipMetadata) null else exif.buildMetadataMap()
  map.putMap("metadata", metadata)

  return@coroutineScope map
}
@@ -20,9 +20,9 @@ import androidx.camera.extensions.NightPreviewExtender
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import androidx.lifecycle.*
import com.facebook.react.bridge.*
import com.facebook.react.uimanager.events.RCTEventEmitter
import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
import java.lang.IllegalArgumentException
import java.util.concurrent.Executors

@@ -75,326 +75,326 @@ import kotlin.math.min

@SuppressLint("ClickableViewAccessibility") // suppresses the warning that the pinch to zoom gesture is not accessible
class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
  // react properties
  // props that require reconfiguring
  var cameraId: String? = null // this is actually not a react prop directly, but the result of setting device={}
  var enableDepthData = false
  var enableHighResolutionCapture: Boolean? = null
  var enablePortraitEffectsMatteDelivery = false
  var scannableCodes: ReadableArray? = null
  // props that require format reconfiguring
  var format: ReadableMap? = null
  var fps: Int? = null
  var hdr: Boolean? = null // nullable bool
  var colorSpace: String? = null
  var lowLightBoost: Boolean? = null // nullable bool
  // other props
  var isActive = false
  var torch = "off"
  var zoom = 0.0 // in percent
  var enableZoomGesture = false

  // private properties
  private val reactContext: ReactContext
    get() = context as ReactContext

  internal val previewView: PreviewView
  private val cameraExecutor = Executors.newSingleThreadExecutor()
  internal val takePhotoExecutor = Executors.newSingleThreadExecutor()
  internal val recordVideoExecutor = Executors.newSingleThreadExecutor()

  internal var camera: Camera? = null
  internal var imageCapture: ImageCapture? = null
  internal var videoCapture: VideoCapture? = null

  private val scaleGestureListener: ScaleGestureDetector.SimpleOnScaleGestureListener
  private val scaleGestureDetector: ScaleGestureDetector
  private val touchEventListener: OnTouchListener

  private val lifecycleRegistry: LifecycleRegistry
  private var hostLifecycleState: Lifecycle.State

  private var minZoom: Float = 1f
  private var maxZoom: Float = 1f

  init {
    previewView = PreviewView(context)
    previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
    previewView.installHierarchyFitter() // If this is not called correctly, view finder will be black/blank
    addView(previewView)

    scaleGestureListener = object : ScaleGestureDetector.SimpleOnScaleGestureListener() {
      override fun onScale(detector: ScaleGestureDetector): Boolean {
        zoom = min(max(((zoom + 1) * detector.scaleFactor) - 1, 0.0), 1.0)
        update(arrayListOf("zoom"))
        return true
      }
    }
    scaleGestureDetector = ScaleGestureDetector(context, scaleGestureListener)
    touchEventListener = OnTouchListener { _, event -> return@OnTouchListener scaleGestureDetector.onTouchEvent(event) }

    hostLifecycleState = Lifecycle.State.INITIALIZED
    lifecycleRegistry = LifecycleRegistry(this)
    reactContext.addLifecycleEventListener(object : LifecycleEventListener {
      override fun onHostResume() {
        hostLifecycleState = Lifecycle.State.RESUMED
        updateLifecycleState()
      }
      override fun onHostPause() {
        hostLifecycleState = Lifecycle.State.CREATED
        updateLifecycleState()
      }
      override fun onHostDestroy() {
        hostLifecycleState = Lifecycle.State.DESTROYED
        updateLifecycleState()
        cameraExecutor.shutdown()
        takePhotoExecutor.shutdown()
        recordVideoExecutor.shutdown()
      }
    })
  }

  override fun getLifecycle(): Lifecycle {
    return lifecycleRegistry
  }

  /**
   * Updates the custom Lifecycle to match the host activity's lifecycle, and if it's active we narrow it down to the [isActive] and [isAttachedToWindow] fields.
   */
  private fun updateLifecycleState() {
    val lifecycleBefore = lifecycleRegistry.currentState
    if (hostLifecycleState == Lifecycle.State.RESUMED) {
      // Host Lifecycle (Activity) is currently active (RESUMED), so we narrow it down to the view's lifecycle
      if (isActive && isAttachedToWindow) {
        lifecycleRegistry.currentState = Lifecycle.State.RESUMED
      } else {
        lifecycleRegistry.currentState = Lifecycle.State.CREATED
      }
    } else {
      // Host Lifecycle (Activity) is currently inactive (STARTED or DESTROYED), so that overrules our view's lifecycle
      lifecycleRegistry.currentState = hostLifecycleState
    }
    Log.d(REACT_CLASS, "Lifecycle went from ${lifecycleBefore.name} -> ${lifecycleRegistry.currentState.name} (isActive: $isActive | isAttachedToWindow: $isAttachedToWindow)")
  }

  override fun onAttachedToWindow() {
    super.onAttachedToWindow()
    updateLifecycleState()
  }

  override fun onDetachedFromWindow() {
    super.onDetachedFromWindow()
    updateLifecycleState()
  }

  /**
   * Invalidate all React Props and reconfigure the device
   */
  fun update(changedProps: ArrayList<String>) = GlobalScope.launch(Dispatchers.Main) {
    try {
      val shouldReconfigureSession = changedProps.containsAny(propsThatRequireSessionReconfiguration)
      val shouldReconfigureZoom = shouldReconfigureSession || changedProps.contains("zoom")
      val shouldReconfigureTorch = shouldReconfigureSession || changedProps.contains("torch")

      if (changedProps.contains("isActive")) {
        updateLifecycleState()
      }
      if (shouldReconfigureSession) {
        configureSession()
      }
      if (shouldReconfigureZoom) {
        val scaled = (zoom.toFloat() * (maxZoom - minZoom)) + minZoom
        camera!!.cameraControl.setZoomRatio(scaled)
      }
      if (shouldReconfigureTorch) {
        camera!!.cameraControl.enableTorch(torch == "on")
      }
      if (changedProps.contains("enableZoomGesture")) {
        setOnTouchListener(if (enableZoomGesture) touchEventListener else null)
      }
    } catch (e: CameraError) {
      invokeOnError(e)
    }
  }

  /**
   * Configures the camera capture session. This should only be called when the camera device changes.
   */
  @SuppressLint("UnsafeExperimentalUsageError", "RestrictedApi")
  private suspend fun configureSession() {
    try {
      Log.d(REACT_CLASS, "Configuring session...")
      if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        throw MicrophonePermissionError()
      }
      if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
        throw CameraPermissionError()
      }
      if (cameraId == null) {
        throw NoCameraDeviceError()
      }
      if (format != null)
        Log.d(REACT_CLASS, "Configuring session with Camera ID $cameraId and custom format...")
      else
        Log.d(REACT_CLASS, "Configuring session with Camera ID $cameraId and default format options...")

      // Used to bind the lifecycle of cameras to the lifecycle owner
      val cameraProvider = getCameraProvider(context)

      val cameraSelector = CameraSelector.Builder().byID(cameraId!!).build()

      val rotation = previewView.display.rotation
      val aspectRatio = aspectRatio(previewView.width, previewView.height)

      val previewBuilder = Preview.Builder()
        .setTargetAspectRatio(aspectRatio)
        .setTargetRotation(rotation)
      val imageCaptureBuilder = ImageCapture.Builder()
        .setTargetAspectRatio(aspectRatio)
        .setTargetRotation(rotation)
        .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
      val videoCaptureBuilder = VideoCapture.Builder()
        .setTargetAspectRatio(aspectRatio)
        .setTargetRotation(rotation)

      if (format != null) {
        // User has selected a custom format={}. Use that
        val format = DeviceFormat(format!!)

        // The format (exported in CameraViewModule) specifies the resolution in ROTATION_90 (horizontal)
        val rotationRelativeToFormat = rotation - 1 // subtract one, so that ROTATION_90 becomes ROTATION_0 and so on

        fps?.let { fps ->
          if (format.frameRateRanges.any { it.contains(fps) }) {
            // Camera supports the given FPS (frame rate range)
            val frameDuration = (1.0 / fps.toDouble()).toLong() * 1_000_000_000

            Log.d(REACT_CLASS, "Setting AE_TARGET_FPS_RANGE to $fps-$fps, and SENSOR_FRAME_DURATION to $frameDuration")
            Camera2Interop.Extender(previewBuilder)
              .setCaptureRequestOption(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
              .setCaptureRequestOption(CaptureRequest.SENSOR_FRAME_DURATION, frameDuration)
            Camera2Interop.Extender(videoCaptureBuilder)
              .setCaptureRequestOption(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
              .setCaptureRequestOption(CaptureRequest.SENSOR_FRAME_DURATION, frameDuration)
          } else {
            throw FpsNotContainedInFormatError(fps)
          }
        }
        hdr?.let { hdr ->
          // Enable HDR scene mode if set
          if (hdr) {
            val imageExtension = HdrImageCaptureExtender.create(imageCaptureBuilder)
            val previewExtension = HdrPreviewExtender.create(previewBuilder)
            val isExtensionAvailable = imageExtension.isExtensionAvailable(cameraSelector) &&
              previewExtension.isExtensionAvailable(cameraSelector)
            if (isExtensionAvailable) {
              Log.i(REACT_CLASS, "Enabling native HDR extension...")
              imageExtension.enableExtension(cameraSelector)
              previewExtension.enableExtension(cameraSelector)
            } else {
              Log.e(REACT_CLASS, "Native HDR vendor extension not available!")
              throw HdrNotContainedInFormatError()
            }
          }
        }
        lowLightBoost?.let { lowLightBoost ->
          if (lowLightBoost) {
            val imageExtension = NightImageCaptureExtender.create(imageCaptureBuilder)
            val previewExtension = NightPreviewExtender.create(previewBuilder)
            val isExtensionAvailable = imageExtension.isExtensionAvailable(cameraSelector) &&
              previewExtension.isExtensionAvailable(cameraSelector)
            if (isExtensionAvailable) {
              Log.i(REACT_CLASS, "Enabling native night-mode extension...")
              imageExtension.enableExtension(cameraSelector)
              previewExtension.enableExtension(cameraSelector)
            } else {
              Log.e(REACT_CLASS, "Native night-mode vendor extension not available!")
              throw LowLightBoostNotContainedInFormatError()
            }
          }
        }

        // TODO: qualityPrioritization for ImageCapture
        imageCaptureBuilder.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
        val photoResolution = format.photoSize.rotated(rotationRelativeToFormat)
        // TODO: imageCaptureBuilder.setTargetResolution(photoResolution)
        Log.d(REACT_CLASS, "Using Photo Capture resolution $photoResolution")

        fps?.let { fps ->
          Log.d(REACT_CLASS, "Setting video recording FPS to $fps")
          videoCaptureBuilder.setVideoFrameRate(fps)
        }
      }

      val preview = previewBuilder.build()
      imageCapture = imageCaptureBuilder.build()
      videoCapture = videoCaptureBuilder.build()

      // Unbind use cases before rebinding
      cameraProvider.unbindAll()

      // Bind use cases to camera
      camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageCapture!!, videoCapture!!)
      preview.setSurfaceProvider(previewView.surfaceProvider)

      minZoom = camera!!.cameraInfo.zoomState.value?.minZoomRatio ?: 1f
      maxZoom = camera!!.cameraInfo.zoomState.value?.maxZoomRatio ?: 1f

      Log.d(REACT_CLASS, "Session configured! Camera: ${camera!!}")
      invokeOnInitialized()
    } catch (exc: Throwable) {
      throw when (exc) {
        is CameraError -> exc
        is IllegalArgumentException -> InvalidCameraDeviceError(exc)
        else -> UnknownCameraError(exc)
      }
    }
  }

  fun getAvailablePhotoCodecs(): WritableArray {
    // TODO
    return Arguments.createArray()
  }

  fun getAvailableVideoCodecs(): WritableArray {
    // TODO
    return Arguments.createArray()
  }

  override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) {
    super.onLayout(changed, left, top, right, bottom)
    Log.i(REACT_CLASS, "onLayout($changed, $left, $top, $right, $bottom) was called! (Width: $width, Height: $height)")
  }

  private fun invokeOnInitialized() {
    val reactContext = context as ReactContext
    reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraInitialized", null)
  }

  private fun invokeOnError(error: CameraError) {
    val event = Arguments.createMap()
    event.putString("code", error.code)
    event.putString("message", error.message)
    error.cause?.let { cause ->
      event.putMap("cause", errorToMap(cause))
    }
    val reactContext = context as ReactContext
    reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", event)
  }

  private fun errorToMap(error: Throwable): WritableMap {
    val map = Arguments.createMap()
    map.putString("message", error.message)
    map.putString("stacktrace", error.stackTraceToString())
    error.cause?.let { cause ->
      map.putMap("cause", errorToMap(cause))
    }
    return map
  }

  companion object {
    const val REACT_CLASS = "CameraView"

    private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost")
  }
}
@@ -1,156 +1,154 @@
package com.mrousavy.camera

import android.util.Log
import com.facebook.react.bridge.ReactContext
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.common.MapBuilder
import com.facebook.react.uimanager.SimpleViewManager
import com.facebook.react.uimanager.ThemedReactContext
import com.facebook.react.uimanager.annotations.ReactProp
import java.lang.ref.WeakReference

class CameraViewManager : SimpleViewManager<CameraView>() {
  private fun addChangedPropToTransaction(view: CameraView, changedProp: String) {
    if (cameraViewTransactions[view] == null) {
      cameraViewTransactions[view] = ArrayList()
    }
    cameraViewTransactions[view]!!.add(changedProp)
  }

  @ReactProp(name = "cameraId")
  fun setCameraId(view: CameraView, cameraId: String) {
    if (view.cameraId != cameraId)
      addChangedPropToTransaction(view, "cameraId")
    view.cameraId = cameraId
  }

  @ReactProp(name = "enableDepthData")
  fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
    if (view.enableDepthData != enableDepthData)
      addChangedPropToTransaction(view, "enableDepthData")
    view.enableDepthData = enableDepthData
  }

  @ReactProp(name = "enableHighResolutionCapture")
  fun setEnableHighResolutionCapture(view: CameraView, enableHighResolutionCapture: Boolean?) {
    if (view.enableHighResolutionCapture != enableHighResolutionCapture)
      addChangedPropToTransaction(view, "enableHighResolutionCapture")
    view.enableHighResolutionCapture = enableHighResolutionCapture
  }

  @ReactProp(name = "enablePortraitEffectsMatteDelivery")
  fun setEnablePortraitEffectsMatteDelivery(view: CameraView, enablePortraitEffectsMatteDelivery: Boolean) {
    if (view.enablePortraitEffectsMatteDelivery != enablePortraitEffectsMatteDelivery)
      addChangedPropToTransaction(view, "enablePortraitEffectsMatteDelivery")
    view.enablePortraitEffectsMatteDelivery = enablePortraitEffectsMatteDelivery
  }

  @ReactProp(name = "scannableCodes")
  fun setScannableCodes(view: CameraView, scannableCodes: ReadableArray?) {
    if (view.scannableCodes != scannableCodes)
      addChangedPropToTransaction(view, "scannableCodes")
    view.scannableCodes = scannableCodes
  }

  @ReactProp(name = "format")
  fun setFormat(view: CameraView, format: ReadableMap?) {
    if (view.format != format)
      addChangedPropToTransaction(view, "format")
    view.format = format
  }

  // We're treating -1 as "null" here, because when I make the fps parameter
  // of type "Int?" the react bridge throws an error.
  @ReactProp(name = "fps", defaultInt = -1)
  fun setFps(view: CameraView, fps: Int) {
    if (view.fps != fps)
      addChangedPropToTransaction(view, "fps")
    view.fps = if (fps > 0) fps else null
  }

  @ReactProp(name = "hdr")
  fun setHdr(view: CameraView, hdr: Boolean?) {
    if (view.hdr != hdr)
      addChangedPropToTransaction(view, "hdr")
    view.hdr = hdr
  }

  @ReactProp(name = "lowLightBoost")
  fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) {
    if (view.lowLightBoost != lowLightBoost)
      addChangedPropToTransaction(view, "lowLightBoost")
    view.lowLightBoost = lowLightBoost
  }

  @ReactProp(name = "colorSpace")
  fun setColorSpace(view: CameraView, colorSpace: String?) {
    if (view.colorSpace != colorSpace)
      addChangedPropToTransaction(view, "colorSpace")
    view.colorSpace = colorSpace
  }

  @ReactProp(name = "isActive")
  fun setIsActive(view: CameraView, isActive: Boolean) {
    if (view.isActive != isActive)
      addChangedPropToTransaction(view, "isActive")
    view.isActive = isActive
  }

  @ReactProp(name = "torch")
  fun setTorch(view: CameraView, torch: String) {
    if (view.torch != torch)
      addChangedPropToTransaction(view, "torch")
    // TODO: why THE FUCK is this not being called?
    view.torch = torch
  }

  @ReactProp(name = "zoom")
  fun setZoom(view: CameraView, zoom: Double) {
    if (view.zoom != zoom)
      addChangedPropToTransaction(view, "zoom")
    // TODO: why THE FUCK is this not being called?
    view.zoom = zoom
  }

  @ReactProp(name = "enableZoomGesture")
  fun setEnableZoomGesture(view: CameraView, enableZoomGesture: Boolean) {
    if (view.enableZoomGesture != enableZoomGesture)
      addChangedPropToTransaction(view, "enableZoomGesture")
    view.enableZoomGesture = enableZoomGesture
  }

  override fun onAfterUpdateTransaction(view: CameraView) {
    super.onAfterUpdateTransaction(view)
    val changedProps = cameraViewTransactions[view] ?: ArrayList()
    view.update(changedProps)
    cameraViewTransactions.remove(view)
  }

  public override fun createViewInstance(context: ThemedReactContext): CameraView {
    return CameraView(context)
  }

  override fun getExportedCustomDirectEventTypeConstants(): MutableMap<String, Any>? {
    return MapBuilder.builder<String, Any>()
      .put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized"))
      .put("cameraError", MapBuilder.of("registrationName", "onError"))
      .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
      .build()
  }

  override fun onDropViewInstance(view: CameraView) {
    Log.d(REACT_CLASS, "onDropViewInstance() called!")
    super.onDropViewInstance(view)
  }

  override fun getName(): String {
    return REACT_CLASS
  }

  companion object {
    const val REACT_CLASS = "CameraView"

    val cameraViewTransactions: HashMap<CameraView, ArrayList<String>> = HashMap()
|
||||
}
|
||||
val cameraViewTransactions: HashMap<CameraView, ArrayList<String>> = HashMap()
|
||||
}
|
||||
}
|
||||
|
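Each prop setter above only records which prop changed and defers the actual work to `onAfterUpdateTransaction`. The `addChangedPropToTransaction` helper itself is not part of this hunk; as a rough illustration of how such a helper could feed the `cameraViewTransactions` map shown in the companion object, a minimal sketch might look like this (hypothetical, not the project's actual implementation):

```kotlin
// Hypothetical sketch only – the real helper is defined elsewhere in CameraViewManager.kt.
private fun addChangedPropToTransaction(view: CameraView, changedProp: String) {
  if (cameraViewTransactions[view] == null) {
    cameraViewTransactions[view] = ArrayList()
  }
  cameraViewTransactions[view]!!.add(changedProp)
}
```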
@ -14,296 +14,299 @@ import androidx.camera.core.ImageCapture
import androidx.camera.extensions.HdrImageCaptureExtender
import androidx.camera.extensions.NightImageCaptureExtender
import androidx.core.content.ContextCompat
import com.facebook.react.bridge.*
import com.facebook.react.modules.core.PermissionAwareActivity
import com.facebook.react.modules.core.PermissionListener
import com.mrousavy.camera.parsers.*
import com.mrousavy.camera.utils.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch

class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
  companion object {
    const val REACT_CLASS = "CameraView"
    var RequestCode = 10

    fun parsePermissionStatus(status: Int): String {
      return when (status) {
        PackageManager.PERMISSION_DENIED -> "denied"
        PackageManager.PERMISSION_GRANTED -> "authorized"
        else -> "not-determined"
      }
    }
  }

  override fun getName(): String {
    return REACT_CLASS
  }

  private fun findCameraView(id: Int): CameraView = reactApplicationContext.currentActivity?.findViewById(id) ?: throw ViewNotFoundError(id)

  @ReactMethod
  fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
    GlobalScope.launch(Dispatchers.Main) {
      withPromise(promise) {
        val view = findCameraView(viewTag)
        view.takePhoto(options)
      }
    }
  }

  @ReactMethod
  fun takeSnapshot(viewTag: Int, options: ReadableMap, promise: Promise) {
    GlobalScope.launch(Dispatchers.Main) {
      withPromise(promise) {
        val view = findCameraView(viewTag)
        view.takeSnapshot(options)
      }
    }
  }

  // TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that
  @ReactMethod(isBlockingSynchronousMethod = true)
  fun startRecording(viewTag: Int, options: ReadableMap, onRecordCallback: Callback) {
    GlobalScope.launch(Dispatchers.Main) {
      val view = findCameraView(viewTag)
      view.startRecording(options, onRecordCallback)
    }
  }

  @ReactMethod
  fun stopRecording(viewTag: Int, promise: Promise) {
    withPromise(promise) {
      val view = findCameraView(viewTag)
      view.stopRecording()
      return@withPromise null
    }
  }

  @ReactMethod
  fun focus(viewTag: Int, point: ReadableMap, promise: Promise) {
    GlobalScope.launch(Dispatchers.Main) {
      withPromise(promise) {
        val view = findCameraView(viewTag)
        view.focus(point)
        return@withPromise null
      }
    }
  }

  @ReactMethod
  fun getAvailableVideoCodecs(viewTag: Int, promise: Promise) {
    withPromise(promise) {
      val view = findCameraView(viewTag)
      view.getAvailableVideoCodecs()
    }
  }

  @ReactMethod
  fun getAvailablePhotoCodecs(viewTag: Int, promise: Promise) {
    withPromise(promise) {
      val view = findCameraView(viewTag)
      view.getAvailablePhotoCodecs()
    }
  }

  // TODO: This uses the Camera2 API to list all characteristics of a camera device and therefore doesn't work with Camera1. Find a way to use CameraX for this
  @ReactMethod
  fun getAvailableCameraDevices(promise: Promise) {
    withPromise(promise) {
      val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as? CameraManager
        ?: throw CameraManagerUnavailableError()

      val cameraDevices: WritableArray = Arguments.createArray()

      manager.cameraIdList.forEach loop@{ id ->
        val cameraSelector = CameraSelector.Builder().byID(id).build()
        // TODO: ImageCapture.Builder - I'm not setting the target resolution, does that matter?
        val imageCaptureBuilder = ImageCapture.Builder()

        val characteristics = manager.getCameraCharacteristics(id)
        val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!

        // Filters out cameras that are LEGACY hardware level. Those don't support Preview + Photo Capture + Video Capture at the same time.
        if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
          Log.i(
            REACT_CLASS,
            "Skipping Camera #$id because it does not meet the minimum requirements for react-native-vision-camera. " +
              "See the tables at https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture for more information."
          )
          return@loop
        }

        val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!!
        val isMultiCam = Build.VERSION.SDK_INT >= Build.VERSION_CODES.P &&
          capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)
        val deviceTypes = characteristics.getDeviceTypes()

        val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
        val lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING)!!
        val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)!!
        val maxScalerZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM)!!
        val supportsDepthCapture = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
          capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)
        val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)
        val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE)
        val stabilizationModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)!! // only digital, no optical
        val zoomRange = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R)
          characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
        else null
        val name = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P)
          characteristics.get(CameraCharacteristics.INFO_VERSION)
        else null
        val fpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)!!

        var supportsHdr = false
        var supportsLowLightBoost = false
        try {
          val hdrExtension = HdrImageCaptureExtender.create(imageCaptureBuilder)
          supportsHdr = hdrExtension.isExtensionAvailable(cameraSelector)

          val nightExtension = NightImageCaptureExtender.create(imageCaptureBuilder)
          supportsLowLightBoost = nightExtension.isExtensionAvailable(cameraSelector)
        } catch (e: Throwable) {
          // error on checking availability. falls back to "false"
          Log.e(REACT_CLASS, "Failed to check HDR/Night Mode extension availability.", e)
        }

        val fieldOfView = characteristics.getFieldOfView()

        val map = Arguments.createMap()
        val formats = Arguments.createArray()
        map.putString("id", id)
        map.putArray("devices", deviceTypes)
        map.putString("position", parseLensFacing(lensFacing))
        map.putString("name", name ?: "${parseLensFacing(lensFacing)} ($id)")
        map.putBoolean("hasFlash", hasFlash)
        map.putBoolean("hasTorch", hasFlash)
        map.putBoolean("isMultiCam", isMultiCam)
        map.putBoolean("supportsRawCapture", supportsRawCapture)
        map.putBoolean("supportsDepthCapture", supportsDepthCapture)
        map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)
        if (zoomRange != null) {
          map.putDouble("minZoom", zoomRange.lower.toDouble())
          map.putDouble("maxZoom", zoomRange.upper.toDouble())
        } else {
          map.putDouble("minZoom", 1.0)
          map.putDouble("maxZoom", maxScalerZoom.toDouble())
        }
        map.putDouble("neutralZoom", characteristics.neutralZoomPercent.toDouble())

        val maxImageOutputSize = cameraConfig.getOutputSizes(ImageReader::class.java).maxByOrNull { it.width * it.height }!!

        // TODO: Should I really check MediaRecorder::class instead of SurfaceView::class?
        // Recording should always be done in the most efficient format, which is the format native to the camera framework
        cameraConfig.getOutputSizes(MediaRecorder::class.java).forEach { size ->
          val isHighestPhotoQualitySupported = areUltimatelyEqual(size, maxImageOutputSize)

          // Get the number of seconds that each frame will take to process
          val secondsPerFrame = cameraConfig.getOutputMinFrameDuration(MediaRecorder::class.java, size) / 1_000_000_000.0

          val frameRateRanges = Arguments.createArray()
          if (secondsPerFrame > 0) {
            val fps = (1.0 / secondsPerFrame).toInt()
            val frameRateRange = Arguments.createMap()
            frameRateRange.putInt("minFrameRate", 1)
            frameRateRange.putInt("maxFrameRate", fps)
            frameRateRanges.pushMap(frameRateRange)
          }
          fpsRanges.forEach { range ->
            val frameRateRange = Arguments.createMap()
            frameRateRange.putInt("minFrameRate", range.lower)
            frameRateRange.putInt("maxFrameRate", range.upper)
            frameRateRanges.pushMap(frameRateRange)
          }

          // TODO Revisit getAvailableCameraDevices (colorSpaces, more than YUV?)
          val colorSpaces = Arguments.createArray()
          colorSpaces.pushString("yuv")

          // TODO Revisit getAvailableCameraDevices (more accurate video stabilization modes)
          val videoStabilizationModes = Arguments.createArray()
          if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_OFF))
            videoStabilizationModes.pushString("off")
          if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
            videoStabilizationModes.pushString("auto")
            videoStabilizationModes.pushString("standard")
          }

          val format = Arguments.createMap()
          format.putDouble("photoHeight", size.height.toDouble())
          format.putDouble("photoWidth", size.width.toDouble())
          format.putDouble("videoHeight", size.height.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoHeight == photoHeight?)
          format.putDouble("videoWidth", size.width.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoWidth == photoWidth?)
          format.putBoolean("isHighestPhotoQualitySupported", isHighestPhotoQualitySupported)
          format.putInt("maxISO", isoRange?.upper)
          format.putInt("minISO", isoRange?.lower)
          format.putDouble("fieldOfView", fieldOfView) // TODO: Revisit getAvailableCameraDevices (is fieldOfView accurate?)
          format.putDouble("maxZoom", (zoomRange?.upper ?: maxScalerZoom).toDouble())
          format.putArray("colorSpaces", colorSpaces)
          format.putBoolean("supportsVideoHDR", false) // TODO: supportsVideoHDR
          format.putBoolean("supportsPhotoHDR", supportsHdr)
          format.putArray("frameRateRanges", frameRateRanges)
          format.putString("autoFocusSystem", "none") // TODO: Revisit getAvailableCameraDevices (autoFocusSystem) (CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES or CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION)
          format.putArray("videoStabilizationModes", videoStabilizationModes)
          formats.pushMap(format)
        }

        map.putArray("formats", formats)
        cameraDevices.pushMap(map)
      }

      return@withPromise cameraDevices
    }
  }

  @ReactMethod
  fun getCameraPermissionStatus(promise: Promise) {
    val status = ContextCompat.checkSelfPermission(reactApplicationContext, Manifest.permission.CAMERA)
    promise.resolve(parsePermissionStatus(status))
  }

  @ReactMethod
  fun getMicrophonePermissionStatus(promise: Promise) {
    val status = ContextCompat.checkSelfPermission(reactApplicationContext, Manifest.permission.RECORD_AUDIO)
    promise.resolve(parsePermissionStatus(status))
  }

  @ReactMethod
  fun requestCameraPermission(promise: Promise) {
    val activity = reactApplicationContext.currentActivity
    if (activity is PermissionAwareActivity) {
      val currentRequestCode = RequestCode
      RequestCode++
      val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
        if (requestCode == currentRequestCode) {
          val permissionStatus = grantResults[0]
          promise.resolve(parsePermissionStatus(permissionStatus))
          return@PermissionListener true
        }
        return@PermissionListener false
      }
      activity.requestPermissions(arrayOf(Manifest.permission.CAMERA), currentRequestCode, listener)
    } else {
      promise.reject("NO_ACTIVITY", "No PermissionAwareActivity was found! Make sure the app has launched before calling this function.")
    }
  }

  @ReactMethod
  fun requestMicrophonePermission(promise: Promise) {
    val activity = reactApplicationContext.currentActivity
    if (activity is PermissionAwareActivity) {
      val currentRequestCode = RequestCode
      RequestCode++
      val listener = PermissionListener { requestCode: Int, _: Array<String>, grantResults: IntArray ->
        if (requestCode == currentRequestCode) {
          val permissionStatus = grantResults[0]
          promise.resolve(parsePermissionStatus(permissionStatus))
          return@PermissionListener true
        }
        return@PermissionListener false
      }
      activity.requestPermissions(arrayOf(Manifest.permission.RECORD_AUDIO), currentRequestCode, listener)
    } else {
      promise.reject("NO_ACTIVITY", "No PermissionAwareActivity was found! Make sure the app has launched before calling this function.")
    }
  }
}
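Every `@ReactMethod` above funnels its result through `withPromise`, which lives in the utils package and is not shown in this hunk. Assuming it simply resolves the promise with the closure's return value and maps any thrown `CameraError` to its `code`, a simplified sketch could look like the following; this is an assumption for illustration only, and the repository's actual helper may differ (for example by accepting a suspending closure):

```kotlin
import com.facebook.react.bridge.Promise

// Hypothetical sketch – not the project's actual withPromise implementation.
inline fun withPromise(promise: Promise, closure: () -> Any?) {
  try {
    // Resolve with whatever the closure returns (may be null).
    promise.resolve(closure())
  } catch (e: Throwable) {
    // Wrap unknown exceptions so JS always receives a "domain/id" style code.
    val error = if (e is CameraError) e else UnknownCameraError(e)
    promise.reject(error.code, error.message, error)
  }
}
```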
@ -3,59 +3,64 @@ package com.mrousavy.camera
import android.graphics.ImageFormat

abstract class CameraError(
  /**
   * The domain of the error. Error domains are used to group errors.
   *
   * Example: "permission"
   */
  val domain: String,
  /**
   * The id of the error. Errors are uniquely identified under a given domain.
   *
   * Example: "microphone-permission-denied"
   */
  val id: String,
  /**
   * A detailed error description of "what went wrong".
   *
   * Example: "The microphone permission was denied!"
   */
  message: String,
  /**
   * A throwable that caused this error.
   */
  cause: Throwable? = null
) : Throwable("[$domain/$id] $message", cause)

val CameraError.code: String
  get() = "$domain/$id"

class MicrophonePermissionError : CameraError("permission", "microphone-permission-denied", "The Microphone permission was denied!")
class CameraPermissionError : CameraError("permission", "camera-permission-denied", "The Camera permission was denied!")

class InvalidTypeScriptUnionError(unionName: String, unionValue: String) : CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
class UnsupportedOSError(unionName: String, unionValue: String, supportedOnOS: String) : CameraError("parameter", "unsupported-os", "The given value \"$unionValue\" could not be used for $unionName, as it is only available on Android $supportedOnOS and above!")

class NoCameraDeviceError : CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
class InvalidCameraDeviceError(cause: Throwable) : CameraError("device", "invalid-device", "The given Camera device could not be found for use-case binding!", cause)

class FpsNotContainedInFormatError(fps: Int) : CameraError("format", "invalid-fps", "The given FPS were not valid for the currently selected format. Make sure you select a format which `frameRateRanges` includes $fps FPS!")
class HdrNotContainedInFormatError() : CameraError(
  "format", "invalid-hdr",
  "The currently selected format does not support HDR capture! " +
    "Make sure you select a format which `frameRateRanges` includes `supportsPhotoHDR`!"
)
class LowLightBoostNotContainedInFormatError() : CameraError(
  "format", "invalid-low-light-boost",
  "The currently selected format does not support low-light boost (night mode)! " +
    "Make sure you select a format which includes `supportsLowLightBoost`."
)

class CameraNotReadyError : CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")

class InvalidFormatError(format: Int) : CameraError("capture", "invalid-photo-format", "The Photo has an invalid format! Expected ${ImageFormat.YUV_420_888}, actual: $format")
class VideoEncoderError(message: String, cause: Throwable? = null) : CameraError("capture", "encoder-error", message, cause)
class VideoMuxerError(message: String, cause: Throwable? = null) : CameraError("capture", "muxer-error", message, cause)
class RecordingInProgressError(message: String, cause: Throwable? = null) : CameraError("capture", "recording-in-progress", message, cause)
class FileIOError(message: String, cause: Throwable? = null) : CameraError("capture", "file-io-error", message, cause)
class InvalidCameraError(message: String, cause: Throwable? = null) : CameraError("capture", "not-bound-error", message, cause)

class CameraManagerUnavailableError : CameraError("system", "no-camera-manager", "The Camera manager instance was unavailable for the current Application!")
class ViewNotFoundError(viewId: Int) : CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")

class UnknownCameraError(cause: Throwable) : CameraError("unknown", "unknown", cause.message ?: "An unknown camera error occurred.", cause)
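The domain/id pair is what ultimately reaches JavaScript as a single error code via the `code` extension property. A short usage sketch, purely illustrative:

```kotlin
// Illustrative only: how a thrown CameraError exposes its code and message.
try {
  throw FpsNotContainedInFormatError(240)
} catch (e: CameraError) {
  println(e.code)    // "format/invalid-fps"
  println(e.message) // "[format/invalid-fps] The given FPS were not valid for the currently selected format. ..."
}
```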
@ -6,10 +6,10 @@ import android.hardware.camera2.CameraCharacteristics
 * Parses Lens Facing int to a string representation usable for the TypeScript types.
 */
fun parseLensFacing(lensFacing: Int?): String? {
  return when (lensFacing) {
    CameraCharacteristics.LENS_FACING_BACK -> "back"
    CameraCharacteristics.LENS_FACING_FRONT -> "front"
    CameraCharacteristics.LENS_FACING_EXTERNAL -> "external"
    else -> null
  }
}
@ -6,15 +6,15 @@ import kotlin.math.max
import kotlin.math.min

val Size.bigger: Int
  get() = max(this.width, this.height)
val Size.smaller: Int
  get() = min(this.width, this.height)

val SizeF.bigger: Float
  get() = max(this.width, this.height)
val SizeF.smaller: Float
  get() = min(this.width, this.height)

fun areUltimatelyEqual(size1: Size, size2: Size): Boolean {
  return size1.width * size1.height == size2.width * size2.height
}
@ -20,9 +20,9 @@ private const val RATIO_16_9_VALUE = 16.0 / 9.0
 * @return suitable aspect ratio
 */
fun aspectRatio(width: Int, height: Int): Int {
  val previewRatio = max(width, height).toDouble() / min(width, height)
  if (abs(previewRatio - RATIO_4_3_VALUE) <= abs(previewRatio - RATIO_16_9_VALUE)) {
    return AspectRatio.RATIO_4_3
  }
  return AspectRatio.RATIO_16_9
}
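As a worked example of the function above: a 1920×1080 preview gives previewRatio = 1920 / 1080 ≈ 1.78; since |1.78 − 4/3| ≈ 0.44 is larger than |1.78 − 16/9| ≈ 0, the function returns AspectRatio.RATIO_16_9, while a 1440×1080 preview (ratio ≈ 1.33) maps to AspectRatio.RATIO_4_3.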
@ -3,20 +3,20 @@ package com.mrousavy.camera.utils
import com.facebook.react.bridge.*

private fun makeErrorCauseMap(throwable: Throwable): ReadableMap {
  val map = Arguments.createMap()
  map.putString("message", throwable.message)
  map.putString("stacktrace", throwable.stackTraceToString())
  if (throwable.cause != null) {
    map.putMap("cause", makeErrorCauseMap(throwable.cause!!))
  }
  return map
}

fun makeErrorMap(code: String? = null, message: String? = null, throwable: Throwable? = null, userInfo: WritableMap? = null): ReadableMap {
  val map = Arguments.createMap()
  map.putString("code", code)
  map.putString("message", message)
  map.putMap("cause", if (throwable != null) makeErrorCauseMap(throwable) else null)
  map.putMap("userInfo", userInfo)
  return map
}
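The map produced by `makeErrorMap` mirrors the error object the JS side receives; for a wrapped exception it would look roughly like the following (an illustrative shape, with hypothetical values):

```kotlin
// Illustrative shape of the map produced by makeErrorMap for a FileIOError:
// {
//   "code": "capture/file-io-error",
//   "message": "Failed to write file",
//   "cause": { "message": "...", "stacktrace": "...", "cause": { ... } },
//   "userInfo": null
// }
```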
@ -2,14 +2,12 @@ package com.mrousavy.camera.utils
import android.hardware.camera2.CameraCharacteristics
import android.util.Size
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableArray
import com.mrousavy.camera.parsers.bigger
import com.mrousavy.camera.parsers.parseLensFacing
import kotlin.math.PI
import kotlin.math.atan

// 35mm is 135 film format, a standard in which focal lengths are usually measured
val Size35mm = Size(36, 24)
@ -28,40 +26,40 @@ val Size35mm = Size(36, 24)
 * * [Ultra-Wide-Angle Lens (wikipedia)](https://en.wikipedia.org/wiki/Ultra_wide_angle_lens)
 */
fun CameraCharacteristics.getDeviceTypes(): ReadableArray {
  // TODO: Check if getDeviceType() works correctly, even for logical multi-cameras
  val focalLengths = this.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)!!
  val sensorSize = this.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!

  // To get valid focal length standards we have to upscale to the 35mm measurement (film standard)
  val cropFactor = Size35mm.bigger / sensorSize.bigger

  val deviceTypes = Arguments.createArray()

  val containsTelephoto = focalLengths.any { l -> (l * cropFactor) > 35 } // TODO: Telephoto lenses are > 85mm, but we don't have anything between that range..
  // val containsNormalLens = focalLengths.any { l -> (l * cropFactor) > 35 && (l * cropFactor) <= 55 }
  val containsWideAngle = focalLengths.any { l -> (l * cropFactor) >= 24 && (l * cropFactor) <= 35 }
  val containsUltraWideAngle = focalLengths.any { l -> (l * cropFactor) < 24 }

  if (containsTelephoto)
    deviceTypes.pushString("telephoto-camera")
  if (containsWideAngle)
    deviceTypes.pushString("wide-angle-camera")
  if (containsUltraWideAngle)
    deviceTypes.pushString("ultra-wide-angle-camera")

  return deviceTypes
}

fun CameraCharacteristics.getFieldOfView(): Double {
  val focalLengths = this.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)!!
  val sensorSize = this.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!

  return 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
}

fun CameraCharacteristics.supportsFps(fps: Int): Boolean {
  return this.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)!!
    .any { it.upper >= fps && it.lower <= fps }
}

@ -71,12 +69,12 @@ fun CameraCharacteristics.supportsFps(fps: Int): Boolean {

/**
 * * On devices with multiple cameras, e.g. triple-camera, this value will be a value between 0.0 and 1.0, where the field-of-view and zoom looks "neutral"
 */
val CameraCharacteristics.neutralZoomPercent: Float
  get() {
    val zoomRange = if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R)
      this.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
    else null
    return if (zoomRange != null)
      ((1.0f - zoomRange.lower) / (zoomRange.upper - zoomRange.lower))
    else
      0.0f
  }
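As a rough sanity check of `getFieldOfView()`: for an illustrative sensor that is 6.4 mm on its longer side paired with a 4.25 mm focal length (made-up numbers, not taken from any specific device), 2 · atan(6.4 / (2 · 4.25)) · (180 / π) ≈ 74°, which is a typical wide-angle field of view.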
@ -5,22 +5,21 @@ import androidx.camera.camera2.interop.Camera2CameraInfo
import androidx.camera.core.CameraSelector
import java.lang.IllegalArgumentException

/**
 * Create a new [CameraSelector] which selects the camera with the given [cameraId]
 */
@SuppressLint("UnsafeExperimentalUsageError")
fun CameraSelector.Builder.byID(cameraId: String): CameraSelector.Builder {
  return this.addCameraFilter { cameras ->
    cameras.filter { cameraInfoX ->
      try {
        val cameraInfo = Camera2CameraInfo.from(cameraInfoX)
        return@filter cameraInfo.cameraId == cameraId
      } catch (e: IllegalArgumentException) {
        // Occurs when the [cameraInfoX] is not castable to a Camera2 Info object.
        // We can ignore this error because the [getAvailableCameraDevices()] func only returns Camera2 devices.
        return@filter false
      }
    }
  }
}
@ -5,28 +5,28 @@ import android.util.Size
import com.facebook.react.bridge.ReadableMap

class DeviceFormat(map: ReadableMap) {
  val frameRateRanges: List<Range<Int>>
  val photoSize: Size
  val videoSize: Size
  val maxZoom: Double

  init {
    frameRateRanges = map.getArray("frameRateRanges")!!.toArrayList().map { range ->
      if (range is HashMap<*, *>)
        rangeFactory(range["minFrameRate"], range["maxFrameRate"])
      else
        throw IllegalArgumentException()
    }
    photoSize = Size(map.getInt("photoWidth"), map.getInt("photoHeight"))
    videoSize = Size(map.getInt("videoWidth"), map.getInt("videoHeight"))
    maxZoom = map.getDouble("maxZoom")
  }
}

fun rangeFactory(minFrameRate: Any?, maxFrameRate: Any?): Range<Int> {
  return when (minFrameRate) {
    is Int -> Range(minFrameRate, maxFrameRate as Int)
    is Double -> Range(minFrameRate.toInt(), (maxFrameRate as Double).toInt())
    else -> throw IllegalArgumentException()
  }
}
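Numbers crossing the React Native bridge typically arrive as Double even when they are conceptually integers, which is presumably why `rangeFactory` accepts `Any?` and branches on the runtime type: both `rangeFactory(30, 60)` and `rangeFactory(30.0, 60.0)` yield `Range(30, 60)`, while anything else throws `IllegalArgumentException`.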
@ -4,60 +4,59 @@ import androidx.exifinterface.media.ExifInterface
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.WritableMap

fun ExifInterface.buildMetadataMap(): WritableMap {
  val metadataMap = Arguments.createMap()
  metadataMap.putInt("Orientation", this.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL))

  val tiffMap = Arguments.createMap()
  tiffMap.putInt("ResolutionUnit", this.getAttributeInt(ExifInterface.TAG_RESOLUTION_UNIT, 0))
  tiffMap.putString("Software", this.getAttribute(ExifInterface.TAG_SOFTWARE))
  tiffMap.putString("Make", this.getAttribute(ExifInterface.TAG_MAKE))
  tiffMap.putString("DateTime", this.getAttribute(ExifInterface.TAG_DATETIME))
  tiffMap.putDouble("XResolution", this.getAttributeDouble(ExifInterface.TAG_X_RESOLUTION, 0.0))
  tiffMap.putString("Model", this.getAttribute(ExifInterface.TAG_MODEL))
  tiffMap.putDouble("YResolution", this.getAttributeDouble(ExifInterface.TAG_Y_RESOLUTION, 0.0))
  metadataMap.putMap("{TIFF}", tiffMap)

  val exifMap = Arguments.createMap()
  exifMap.putString("DateTimeOriginal", this.getAttribute(ExifInterface.TAG_DATETIME_ORIGINAL))
  exifMap.putDouble("ExposureTime", this.getAttributeDouble(ExifInterface.TAG_EXPOSURE_TIME, 0.0))
  exifMap.putDouble("FNumber", this.getAttributeDouble(ExifInterface.TAG_F_NUMBER, 0.0))
  val lensSpecificationArray = Arguments.createArray()
  this.getAttributeRange(ExifInterface.TAG_LENS_SPECIFICATION)?.forEach { lensSpecificationArray.pushInt(it.toInt()) }
  exifMap.putArray("LensSpecification", lensSpecificationArray)
  exifMap.putDouble("ExposureBiasValue", this.getAttributeDouble(ExifInterface.TAG_EXPOSURE_BIAS_VALUE, 0.0))
  exifMap.putInt("ColorSpace", this.getAttributeInt(ExifInterface.TAG_COLOR_SPACE, ExifInterface.COLOR_SPACE_S_RGB))
  exifMap.putInt("FocalLenIn35mmFilm", this.getAttributeInt(ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM, 0))
  exifMap.putDouble("BrightnessValue", this.getAttributeDouble(ExifInterface.TAG_BRIGHTNESS_VALUE, 0.0))
  exifMap.putInt("ExposureMode", this.getAttributeInt(ExifInterface.TAG_EXPOSURE_MODE, ExifInterface.EXPOSURE_MODE_AUTO.toInt()))
  exifMap.putString("LensModel", this.getAttribute(ExifInterface.TAG_LENS_MODEL))
  exifMap.putInt("SceneType", this.getAttributeInt(ExifInterface.TAG_SCENE_TYPE, ExifInterface.SCENE_TYPE_DIRECTLY_PHOTOGRAPHED.toInt()))
  exifMap.putInt("PixelXDimension", this.getAttributeInt(ExifInterface.TAG_PIXEL_X_DIMENSION, 0))
  exifMap.putDouble("ShutterSpeedValue", this.getAttributeDouble(ExifInterface.TAG_SHUTTER_SPEED_VALUE, 0.0))
  exifMap.putInt("SensingMethod", this.getAttributeInt(ExifInterface.TAG_SENSING_METHOD, ExifInterface.SENSOR_TYPE_NOT_DEFINED.toInt()))
  val subjectAreaArray = Arguments.createArray()
  this.getAttributeRange(ExifInterface.TAG_SUBJECT_AREA)?.forEach { subjectAreaArray.pushInt(it.toInt()) }
  exifMap.putArray("SubjectArea", subjectAreaArray)
  exifMap.putDouble("ApertureValue", this.getAttributeDouble(ExifInterface.TAG_APERTURE_VALUE, 0.0))
  exifMap.putString("SubsecTimeDigitized", this.getAttribute(ExifInterface.TAG_SUBSEC_TIME_DIGITIZED))
  exifMap.putDouble("FocalLength", this.getAttributeDouble(ExifInterface.TAG_FOCAL_LENGTH, 0.0))
  exifMap.putString("LensMake", this.getAttribute(ExifInterface.TAG_LENS_MAKE))
  exifMap.putString("SubsecTimeOriginal", this.getAttribute(ExifInterface.TAG_SUBSEC_TIME_ORIGINAL))
  exifMap.putString("OffsetTimeDigitized", this.getAttribute(ExifInterface.TAG_OFFSET_TIME_DIGITIZED))
  exifMap.putInt("PixelYDimension", this.getAttributeInt(ExifInterface.TAG_PIXEL_Y_DIMENSION, 0))
  val isoSpeedRatingsArray = Arguments.createArray()
  this.getAttributeRange(ExifInterface.TAG_PHOTOGRAPHIC_SENSITIVITY)?.forEach { isoSpeedRatingsArray.pushInt(it.toInt()) }
  exifMap.putArray("ISOSpeedRatings", isoSpeedRatingsArray)
  exifMap.putInt("WhiteBalance", this.getAttributeInt(ExifInterface.TAG_WHITE_BALANCE, 0))
  exifMap.putString("DateTimeDigitized", this.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED))
  exifMap.putString("OffsetTimeOriginal", this.getAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL))
  exifMap.putString("ExifVersion", this.getAttribute(ExifInterface.TAG_EXIF_VERSION))
  exifMap.putString("OffsetTime", this.getAttribute(ExifInterface.TAG_OFFSET_TIME))
  exifMap.putInt("Flash", this.getAttributeInt(ExifInterface.TAG_FLASH, ExifInterface.FLAG_FLASH_FIRED.toInt()))
  exifMap.putInt("ExposureProgram", this.getAttributeInt(ExifInterface.TAG_EXPOSURE_PROGRAM, ExifInterface.EXPOSURE_PROGRAM_NOT_DEFINED.toInt()))
  exifMap.putInt("MeteringMode", this.getAttributeInt(ExifInterface.TAG_METERING_MODE, ExifInterface.METERING_MODE_UNKNOWN.toInt()))
  metadataMap.putMap("{Exif}", exifMap)
|
||||
val isoSpeedRatingsArray = Arguments.createArray()
|
||||
this.getAttributeRange(ExifInterface.TAG_PHOTOGRAPHIC_SENSITIVITY)?.forEach { isoSpeedRatingsArray.pushInt(it.toInt()) }
|
||||
exifMap.putArray("ISOSpeedRatings", isoSpeedRatingsArray)
|
||||
exifMap.putInt("WhiteBalance", this.getAttributeInt(ExifInterface.TAG_WHITE_BALANCE, 0))
|
||||
exifMap.putString("DateTimeDigitized", this.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED))
|
||||
exifMap.putString("OffsetTimeOriginal", this.getAttribute(ExifInterface.TAG_OFFSET_TIME_ORIGINAL))
|
||||
exifMap.putString("ExifVersion", this.getAttribute(ExifInterface.TAG_EXIF_VERSION))
|
||||
exifMap.putString("OffsetTime", this.getAttribute(ExifInterface.TAG_OFFSET_TIME))
|
||||
exifMap.putInt("Flash", this.getAttributeInt(ExifInterface.TAG_FLASH, ExifInterface.FLAG_FLASH_FIRED.toInt()))
|
||||
exifMap.putInt("ExposureProgram", this.getAttributeInt(ExifInterface.TAG_EXPOSURE_PROGRAM, ExifInterface.EXPOSURE_PROGRAM_NOT_DEFINED.toInt()))
|
||||
exifMap.putInt("MeteringMode", this.getAttributeInt(ExifInterface.TAG_METERING_MODE, ExifInterface.METERING_MODE_UNKNOWN.toInt()))
|
||||
metadataMap.putMap("{Exif}", exifMap)
|
||||
|
||||
return metadataMap
|
||||
return metadataMap
|
||||
}
|
||||
|
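For context, a minimal sketch of how this metadata map might be produced after a capture, assuming the code above lives in an ExifInterface extension such as `buildMetadataMap()`. The extension name and the call site below are illustrative, not taken from this diff.

```kotlin
import androidx.exifinterface.media.ExifInterface
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.WritableMap
import java.io.File

// Hypothetical call site: read EXIF from the saved JPEG and attach the
// "{TIFF}"/"{Exif}" map to the result sent over the bridge.
fun buildPhotoResult(photoFile: File): WritableMap {
  val result = Arguments.createMap()
  result.putString("path", photoFile.absolutePath)
  val exif = ExifInterface(photoFile.absolutePath)
  // buildMetadataMap() is assumed to wrap the map-building code shown above.
  result.putMap("metadata", exif.buildMetadataMap())
  return result
}
```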
@ -4,34 +4,38 @@ import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageCaptureException
import androidx.camera.core.ImageProxy
import java.util.concurrent.Executor
import java.util.concurrent.Executors
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine

suspend inline fun ImageCapture.takePicture(options: ImageCapture.OutputFileOptions, executor: Executor) = suspendCoroutine<ImageCapture.OutputFileResults> { cont ->
  this.takePicture(
    options, executor,
    object : ImageCapture.OnImageSavedCallback {
      override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
        cont.resume(outputFileResults)
      }

      override fun onError(exception: ImageCaptureException) {
        cont.resumeWithException(exception)
      }
    }
  )
}

suspend inline fun ImageCapture.takePicture(executor: Executor) = suspendCoroutine<ImageProxy> { cont ->
  this.takePicture(
    executor,
    object : ImageCapture.OnImageCapturedCallback() {
      override fun onCaptureSuccess(image: ImageProxy) {
        super.onCaptureSuccess(image)
        cont.resume(image)
      }

      override fun onError(exception: ImageCaptureException) {
        super.onError(exception)
        cont.resumeWithException(exception)
      }
    }
  )
}
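An illustrative call site for these suspending extensions, not part of this diff: the `imageCapture`, `photoFile` and `executor` values are assumptions for the sketch.

```kotlin
import androidx.camera.core.ImageCapture
import java.io.File
import java.util.concurrent.Executor

// Awaits the suspending ImageCapture.takePicture() extension from a coroutine.
suspend fun capturePhoto(imageCapture: ImageCapture, photoFile: File, executor: Executor): ImageCapture.OutputFileResults {
  val options = ImageCapture.OutputFileOptions.Builder(photoFile).build()
  // Resumes with the results from onImageSaved, or rethrows the ImageCaptureException from onError.
  return imageCapture.takePicture(options, executor)
}
```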
@ -4,9 +4,9 @@ import android.graphics.ImageFormat
import androidx.camera.core.ImageProxy

val ImageProxy.isRaw: Boolean
  get() {
    return when (format) {
      ImageFormat.RAW_SENSOR, ImageFormat.RAW10, ImageFormat.RAW12, ImageFormat.RAW_PRIVATE -> true
      else -> false
    }
  }
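A small usage sketch of the `isRaw` property; the `.dng`/`.jpg` mapping is an assumption made for illustration, not something this diff defines.

```kotlin
import androidx.camera.core.ImageProxy

// Illustrative only: pick a file extension based on the captured format.
fun fileExtensionFor(image: ImageProxy): String =
  if (image.isRaw) "dng" else "jpg"
```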
@ -1,79 +1,76 @@
package com.mrousavy.camera.utils

import android.annotation.SuppressLint
import android.graphics.BitmapFactory
import android.graphics.ImageFormat
import androidx.camera.core.ImageProxy
import com.mrousavy.camera.InvalidFormatError
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteBuffer
import java.util.stream.Stream.concat

// TODO: Fix this flip() function (this outputs a black image)
fun flip(imageBytes: ByteArray, imageWidth: Int): ByteArray {
  // separate out the sub arrays
  var holder = ByteArray(imageBytes.size)
  var subArray = ByteArray(imageWidth)
  var subCount = 0
  for (i in imageBytes.indices) {
    subArray[subCount] = imageBytes[i]
    subCount++
    if (i % imageWidth == 0) {
      subArray.reverse()
      if (i == imageWidth) {
        holder = subArray
      } else {
        holder += subArray
      }
      subCount = 0
      subArray = ByteArray(imageWidth)
    }
  }

  subArray = ByteArray(imageWidth)
  System.arraycopy(imageBytes, imageBytes.size - imageWidth, subArray, 0, subArray.size)
  return holder + subArray
}

@SuppressLint("UnsafeExperimentalUsageError")
fun ImageProxy.save(file: File, flipHorizontally: Boolean) {
  when (format) {
    // TODO: ImageFormat.RAW_SENSOR
    // TODO: ImageFormat.DEPTH_JPEG
    ImageFormat.JPEG -> {
      val buffer = planes[0].buffer
      val bytes = ByteArray(buffer.remaining())

      // copy image from buffer to byte array
      buffer.get(bytes)

      val output = FileOutputStream(file)
      output.write(bytes)
      output.close()
    }
    ImageFormat.YUV_420_888 -> {
      // "prebuffer" simply contains the meta information about the following planes.
      val prebuffer = ByteBuffer.allocate(16)
      prebuffer.putInt(width)
        .putInt(height)
        .putInt(planes[1].pixelStride)
        .putInt(planes[1].rowStride)

      val output = FileOutputStream(file)
      output.write(prebuffer.array()) // write meta information to file
      // Now write the actual planes.
      var buffer: ByteBuffer
      var bytes: ByteArray

      for (i in 0..2) {
        buffer = planes[i].buffer
        bytes = ByteArray(buffer.remaining()) // makes byte array large enough to hold image
        buffer.get(bytes) // copies image from buffer to byte array
        output.write(bytes) // write the byte array to file
      }
      output.close()
    }
    else -> throw InvalidFormatError(format)
  }
}
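For illustration, a reader for the YUV dump format written above: the 16-byte prebuffer holds four big-endian Ints (width, height, U/V pixel stride, U/V row stride), followed by the raw plane bytes. The helper below is an assumption about how such a file might be consumed and is not part of this diff.

```kotlin
import java.io.DataInputStream
import java.io.File
import java.io.FileInputStream

// Hypothetical counterpart to ImageProxy.save() for YUV_420_888 dumps.
data class YuvDumpHeader(val width: Int, val height: Int, val pixelStride: Int, val rowStride: Int)

fun readYuvDumpHeader(file: File): YuvDumpHeader =
  DataInputStream(FileInputStream(file)).use { input ->
    // ByteBuffer.putInt() writes big-endian by default, which matches readInt().
    YuvDumpHeader(
      width = input.readInt(),
      height = input.readInt(),
      pixelStride = input.readInt(),
      rowStride = input.readInt()
    )
  }
```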
@ -1,5 +1,5 @@
package com.mrousavy.camera.utils

fun <T> List<T>.containsAny(elements: List<T>): Boolean {
  return elements.any { element -> this.contains(element) }
}
@ -7,9 +7,12 @@ import kotlin.coroutines.resume
import kotlin.coroutines.suspendCoroutine

suspend fun getCameraProvider(context: Context) = suspendCoroutine<ProcessCameraProvider> { cont ->
  val cameraProviderFuture = ProcessCameraProvider.getInstance(context)

  cameraProviderFuture.addListener(
    {
      cont.resume(cameraProviderFuture.get())
    },
    ContextCompat.getMainExecutor(context)
  )
}
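A minimal sketch of a call site for the suspending `getCameraProvider()` helper; the lifecycle owner, preview view and camera selector are assumptions for illustration, not part of this diff.

```kotlin
import android.content.Context
import androidx.camera.core.CameraSelector
import androidx.camera.core.Preview
import androidx.camera.view.PreviewView
import androidx.lifecycle.LifecycleOwner

// Bind a Preview use case once the camera provider future resolves.
suspend fun startPreview(context: Context, lifecycleOwner: LifecycleOwner, previewView: PreviewView) {
  val provider = getCameraProvider(context)
  val preview = Preview.Builder().build()
  preview.setSurfaceProvider(previewView.surfaceProvider)
  provider.unbindAll()
  provider.bindToLifecycle(lifecycleOwner, CameraSelector.DEFAULT_BACK_CAMERA, preview)
}
```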
@ -3,16 +3,15 @@ package com.mrousavy.camera.utils
import android.util.Size
import android.view.Surface

/**
 * Rotate by a given Surface Rotation
 */
fun Size.rotated(surfaceRotation: Int): Size {
  return when (surfaceRotation) {
    Surface.ROTATION_0 -> Size(width, height)
    Surface.ROTATION_90 -> Size(height, width)
    Surface.ROTATION_180 -> Size(width, height)
    Surface.ROTATION_270 -> Size(height, width)
    else -> Size(width, height)
  }
}
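A quick usage example with made-up values, just to show the swap behaviour:

```kotlin
import android.util.Size
import android.view.Surface

// ROTATION_90 and ROTATION_270 swap width and height; ROTATION_0/180 keep them.
val sensorSize = Size(4032, 3024)
val rotatedSize = sensorSize.rotated(Surface.ROTATION_90) // Size(3024, 4032)
```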
@ -7,14 +7,14 @@ import android.view.ViewGroup
// This fixes that.
// https://github.com/facebook/react-native/issues/17968#issuecomment-633308615
fun ViewGroup.installHierarchyFitter() {
  setOnHierarchyChangeListener(object : ViewGroup.OnHierarchyChangeListener {
    override fun onChildViewRemoved(parent: View?, child: View?) = Unit
    override fun onChildViewAdded(parent: View?, child: View?) {
      parent?.measure(
        View.MeasureSpec.makeMeasureSpec(measuredWidth, View.MeasureSpec.EXACTLY),
        View.MeasureSpec.makeMeasureSpec(measuredHeight, View.MeasureSpec.EXACTLY)
      )
      parent?.layout(0, 0, parent.measuredWidth, parent.measuredHeight)
    }
  })
}
@ -3,22 +3,22 @@ package com.mrousavy.camera.utils
import com.facebook.react.bridge.WritableArray

fun WritableArray.pushInt(value: Int?) {
  if (value == null)
    this.pushNull()
  else
    this.pushInt(value)
}

fun WritableArray.pushDouble(value: Double?) {
  if (value == null)
    this.pushNull()
  else
    this.pushDouble(value)
}

fun WritableArray.pushBoolean(value: Boolean?) {
  if (value == null)
    this.pushNull()
  else
    this.pushBoolean(value)
}
@ -3,22 +3,22 @@ package com.mrousavy.camera.utils
import com.facebook.react.bridge.WritableMap

fun WritableMap.putInt(key: String, value: Int?) {
  if (value == null)
    this.putNull(key)
  else
    this.putInt(key, value)
}

fun WritableMap.putDouble(key: String, value: Double?) {
  if (value == null)
    this.putNull(key)
  else
    this.putDouble(key, value)
}

fun WritableMap.putBoolean(key: String, value: Boolean?) {
  if (value == null)
    this.putNull(key)
  else
    this.putBoolean(key, value)
}
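A brief illustration of how these nullable-aware overloads (and the WritableArray ones above) behave; the keys and values are made up, and the extensions are assumed to be imported from this package.

```kotlin
import com.facebook.react.bridge.Arguments

// With a nullable local, Kotlin resolves to the extension, which writes
// null instead of throwing when the value is missing.
val map = Arguments.createMap()
val iso: Int? = 400
val fNumber: Double? = null
map.putInt("iso", iso)          // value present -> putInt
map.putDouble("fNumber", fNumber) // value missing -> putNull
```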
@ -1,27 +1,27 @@
package com.mrousavy.camera.utils

import com.facebook.react.bridge.Promise
import com.mrousavy.camera.CameraError
import com.mrousavy.camera.UnknownCameraError

inline fun withPromise(promise: Promise, closure: () -> Any?) {
  try {
    val result = closure()
    promise.resolve(result)
  } catch (e: Throwable) {
    e.printStackTrace()
    val error = if (e is CameraError) e else UnknownCameraError(e)
    promise.reject("${error.domain}/${error.id}", error.message, error.cause)
  }
}

inline fun withSuspendablePromise(promise: Promise, closure: () -> Any?) {
  try {
    val result = closure()
    promise.resolve(result)
  } catch (e: Throwable) {
    e.printStackTrace()
    val error = if (e is CameraError) e else UnknownCameraError(e)
    promise.reject("${error.domain}/${error.id}", error.message, error.cause)
  }
}