fix: Mirror selfies on Android (#129)

* Flip Image (slow approach)

* Measure time

* REACT_CLASS -> TAG

* d -> i

* remove TODO
Marc Rousavy 2021-05-03 19:14:19 +02:00 committed by GitHub
parent ae1dde1993
commit f57714747a
3 changed files with 55 additions and 30 deletions

View File

@@ -13,13 +13,12 @@ import com.facebook.react.bridge.WritableMap
 import com.mrousavy.camera.utils.*
 import kotlinx.coroutines.*
 import java.io.File
+import kotlin.system.measureTimeMillis
-private const val TAG = "CameraView.performance"
 @SuppressLint("UnsafeOptInUsageError")
 suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
   val startFunc = System.nanoTime()
-  Log.d(CameraView.REACT_CLASS, "takePhoto() called")
+  Log.d(CameraView.TAG, "takePhoto() called")
   val imageCapture = imageCapture ?: throw CameraNotReadyError()
   if (options.hasKey("flash")) {
@@ -57,19 +56,18 @@ suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
   val camera2Info = Camera2CameraInfo.from(camera!!.cameraInfo)
   val lensFacing = camera2Info.getCameraCharacteristic(CameraCharacteristics.LENS_FACING)
-  // TODO: Flip image if lens is front side - see https://github.com/cuvent/react-native-vision-camera/issues/74
   val results = awaitAll(
     async(coroutineContext) {
-      Log.d(CameraView.REACT_CLASS, "Taking picture...")
+      Log.d(CameraView.TAG, "Taking picture...")
       val startCapture = System.nanoTime()
       val pic = imageCapture.takePicture(takePhotoExecutor)
       val endCapture = System.nanoTime()
-      Log.d(TAG, "Finished image capture in ${(endCapture - startCapture) / 1_000_000}ms")
+      Log.i(CameraView.TAG_PERF, "Finished image capture in ${(endCapture - startCapture) / 1_000_000}ms")
       pic
     },
     async(Dispatchers.IO) {
-      Log.d(CameraView.REACT_CLASS, "Creating temp file...")
+      Log.d(CameraView.TAG, "Creating temp file...")
       File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
     }
   )
@@ -79,11 +77,12 @@ suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
   val exif: ExifInterface?
   @Suppress("BlockingMethodInNonBlockingContext")
   withContext(Dispatchers.IO) {
-    Log.d(CameraView.REACT_CLASS, "Saving picture to ${file.absolutePath}...")
-    val startSave = System.nanoTime()
-    photo.save(file, lensFacing == CameraCharacteristics.LENS_FACING_FRONT)
-    val endSave = System.nanoTime()
-    Log.d(TAG, "Finished image saving in ${(endSave - startSave) / 1_000_000}ms")
+    Log.d(CameraView.TAG, "Saving picture to ${file.absolutePath}...")
+    val milliseconds = measureTimeMillis {
+      val flipHorizontally = lensFacing == CameraCharacteristics.LENS_FACING_FRONT
+      photo.save(file, flipHorizontally)
+    }
+    Log.i(CameraView.TAG_PERF, "Finished image saving in ${milliseconds}ms")
     // TODO: Read Exif from existing in-memory photo buffer instead of file?
     exif = if (skipMetadata) null else ExifInterface(file)
   }
@@ -99,9 +98,9 @@ suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
   photo.close()
-  Log.d(CameraView.REACT_CLASS, "Finished taking photo!")
+  Log.d(CameraView.TAG, "Finished taking photo!")
   val endFunc = System.nanoTime()
-  Log.d(TAG, "Finished function execution in ${(endFunc - startFunc) / 1_000_000}ms")
+  Log.i(CameraView.TAG_PERF, "Finished function execution in ${(endFunc - startFunc) / 1_000_000}ms")
   return@coroutineScope map
 }
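The hunks above also swap paired System.nanoTime() reads for kotlin.system.measureTimeMillis wherever a whole block is timed. A minimal standalone Kotlin sketch of the two timing patterns (illustrative only; Thread.sleep stands in for the real capture/save work):

import kotlin.system.measureTimeMillis

fun main() {
  // Before: two nanoTime() reads around the work, converted to milliseconds by hand.
  val start = System.nanoTime()
  Thread.sleep(50) // placeholder for the actual capture/save work
  val end = System.nanoTime()
  println("nanoTime pattern: ${(end - start) / 1_000_000}ms")

  // After: measureTimeMillis runs the block and returns the elapsed milliseconds directly.
  val milliseconds = measureTimeMillis {
    Thread.sleep(50) // placeholder for the actual capture/save work
  }
  println("measureTimeMillis pattern: ${milliseconds}ms")
}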

View File

@@ -161,7 +161,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
       // Host Lifecycle (Activity) is currently inactive (STARTED or DESTROYED), so that overrules our view's lifecycle
       lifecycleRegistry.currentState = hostLifecycleState
     }
-    Log.d(REACT_CLASS, "Lifecycle went from ${lifecycleBefore.name} -> ${lifecycleRegistry.currentState.name} (isActive: $isActive | isAttachedToWindow: $isAttachedToWindow)")
+    Log.d(TAG, "Lifecycle went from ${lifecycleBefore.name} -> ${lifecycleRegistry.currentState.name} (isActive: $isActive | isAttachedToWindow: $isAttachedToWindow)")
   }
   override fun onAttachedToWindow() {
@@ -216,7 +216,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   private suspend fun configureSession() {
     try {
       val startTime = System.currentTimeMillis()
-      Log.i(REACT_CLASS, "Configuring session...")
+      Log.i(TAG, "Configuring session...")
       if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
         throw MicrophonePermissionError()
       }
@@ -227,9 +227,9 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
         throw NoCameraDeviceError()
       }
       if (format != null)
-        Log.i(REACT_CLASS, "Configuring session with Camera ID $cameraId and custom format...")
+        Log.i(TAG, "Configuring session with Camera ID $cameraId and custom format...")
       else
-        Log.i(REACT_CLASS, "Configuring session with Camera ID $cameraId and default format options...")
+        Log.i(TAG, "Configuring session with Camera ID $cameraId and default format options...")
       // Used to bind the lifecycle of cameras to the lifecycle owner
       val cameraProvider = ProcessCameraProvider.getInstance(reactContext).await()
@@ -248,7 +248,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
       if (format == null) {
         // let CameraX automatically find best resolution for the target aspect ratio
-        Log.i(REACT_CLASS, "No custom format has been set, CameraX will automatically determine best configuration...")
+        Log.i(TAG, "No custom format has been set, CameraX will automatically determine best configuration...")
         val aspectRatio = aspectRatio(previewView.width, previewView.height)
         previewBuilder.setTargetAspectRatio(aspectRatio)
         imageCaptureBuilder.setTargetAspectRatio(aspectRatio)
@@ -256,7 +256,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
       } else {
         // User has selected a custom format={}. Use that
         val format = DeviceFormat(format!!)
-        Log.i(REACT_CLASS, "Using custom format - photo: ${format.photoSize}, video: ${format.videoSize} @ $fps FPS")
+        Log.i(TAG, "Using custom format - photo: ${format.photoSize}, video: ${format.videoSize} @ $fps FPS")
         previewBuilder.setDefaultResolution(format.photoSize)
         imageCaptureBuilder.setDefaultResolution(format.photoSize)
         videoCaptureBuilder.setDefaultResolution(format.photoSize)
@@ -266,7 +266,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
         // Camera supports the given FPS (frame rate range)
         val frameDuration = (1.0 / fps.toDouble()).toLong() * 1_000_000_000
-        Log.i(REACT_CLASS, "Setting AE_TARGET_FPS_RANGE to $fps-$fps, and SENSOR_FRAME_DURATION to $frameDuration")
+        Log.i(TAG, "Setting AE_TARGET_FPS_RANGE to $fps-$fps, and SENSOR_FRAME_DURATION to $frameDuration")
         Camera2Interop.Extender(previewBuilder)
           .setCaptureRequestOption(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
           .setCaptureRequestOption(CaptureRequest.SENSOR_FRAME_DURATION, frameDuration)
@@ -283,11 +283,11 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
         val isExtensionAvailable = imageExtension.isExtensionAvailable(cameraSelector) &&
           previewExtension.isExtensionAvailable(cameraSelector)
         if (isExtensionAvailable) {
-          Log.i(REACT_CLASS, "Enabling native HDR extension...")
+          Log.i(TAG, "Enabling native HDR extension...")
           imageExtension.enableExtension(cameraSelector)
           previewExtension.enableExtension(cameraSelector)
         } else {
-          Log.e(REACT_CLASS, "Native HDR vendor extension not available!")
+          Log.e(TAG, "Native HDR vendor extension not available!")
           throw HdrNotContainedInFormatError()
         }
       }
@@ -299,11 +299,11 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
         val isExtensionAvailable = imageExtension.isExtensionAvailable(cameraSelector) &&
           previewExtension.isExtensionAvailable(cameraSelector)
         if (isExtensionAvailable) {
-          Log.i(REACT_CLASS, "Enabling native night-mode extension...")
+          Log.i(TAG, "Enabling native night-mode extension...")
           imageExtension.enableExtension(cameraSelector)
           previewExtension.enableExtension(cameraSelector)
         } else {
-          Log.e(REACT_CLASS, "Native night-mode vendor extension not available!")
+          Log.e(TAG, "Native night-mode vendor extension not available!")
           throw LowLightBoostNotContainedInFormatError()
         }
       }
@@ -325,7 +325,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
       maxZoom = camera!!.cameraInfo.zoomState.value?.maxZoomRatio ?: 1f
       val duration = System.currentTimeMillis() - startTime
-      Log.i(REACT_CLASS, "Session configured in $duration ms! Camera: ${camera!!}")
+      Log.i(TAG_PERF, "Session configured in $duration ms! Camera: ${camera!!}")
       invokeOnInitialized()
     } catch (exc: Throwable) {
       throw when (exc) {
@@ -348,7 +348,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   override fun onLayout(changed: Boolean, left: Int, top: Int, right: Int, bottom: Int) {
     super.onLayout(changed, left, top, right, bottom)
-    Log.i(REACT_CLASS, "onLayout($changed, $left, $top, $right, $bottom) was called! (Width: $width, Height: $height)")
+    Log.i(TAG, "onLayout($changed, $left, $top, $right, $bottom) was called! (Width: $width, Height: $height)")
   }
   private fun invokeOnInitialized() {
@@ -378,7 +378,8 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   }
   companion object {
-    const val REACT_CLASS = "CameraView"
+    const val TAG = "CameraView"
+    const val TAG_PERF = "CameraView.performance"
     private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost")

View File

@@ -1,12 +1,19 @@
 package com.mrousavy.camera.utils
-import android.annotation.SuppressLint
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
 import android.graphics.ImageFormat
+import android.graphics.Matrix
+import android.util.Log
 import androidx.camera.core.ImageProxy
+import com.mrousavy.camera.CameraView
 import com.mrousavy.camera.InvalidFormatError
+import java.io.ByteArrayOutputStream
 import java.io.File
 import java.io.FileOutputStream
 import java.nio.ByteBuffer
+import kotlin.system.measureTimeMillis
 // TODO: Fix this flip() function (this outputs a black image)
 fun flip(imageBytes: ByteArray, imageWidth: Int): ByteArray {
@@ -33,17 +40,35 @@ fun flip(imageBytes: ByteArray, imageWidth: Int): ByteArray {
   return holder + subArray
 }
+// TODO: This function is slow. Figure out a faster way to flip images, preferably via directly manipulating the byte[] Exif flags
+fun flipImage(imageBytes: ByteArray): ByteArray {
+  val bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.size)
+  val matrix = Matrix()
+  matrix.preScale(-1f, 1f)
+  val newBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
+  val stream = ByteArrayOutputStream()
+  newBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream)
+  return stream.toByteArray()
+}
 fun ImageProxy.save(file: File, flipHorizontally: Boolean) {
   when (format) {
     // TODO: ImageFormat.RAW_SENSOR
     // TODO: ImageFormat.DEPTH_JPEG
     ImageFormat.JPEG -> {
       val buffer = planes[0].buffer
-      val bytes = ByteArray(buffer.remaining())
+      var bytes = ByteArray(buffer.remaining())
       // copy image from buffer to byte array
       buffer.get(bytes)
+      if (flipHorizontally) {
+        val milliseconds = measureTimeMillis {
+          bytes = flipImage(bytes)
+        }
+        Log.i(CameraView.TAG_PERF, "Flipping Image took $milliseconds ms.")
+      }
       val output = FileOutputStream(file)
       output.write(bytes)
       output.close()
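The flipImage() added above decodes the JPEG into a Bitmap, mirrors it with a Matrix, and re-encodes it, which is why the TODO calls it slow. A minimal sketch of the EXIF-flag alternative hinted at in that TODO (not part of this commit; markFlippedHorizontally is a hypothetical helper, and it assumes downstream consumers honor the EXIF orientation tag):

import androidx.exifinterface.media.ExifInterface
import java.io.File

// Hypothetical helper: instead of re-encoding the pixels, mark an already-written
// JPEG as horizontally mirrored by rewriting its EXIF orientation tag.
fun markFlippedHorizontally(file: File) {
  val exif = ExifInterface(file.absolutePath)
  exif.setAttribute(
    ExifInterface.TAG_ORIENTATION,
    ExifInterface.ORIENTATION_FLIP_HORIZONTAL.toString()
  )
  exif.saveAttributes() // writes the updated tag back into the JPEG file
}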