feat: Implement focus() on Android (#2523)
* feat: Implement `focus()` on Android
* Throw if not supported
* Do focus in example
* Format
* fix: Properly convert layer point to camera coordinates
* Fix
* Set trigger back to IDLE
* Fix rotation maybe?
* Rotated by
* fix: Fix display point calculation
* Try other
* Invoke `capture` callback on same thread
* Center metering rectangle
* Reset AF Trigger to IDLE
* Reset it to it's default AF mode again, i dont even know anymore
* Update CameraPage.tsx
* Format
* Apply options to repeating
* Set
* Use scene mode
* Update CameraPage.tsx
* Update CameraDeviceDetails.kt
* It fucking works
* Update PersistentCameraCaptureSession.kt
* Update PersistentCameraCaptureSession.kt
* Update PersistentCameraCaptureSession.kt
* Create CameraCaptureSession+setRepeatingRequestAndWait.kt
* Oh my god it works
* Also focus AE
* Cancel reset request
* Rename to AF
* Format
* Update PersistentCameraCaptureSession.kt
parent fce6616964
commit fb1d82ad9a
@ -44,6 +44,8 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, val cameraId
      characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f)
  }
  val sensorSize by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!! }
  val activeSize
    get() = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!!
  val sensorOrientation by lazy { characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0 }
  val minFocusDistance by lazy { getMinFocusDistanceCm() }
  val name by lazy {
@ -91,6 +93,14 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, val cameraId
  val isBackwardsCompatible by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) }
  val supportsSnapshotCapture by lazy { supportsSnapshotCapture() }

  val supportsTapToFocus by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) ?: 0) > 0 }
  val supportsTapToExposure by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0) > 0 }
  val supportsTapToWhiteBalance by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) ?: 0) > 0 }

  val afModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES)?.toList() ?: emptyList() }
  val aeModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES)?.toList() ?: emptyList() }
  val awbModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES)?.toList() ?: emptyList() }

  // TODO: Also add 10-bit YUV here?
  val videoFormat = ImageFormat.YUV_420_888
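For context, tap-to-focus/-exposure/-white-balance support boils down to whether the camera reports at least one metering region for the respective control. A minimal standalone sketch of the same check (illustrative only; the helper and its logging are not part of this commit):

import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.util.Log

// Illustrative helper mirroring the new supportsTapTo* properties:
// a camera supports metering a control if CONTROL_MAX_REGIONS_* is greater than zero.
fun logMeteringSupport(cameraManager: CameraManager) {
  cameraManager.cameraIdList.forEach { id ->
    val characteristics = cameraManager.getCameraCharacteristics(id)
    val af = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) ?: 0
    val ae = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0
    val awb = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) ?: 0
    Log.i("CameraDeviceDetails", "Camera $id metering regions: AF=$af, AE=$ae, AWB=$awb")
  }
}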
@ -244,7 +254,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, val cameraId
    map.putBoolean("isMultiCam", isMultiCam)
    map.putBoolean("supportsRawCapture", supportsRawCapture)
    map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)
-   map.putBoolean("supportsFocus", true) // I believe every device here supports focussing
+   map.putBoolean("supportsFocus", supportsTapToFocus)
    map.putDouble("minZoom", minZoom)
    map.putDouble("maxZoom", maxZoom)
    map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android
@ -62,6 +62,8 @@ class FlashUnavailableError :
    "flash-unavailable",
    "The Camera Device does not have a flash unit! Make sure you select a device where `device.hasFlash`/`device.hasTorch` is true."
  )
class FocusNotSupportedError :
  CameraError("device", "focus-not-supported", "The currently selected camera device does not support focusing!")

class CameraNotReadyError :
  CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
@ -468,9 +468,13 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
      callback.onError(error)
    }

- suspend fun focus(x: Int, y: Int): Unit = throw NotImplementedError("focus() is not yet implemented!")
- private suspend fun focus(point: Point): Unit = throw NotImplementedError()
+ suspend fun focus(x: Int, y: Int) {
+   val previewView = previewView ?: throw CameraNotReadyError()
+   val deviceDetails = captureSession.getActiveDeviceDetails() ?: throw CameraNotReadyError()
+
+   val cameraPoint = previewView.convertLayerPointToCameraCoordinates(Point(x, y), deviceDetails)
+   captureSession.focus(cameraPoint)
+ }

  data class CapturedPhoto(
    val image: Image,
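As a usage sketch (the helper, its scope parameter and the logging are assumptions for illustration, not part of this commit), the new CameraSession.focus(x, y) is meant to be called with view-layer coordinates and can throw the errors added above:

import android.util.Log
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch

// Hypothetical call site, e.g. the view manager handling the `focus()` command from JS.
// Coordinates are in the PreviewView's layer coordinate system (the values a touch event reports);
// CameraSession converts them to sensor coordinates internally.
fun focusAt(scope: CoroutineScope, cameraSession: CameraSession, x: Int, y: Int) {
  scope.launch {
    try {
      cameraSession.focus(x, y)
    } catch (e: CameraNotReadyError) {
      Log.w("Focus", "Camera not ready yet, ignoring focus request.", e)
    } catch (e: FocusNotSupportedError) {
      Log.w("Focus", "This camera device does not support tap-to-focus.", e)
    }
  }
}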
@ -1,11 +1,15 @@
package com.mrousavy.camera.core

import android.graphics.Point
import android.hardware.camera2.CameraAccessException
import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.TotalCaptureResult
import android.hardware.camera2.params.MeteringRectangle
import android.util.Log
import android.util.Size
import com.mrousavy.camera.core.capture.PhotoCaptureRequest
import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
import com.mrousavy.camera.core.outputs.SurfaceOutput
@ -13,11 +17,18 @@ import com.mrousavy.camera.extensions.capture
import com.mrousavy.camera.extensions.createCaptureSession
import com.mrousavy.camera.extensions.isValid
import com.mrousavy.camera.extensions.openCamera
import com.mrousavy.camera.extensions.setRepeatingRequestAndWaitForAF
import com.mrousavy.camera.extensions.tryAbortCaptures
import com.mrousavy.camera.types.Flash
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.QualityPrioritization
import java.io.Closeable
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.cancelAndJoin
import kotlinx.coroutines.delay
import kotlinx.coroutines.isActive
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
@ -29,6 +40,8 @@ import kotlinx.coroutines.sync.withLock
class PersistentCameraCaptureSession(private val cameraManager: CameraManager, private val callback: Callback) : Closeable {
  companion object {
    private const val TAG = "PersistentCameraCaptureSession"
    private val DEFAULT_METERING_SIZE = Size(100, 100)
    private const val FOCUS_RESET_TIMEOUT = 3000L
  }

  // Inputs/Dependencies
@ -44,6 +57,8 @@ class PersistentCameraCaptureSession(private val cameraManager: CameraManager, p
  private val mutex = Mutex()
  private var didDestroyFromOutside = false
  private var focusResetJob: Job? = null
  private val coroutineScope = CoroutineScope(CameraQueues.cameraQueue.coroutineDispatcher)

  val isRunning: Boolean
    get() = isActive && session != null && device != null && !didDestroyFromOutside
@ -128,6 +143,7 @@ class PersistentCameraCaptureSession(private val cameraManager: CameraManager, p
    enableShutterSound: Boolean
  ): TotalCaptureResult {
    mutex.withLock {
      Log.i(TAG, "Capturing photo...")
      val session = session ?: throw CameraNotReadyError()
      val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError()
      val photoRequest = PhotoCaptureRequest(
@ -149,6 +165,88 @@ class PersistentCameraCaptureSession(private val cameraManager: CameraManager, p
    }
  }

  suspend fun focus(point: Point) {
    mutex.withLock {
      Log.i(TAG, "Focusing to $point...")
      val session = session ?: throw CameraNotReadyError()
      val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError()
      val device = session.device
      val deviceDetails = getOrCreateCameraDeviceDetails(device)
      if (!deviceDetails.supportsTapToFocus) {
        throw FocusNotSupportedError()
      }
      val outputs = outputs.filter { it.isRepeating }
      val meteringRectangle = MeteringRectangle(point, DEFAULT_METERING_SIZE, MeteringRectangle.METERING_WEIGHT_MAX - 1)

      // 0. Cancel the 3 second focus reset task
      focusResetJob?.cancelAndJoin()
      focusResetJob = null

      // 1. Cancel any ongoing AF/AE/AWB request
      repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request ->
        if (deviceDetails.supportsTapToFocus) {
          request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL)
        }
        if (deviceDetails.supportsTapToExposure) {
          request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL)
        }
        session.capture(request.build(), null, null)
      }

      // 2. After previous AF/AE/AWB requests have been canceled, start a new AF/AE/AWB request
      repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request ->
        request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
        if (deviceDetails.supportsTapToFocus) {
          request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
          request.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(meteringRectangle))
          request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START)
        }
        if (deviceDetails.supportsTapToExposure) {
          request.set(CaptureRequest.CONTROL_AE_REGIONS, arrayOf(meteringRectangle))
          request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START)
        }
        if (deviceDetails.supportsTapToWhiteBalance) {
          request.set(CaptureRequest.CONTROL_AWB_REGIONS, arrayOf(meteringRectangle))
        }
        session.capture(request.build(), null, null)

        // 3. Start a repeating request without the trigger and wait until AF/AE/AWB locks
        request.set(CaptureRequest.CONTROL_AF_TRIGGER, null)
        request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, null)
        session.setRepeatingRequestAndWaitForAF(request.build())
      }

      // 4. After the Camera has successfully found the AF/AE/AWB lock-point, we set it to idle and keep the point metered
      repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request ->
        request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
        if (deviceDetails.supportsTapToFocus) {
          request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
          request.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(meteringRectangle))
          request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE)
        }
        if (deviceDetails.supportsTapToExposure) {
          request.set(CaptureRequest.CONTROL_AE_REGIONS, arrayOf(meteringRectangle))
          request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
        }
        session.setRepeatingRequest(request.build(), null, null)
      }

      // 5. Wait 3 seconds
      focusResetJob = coroutineScope.launch {
        delay(FOCUS_RESET_TIMEOUT)
        if (!this.isActive) {
          // this job got canceled from the outside
          return@launch
        }
        Log.i(TAG, "Resetting focus to auto-focus...")
        // 6. Reset AF/AE/AWB to continuous auto-focus again, which is the default here.
        repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request ->
          session.setRepeatingRequest(request.build(), null, null)
        }
      }
    }
  }

  fun getActiveDeviceDetails(): CameraDeviceDetails? {
    val device = device ?: return null
    return getOrCreateCameraDeviceDetails(device)
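One detail worth noting: the Camera2 MeteringRectangle(Point, Size, weight) constructor treats the point as the rectangle's top-left corner, so the 100x100 region used above starts at the converted tap coordinate. A purely illustrative sketch (not part of this commit) of building a centered rectangle clamped to the sensor's active array instead:

import android.graphics.Point
import android.graphics.Rect
import android.hardware.camera2.params.MeteringRectangle

// Illustrative only: a metering rectangle centered on the tapped sensor point,
// clamped so it stays inside the active array bounds.
fun centeredMeteringRectangle(point: Point, activeArray: Rect, size: Int = 100): MeteringRectangle {
  val half = size / 2
  val left = (point.x - half).coerceIn(activeArray.left, activeArray.right - size)
  val top = (point.y - half).coerceIn(activeArray.top, activeArray.bottom - size)
  return MeteringRectangle(left, top, size, size, MeteringRectangle.METERING_WEIGHT_MAX - 1)
}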
@ -2,6 +2,7 @@ package com.mrousavy.camera.core
import android.annotation.SuppressLint
import android.content.Context
import android.graphics.Point
import android.util.Log
import android.util.Size
import android.view.Gravity
@ -11,6 +12,7 @@ import android.widget.FrameLayout
import com.facebook.react.bridge.UiThreadUtil
import com.mrousavy.camera.extensions.getMaximumPreviewSize
import com.mrousavy.camera.extensions.resize
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.ResizeMode
import kotlin.math.roundToInt
import kotlinx.coroutines.Dispatchers
@ -51,6 +53,24 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceV
    }
  }

  private val viewSize: Size
    get() {
      val displayMetrics = context.resources.displayMetrics
      val dpX = width / displayMetrics.density
      val dpY = height / displayMetrics.density
      return Size(dpX.toInt(), dpY.toInt())
    }

  fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point {
    val sensorOrientation = Orientation.fromRotationDegrees(cameraDeviceDetails.sensorOrientation)
    val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height())
    val viewOrientation = Orientation.PORTRAIT

    val rotated = Orientation.rotatePoint(point, viewSize, cameraSize, viewOrientation, sensorOrientation)
    Log.i(TAG, "$point -> $sensorOrientation (in $cameraSize -> $viewSize) -> $rotated")
    return rotated
  }

  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
    // TODO: Take sensor orientation into account here
    val contentAspectRatio = contentSize.height.toDouble() / contentSize.width
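The division by displayMetrics.density matters here: the x/y values arriving from React Native (event.locationX/locationY) are density-independent points rather than pixels, so viewSize has to be expressed in the same unit before the point is normalized. A small isolated sketch of that conversion (the helper name is an assumption for illustration):

import android.content.Context
import android.util.Size

// Illustrative only: convert a view's pixel dimensions into the dp units
// that React Native touch coordinates (locationX/locationY) are expressed in.
fun pixelSizeToDp(context: Context, widthPx: Int, heightPx: Int): Size {
  val density = context.resources.displayMetrics.density
  return Size((widthPx / density).toInt(), (heightPx / density).toInt())
}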
@ -61,9 +61,11 @@ abstract class CameraCaptureRequest(
      if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr")
      if (!format.supportsVideoHdr) throw InvalidVideoHdrError()
      builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR)
      builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE)
    } else if (enableLowLightBoost) {
      if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError()
      builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT)
      builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE)
    }

    // Set Exposure Bias
@ -14,7 +14,7 @@ import com.mrousavy.camera.types.Torch
import com.mrousavy.camera.types.VideoStabilizationMode

class RepeatingCaptureRequest(
- private val enableVideoPipeline: Boolean,
+ val enableVideoPipeline: Boolean,
  torch: Torch = Torch.OFF,
  private val fps: Int? = null,
  private val videoStabilizationMode: VideoStabilizationMode = VideoStabilizationMode.OFF,
@ -48,6 +48,32 @@ class RepeatingCaptureRequest(
  ): CaptureRequest.Builder {
    val builder = super.createCaptureRequest(template, device, deviceDetails, outputs)

    builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)

    // Set AF
    if (enableVideoPipeline && deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)
    } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
    } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_AUTO)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
    } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_OFF)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF)
      builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 0f)
    }

    // Set AE
    if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_ON)) {
      builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
    } else if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_OFF)) {
      builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF)
    }

    // Set AWB
    if (deviceDetails.awbModes.contains(CameraCharacteristics.CONTROL_AWB_MODE_AUTO)) {
      builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO)
    }

    // Set FPS
    if (fps != null) {
      if (format == null) throw PropRequiresFormatToBeNonNullError("fps")
@ -5,7 +5,6 @@ import android.hardware.camera2.CaptureFailure
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.TotalCaptureResult
import android.media.MediaActionSound
import com.mrousavy.camera.core.CameraQueues
import com.mrousavy.camera.core.CaptureAbortedError
import com.mrousavy.camera.core.UnknownCaptureError
import kotlin.coroutines.resume
@ -23,20 +22,26 @@ suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableS
      override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
        super.onCaptureCompleted(session, request, result)

        if (request == captureRequest) {
          continuation.resume(result)
          shutterSound?.release()
        }
      }

      override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) {
        super.onCaptureStarted(session, request, timestamp, frameNumber)

        if (request == captureRequest) {
          if (enableShutterSound) {
            shutterSound?.play(MediaActionSound.SHUTTER_CLICK)
          }
        }
      }

      override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
        super.onCaptureFailed(session, request, failure)

        if (request == captureRequest) {
          val wasImageCaptured = failure.wasImageCaptured()
          val error = when (failure.reason) {
            CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured)
@ -45,7 +50,8 @@ suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableS
          }
          continuation.resumeWithException(error)
        }
      }
    },
-   CameraQueues.cameraQueue.handler
+   null
  )
}
@ -0,0 +1,47 @@
package com.mrousavy.camera.extensions

import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CaptureFailure
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.CaptureResult
import android.hardware.camera2.TotalCaptureResult
import android.util.Log
import com.mrousavy.camera.core.CaptureAbortedError
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlinx.coroutines.suspendCancellableCoroutine

private const val TAG = "CameraCaptureSession"

/**
 * Set a new repeating request for the [CameraCaptureSession] that contains an AF trigger, and wait until AF has locked.
 */
suspend fun CameraCaptureSession.setRepeatingRequestAndWaitForAF(request: CaptureRequest) =
  suspendCancellableCoroutine { continuation ->
    this.setRepeatingRequest(
      request,
      object : CameraCaptureSession.CaptureCallback() {
        override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
          super.onCaptureCompleted(session, request, result)

          if (continuation.isActive) {
            val afState = result.get(CaptureResult.CONTROL_AF_STATE)
            Log.i(TAG, "AF State: $afState")
            if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
              continuation.resume(Unit)
              session.setRepeatingRequest(request, null, null)
            }
          }
        }

        override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
          super.onCaptureFailed(session, request, failure)

          if (continuation.isActive) {
            continuation.resumeWithException(CaptureAbortedError(failure.wasImageCaptured()))
            session.setRepeatingRequest(request, null, null)
          }
        }
      },
      null
    )
  }
@ -2,7 +2,7 @@ package com.mrousavy.camera.extensions
import android.util.Size
import android.util.SizeF
-import android.view.Surface
+import com.mrousavy.camera.types.Orientation
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min
@ -14,13 +14,10 @@ fun List<Size>.closestToOrMax(size: Size?): Size =
    this.maxBy { it.width * it.height }
  }

-fun Size.rotated(surfaceRotation: Int): Size =
-  when (surfaceRotation) {
-    Surface.ROTATION_0 -> Size(width, height)
-    Surface.ROTATION_90 -> Size(height, width)
-    Surface.ROTATION_180 -> Size(width, height)
-    Surface.ROTATION_270 -> Size(height, width)
-    else -> Size(width, height)
+fun Size.rotatedBy(orientation: Orientation): Size =
+  when (orientation) {
+    Orientation.PORTRAIT, Orientation.PORTRAIT_UPSIDE_DOWN -> this
+    Orientation.LANDSCAPE_LEFT, Orientation.LANDSCAPE_RIGHT -> Size(height, width)
  }

val Size.bigger: Int
@ -1,5 +1,6 @@
package com.mrousavy.camera.extensions

import android.util.Log
import android.view.SurfaceHolder
import androidx.annotation.UiThread
import kotlin.coroutines.resume
@ -15,14 +16,18 @@ suspend fun SurfaceHolder.resize(width: Int, height: Int) {
      return@suspendCancellableCoroutine
    }

    Log.i("SurfaceHolder", "Resizing SurfaceHolder to $width x $height...")

    val callback = object : SurfaceHolder.Callback {
      override fun surfaceCreated(holder: SurfaceHolder) = Unit
      override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
        holder.removeCallback(this)
        Log.i("SurfaceHolder", "Resized SurfaceHolder to $width x $height!")
        continuation.resume(Unit)
      }
      override fun surfaceDestroyed(holder: SurfaceHolder) {
        holder.removeCallback(this)
        Log.e("SurfaceHolder", "Failed to resize SurfaceHolder to $width x $height!")
        continuation.cancel(Error("Tried to resize SurfaceView, but Surface has been destroyed!"))
      }
    }
@ -1,5 +1,9 @@
package com.mrousavy.camera.types

import android.graphics.Point
import android.graphics.PointF
import android.util.Log
import android.util.Size
import com.mrousavy.camera.core.CameraDeviceDetails

enum class Orientation(override val unionValue: String) : JSUnionValue {
@ -11,9 +15,9 @@ enum class Orientation(override val unionValue: String) : JSUnionValue {
  fun toDegrees(): Int =
    when (this) {
      PORTRAIT -> 0
-     LANDSCAPE_RIGHT -> 90
+     LANDSCAPE_LEFT -> 90
      PORTRAIT_UPSIDE_DOWN -> 180
-     LANDSCAPE_LEFT -> 270
+     LANDSCAPE_RIGHT -> 270
    }

  fun toSensorRelativeOrientation(deviceDetails: CameraDeviceDetails): Orientation {
@ -43,10 +47,34 @@ enum class Orientation(override val unionValue: String) : JSUnionValue {
    fun fromRotationDegrees(rotationDegrees: Int): Orientation =
      when (rotationDegrees) {
-       in 45..135 -> LANDSCAPE_RIGHT
+       in 45..135 -> LANDSCAPE_LEFT
        in 135..225 -> PORTRAIT_UPSIDE_DOWN
-       in 225..315 -> LANDSCAPE_LEFT
+       in 225..315 -> LANDSCAPE_RIGHT
        else -> PORTRAIT
      }

    fun rotatePoint(
      point: Point,
      fromSize: Size,
      toSize: Size,
      fromOrientation: Orientation,
      toOrientation: Orientation
    ): Point {
      val differenceDegrees = (fromOrientation.toDegrees() + toOrientation.toDegrees()) % 360
      val difference = Orientation.fromRotationDegrees(differenceDegrees)
      val normalizedPoint = PointF(point.x / fromSize.width.toFloat(), point.y / fromSize.height.toFloat())

      val rotatedNormalizedPoint = when (difference) {
        PORTRAIT -> normalizedPoint
        PORTRAIT_UPSIDE_DOWN -> PointF(1 - normalizedPoint.x, 1 - normalizedPoint.y)
        LANDSCAPE_LEFT -> PointF(normalizedPoint.y, 1 - normalizedPoint.x)
        LANDSCAPE_RIGHT -> PointF(1 - normalizedPoint.y, normalizedPoint.x)
      }

      val rotatedX = rotatedNormalizedPoint.x * toSize.width
      val rotatedY = rotatedNormalizedPoint.y * toSize.height
      Log.i("ROTATE", "$point -> $normalizedPoint -> $difference -> $rotatedX, $rotatedY")
      return Point(rotatedX.toInt(), rotatedY.toInt())
    }
  }
}
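A worked example may make the mapping easier to follow (numbers are illustrative; imports of Point, Size and Orientation are assumed): a tap at (90, 180) in a 360x720 dp portrait view, projected onto a 4000x3000 sensor active array mounted with a 90-degree sensor orientation:

// fromOrientation = PORTRAIT (0°), toOrientation = LANDSCAPE_LEFT (90°)
// difference = (0 + 90) % 360 = 90° -> LANDSCAPE_LEFT branch: (y, 1 - x)
val sensorPoint = Orientation.rotatePoint(
  point = Point(90, 180),                    // normalized to (0.25, 0.25)
  fromSize = Size(360, 720),                 // view size in dp
  toSize = Size(4000, 3000),                 // sensor active array size
  fromOrientation = Orientation.PORTRAIT,
  toOrientation = Orientation.LANDSCAPE_LEFT // i.e. sensorOrientation == 90°
)
// rotated normalized point = (0.25, 0.75) -> Point(0.25 * 4000, 0.75 * 3000) == Point(1000, 2250)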
@ -1,6 +1,6 @@
import * as React from 'react'
import { useRef, useState, useCallback, useMemo } from 'react'
-import { StyleSheet, Text, View } from 'react-native'
+import { GestureResponderEvent, StyleSheet, Text, View } from 'react-native'
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler'
import {
  CameraProps,
@ -125,6 +125,16 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
  //#endregion

  //#region Tap Gesture
  const onFocusTap = useCallback(
    ({ nativeEvent: event }: GestureResponderEvent) => {
      if (!device?.supportsFocus) return
      camera.current?.focus({
        x: event.locationX,
        y: event.locationY,
      })
    },
    [device?.supportsFocus],
  )
  const onDoubleTap = useCallback(() => {
    onFlipCameraPressed()
  }, [onFlipCameraPressed])
@ -173,7 +183,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
    <View style={styles.container}>
      {device != null && (
        <PinchGestureHandler onGestureEvent={onPinchGesture} enabled={isActive}>
-         <Reanimated.View style={StyleSheet.absoluteFill}>
+         <Reanimated.View onTouchEnd={onFocusTap} style={StyleSheet.absoluteFill}>
            <TapGestureHandler onEnded={onDoubleTap} numberOfTaps={2}>
              <ReanimatedCamera
                style={StyleSheet.absoluteFill}
@ -186,8 +196,8 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
                onStopped={() => 'Camera stopped!'}
                format={format}
                fps={fps}
-               photoHdr={enableHdr}
-               videoHdr={enableHdr}
+               photoHdr={format?.supportsPhotoHdr && enableHdr}
+               videoHdr={format?.supportsVideoHdr && enableHdr}
                lowLightBoost={device.supportsLowLightBoost && enableNightMode}
                enableZoomGesture={false}
                animatedProps={cameraAnimatedProps}
@ -93,7 +93,7 @@ enum DeviceError: String {
    case .lowLightBoostNotSupported:
      return "The currently selected camera device does not support low-light boost! Select a device where `device.supportsLowLightBoost` is true."
    case .focusNotSupported:
-     return "The currently selected camera device does not support focussing!"
+     return "The currently selected camera device does not support focusing!"
    case .microphoneUnavailable:
      return "The microphone was unavailable."
    case .notAvailableOnSimulator: