perf: Improve pixelFormat and add CameraDevice.sensorOrientation (#1729)

* feat: Orientation
* fix orientation value in manifest
* Update AndroidManifest.xml
* Style
* fix: Set MAX_IMAGES to 3
* Pass `isMirrored` to `VideoPipeline`
* Update docs about Skia FPs
* Options
* Add iPad target
* Remove UIDevice onOrientationChanged listener
* Update CameraView+AVCaptureSession.swift
* Update CameraView+AVCaptureSession.swift
* Update CameraView+AVCaptureSession.swift
* Get available pixelFormats on iOS
* format
* Update CameraSession.kt
* Expose `CameraDevice.sensorOrientation`
* Lock orientation again
@@ -17,6 +17,7 @@ import com.mrousavy.camera.extensions.getVideoSizes
import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.HardwareLevel
import com.mrousavy.camera.parsers.LensFacing
+import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.VideoStabilizationMode
import kotlin.math.PI
import kotlin.math.atan
@@ -36,6 +37,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false
private val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f /* 35mm default */)
private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
+private val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
private val name = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION)
else null) ?: "$lensFacing (${cameraId})"
@@ -202,6 +204,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
map.putDouble("maxZoom", maxZoom)
map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android
map.putString("hardwareLevel", hardwareLevel.unionValue)
+map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue)

val array = Arguments.createArray()
cameraConfig.outputFormats.forEach { f ->
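For context, CameraCharacteristics.SENSOR_ORIENTATION reports the sensor's clockwise rotation in degrees (0, 90, 180 or 270). Below is a minimal standalone sketch of the kind of degrees-to-union-value mapping that Orientation.fromRotationDegrees presumably performs; the helper name and the exact landscape direction are assumptions, not taken from this diff.

fun orientationUnionFromDegrees(degrees: Int): String = when (degrees) {
  90 -> "landscape-right" // assumption: the real enum may map 90° to the other landscape value
  180 -> "portrait-upside-down"
  270 -> "landscape-left"
  else -> "portrait" // 0° and any unexpected value
}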
@@ -56,7 +56,7 @@ class CameraSession(private val context: Context,
private const val TAG = "CameraSession"

// TODO: Samsung advertises 60 FPS but only allows 30 FPS for some reason.
-private val CAN_SET_FPS = Build.MANUFACTURER != "samsung"
+private val CAN_SET_FPS = !Build.MANUFACTURER.equals("samsung", true)
}

data class CapturedPhoto(val image: Image,
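For context on the CAN_SET_FPS change (an assumption about the motivation, the diff itself does not state it): Build.MANUFACTURER casing is not guaranteed, so an exact string comparison can silently miss the vendor, while the case-insensitive overload does not. A minimal sketch:

val manufacturer = "Samsung" // hypothetical value reported by some device
val strictCheck = manufacturer != "samsung" // true: device would wrongly be treated as FPS-capable
val relaxedCheck = !manufacturer.equals("samsung", ignoreCase = true) // false: correctly flagged as Samsung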
@@ -24,9 +24,10 @@ import java.io.Closeable
@Suppress("KotlinJniMissingFunction")
class VideoPipeline(val width: Int,
val height: Int,
-val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
+val format: Int = ImageFormat.PRIVATE,
+private val isMirrored: Boolean = false): SurfaceTexture.OnFrameAvailableListener, Closeable {
companion object {
-private const val MAX_IMAGES = 5
+private const val MAX_IMAGES = 3
private const val TAG = "VideoPipeline"
}
@@ -98,7 +99,7 @@ class VideoPipeline(val width: Int,
val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener

// TODO: Get correct orientation and isMirrored
-val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
+val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, isMirrored)
frame.incrementRefCount()
frameProcessor?.call(frame)
frame.decrementRefCount()
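With this change, front-camera mirroring reaches each Frame instead of being hard-coded to false. A hypothetical consumer, not part of the diff, assuming Frame exposes `width` and `isMirrored` as properties matching its constructor:

// Hypothetical helper for a native frame-processor plugin: map a detected
// x coordinate into view space, flipping it when the frame is mirrored.
fun mirrorAwareX(frame: Frame, detectedX: Int): Int =
  if (frame.isMirrored) frame.width - detectedX else detectedX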
@@ -110,7 +111,7 @@ class VideoPipeline(val width: Int,
* Configures the Pipeline to also call the given [FrameProcessor].
* * If the [frameProcessor] is `null`, this output channel will be removed.
* * If the [frameProcessor] is not `null`, the [VideoPipeline] will create Frames
* using an [ImageWriter] and call the [FrameProcessor] with those Frames.
*/
fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
synchronized(this) {
@@ -1,6 +1,7 @@
package com.mrousavy.camera.core.outputs

import android.graphics.ImageFormat
+import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.media.Image
import android.media.ImageReader
@@ -91,6 +92,7 @@ class CameraOutputs(val cameraId: String,
init {
val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+val isMirrored = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT

Log.i(TAG, "Preparing Outputs for Camera $cameraId...")
@@ -117,7 +119,7 @@ class CameraOutputs(val cameraId: String,
// Video output: High resolution repeating images (startRecording() or useFrameProcessor())
if (video != null) {
val size = characteristics.getVideoSizes(cameraId, video.format).closestToOrMax(video.targetSize)
-val videoPipeline = VideoPipeline(size.width, size.height, video.format)
+val videoPipeline = VideoPipeline(size.width, size.height, video.format, isMirrored)

Log.i(TAG, "Adding ${size.width}x${size.height} video output. (Format: ${video.format})")
videoOutput = VideoPipelineOutput(videoPipeline, SurfaceOutput.OutputType.VIDEO)
@@ -46,23 +46,23 @@ fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager,
QualityPrioritization.BALANCED -> 92
QualityPrioritization.QUALITY -> 100
}
-captureRequest[CaptureRequest.JPEG_QUALITY] = jpegQuality.toByte()
+captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte())

captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees())

when (flashMode) {
// Set the Flash Mode
Flash.OFF -> {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
}
Flash.ON -> {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH)
}
Flash.AUTO -> {
if (enableRedEyeReduction) {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE)
} else {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)
}
}
}
@@ -75,17 +75,17 @@ fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager,
val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)
val hasOpticalStabilization = opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false
if (hasOpticalStabilization) {
-captureRequest[CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE] = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF
-captureRequest[CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE] = CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON
+captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON)
} else if (hasDigitalStabilization) {
-captureRequest[CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE] = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON
+captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON)
} else {
// no stabilization is supported. ignore it
}
}

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
-captureRequest[CaptureRequest.CONTROL_ZOOM_RATIO] = zoom
+captureRequest.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoom)
} else {
val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!!
captureRequest.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoom))
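For context on the pre-Android-11 fallback above: CaptureRequest.CONTROL_ZOOM_RATIO only exists on API 30+, so older devices emulate zoom by shrinking SCALER_CROP_REGION around the center of the sensor's active array. A minimal sketch of such a crop computation; the real logic lives in the repo's `zoomed` extension, so this helper and its name are assumptions, not that code:

import android.graphics.Rect

// Hypothetical helper: center-crop the active array by 1/zoom on each axis.
fun centerCropForZoom(activeArray: Rect, zoom: Float): Rect {
  val cropWidth = (activeArray.width() / zoom).toInt()
  val cropHeight = (activeArray.height() / zoom).toInt()
  val left = activeArray.centerX() - cropWidth / 2
  val top = activeArray.centerY() - cropHeight / 2
  return Rect(left, top, left + cropWidth, top + cropHeight)
}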