Get sizes per ImageFormat/PixelFormat (#119)

* Get sizes per ImageFormat/PixelFormat

* Fix `getOutputMinFrameDuration` crashing

* Fix `isHighestPhotoQualitySupported`

* Add TS type explanations

* Update CameraDevice.ts

* Update CameraViewModule.kt

* Add link to AVCaptureColorSpace apple docs
Marc Rousavy authored on 2021-04-13 13:01:24 +02:00, committed by GitHub
parent 584cd682db
commit 9320e356e3
3 changed files with 139 additions and 59 deletions

CameraViewModule.kt

@@ -5,8 +5,6 @@ import android.content.Context
 import android.content.pm.PackageManager
 import android.hardware.camera2.CameraCharacteristics
 import android.hardware.camera2.CameraManager
-import android.media.ImageReader
-import android.media.MediaRecorder
 import android.os.Build
 import android.util.Log
 import androidx.camera.core.CameraSelector
@@ -117,8 +115,8 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
     GlobalScope.launch(Dispatchers.Main) {
       withPromise(promise) {
         // I need to init those because the HDR/Night Mode Extension expects them to be initialized
-        val extensionsManager = ExtensionsManager.init(reactApplicationContext).await()
-        val processCameraProvider = ProcessCameraProvider.getInstance(reactApplicationContext).await()
+        ExtensionsManager.init(reactApplicationContext).await()
+        ProcessCameraProvider.getInstance(reactApplicationContext).await()
         val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as? CameraManager
           ?: throw CameraManagerUnavailableError()
@@ -173,7 +171,6 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
         val fieldOfView = characteristics.getFieldOfView()
         val map = Arguments.createMap()
-        val formats = Arguments.createArray()
         map.putString("id", id)
         map.putArray("devices", deviceTypes)
         map.putString("position", parseLensFacing(lensFacing))
@@ -194,61 +191,72 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
         }
         map.putDouble("neutralZoom", characteristics.neutralZoomPercent.toDouble())
-        val maxImageOutputSize = cameraConfig.getOutputSizes(ImageReader::class.java).maxByOrNull { it.width * it.height }!!
-        // TODO: Should I really check MediaRecorder::class instead of SurfaceView::class?
-        // Recording should always be done in the most efficient format, which is the format native to the camera framework
-        cameraConfig.getOutputSizes(MediaRecorder::class.java).forEach { size ->
-          val isHighestPhotoQualitySupported = areUltimatelyEqual(size, maxImageOutputSize)
-          // Get the number of seconds that each frame will take to process
-          val secondsPerFrame = cameraConfig.getOutputMinFrameDuration(MediaRecorder::class.java, size) / 1_000_000_000.0
-          val frameRateRanges = Arguments.createArray()
-          if (secondsPerFrame > 0) {
-            val fps = (1.0 / secondsPerFrame).toInt()
-            val frameRateRange = Arguments.createMap()
-            frameRateRange.putInt("minFrameRate", 1)
-            frameRateRange.putInt("maxFrameRate", fps)
-            frameRateRanges.pushMap(frameRateRange)
-          }
-          fpsRanges.forEach { range ->
-            val frameRateRange = Arguments.createMap()
-            frameRateRange.putInt("minFrameRate", range.lower)
-            frameRateRange.putInt("maxFrameRate", range.upper)
-            frameRateRanges.pushMap(frameRateRange)
-          }
-          // TODO Revisit getAvailableCameraDevices (colorSpaces, more than YUV?)
-          val colorSpaces = Arguments.createArray()
-          colorSpaces.pushString("yuv")
-          val videoStabilizationModes = Arguments.createArray()
-          if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_OFF)) {
-            videoStabilizationModes.pushString("off")
-          }
-          if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
-            videoStabilizationModes.pushString("auto")
-            videoStabilizationModes.pushString("standard")
-          }
-          val format = Arguments.createMap()
-          format.putDouble("photoHeight", size.height.toDouble())
-          format.putDouble("photoWidth", size.width.toDouble())
-          format.putDouble("videoHeight", size.height.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoHeight == photoHeight?)
-          format.putDouble("videoWidth", size.width.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoWidth == photoWidth?)
-          format.putBoolean("isHighestPhotoQualitySupported", isHighestPhotoQualitySupported)
-          format.putInt("maxISO", isoRange?.upper)
-          format.putInt("minISO", isoRange?.lower)
-          format.putDouble("fieldOfView", fieldOfView) // TODO: Revisit getAvailableCameraDevices (is fieldOfView accurate?)
-          format.putDouble("maxZoom", (zoomRange?.upper ?: maxScalerZoom).toDouble())
-          format.putArray("colorSpaces", colorSpaces)
-          format.putBoolean("supportsVideoHDR", false) // TODO: supportsVideoHDR
-          format.putBoolean("supportsPhotoHDR", supportsHdr)
-          format.putArray("frameRateRanges", frameRateRanges)
-          format.putString("autoFocusSystem", "none") // TODO: Revisit getAvailableCameraDevices (autoFocusSystem) (CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES or CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION)
-          format.putArray("videoStabilizationModes", videoStabilizationModes)
-          formats.pushMap(format)
-        }
+        // TODO: Optimize?
+        val maxImageOutputSize = cameraConfig.outputFormats
+          .flatMap { cameraConfig.getOutputSizes(it).toList() }
+          .maxByOrNull { it.width * it.height }!!
+        val formats = Arguments.createArray()
+        cameraConfig.outputFormats.forEach { formatId ->
+          val formatName = parseImageFormat(formatId)
+          cameraConfig.getOutputSizes(formatId).forEach { size ->
+            val isHighestPhotoQualitySupported = areUltimatelyEqual(size, maxImageOutputSize)
+            // Get the number of seconds that each frame will take to process
+            val secondsPerFrame = try {
+              cameraConfig.getOutputMinFrameDuration(formatId, size) / 1_000_000_000.0
+            } catch (error: Throwable) {
+              Log.e(REACT_CLASS, "Minimum Frame Duration for MediaRecorder Output cannot be calculated, format \"$formatName\" is not supported.")
+              null
+            }
+            val frameRateRanges = Arguments.createArray()
+            if (secondsPerFrame != null && secondsPerFrame > 0) {
+              val fps = (1.0 / secondsPerFrame).toInt()
+              val frameRateRange = Arguments.createMap()
+              frameRateRange.putInt("minFrameRate", 1)
+              frameRateRange.putInt("maxFrameRate", fps)
+              frameRateRanges.pushMap(frameRateRange)
+            }
+            fpsRanges.forEach { range ->
+              val frameRateRange = Arguments.createMap()
+              frameRateRange.putInt("minFrameRate", range.lower)
+              frameRateRange.putInt("maxFrameRate", range.upper)
+              frameRateRanges.pushMap(frameRateRange)
+            }
+            val colorSpaces = Arguments.createArray()
+            colorSpaces.pushString(formatName)
+            val videoStabilizationModes = Arguments.createArray()
+            if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_OFF)) {
+              videoStabilizationModes.pushString("off")
+            }
+            if (stabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
+              videoStabilizationModes.pushString("auto")
+              videoStabilizationModes.pushString("standard")
+            }
+            val format = Arguments.createMap()
+            format.putDouble("photoHeight", size.height.toDouble())
+            format.putDouble("photoWidth", size.width.toDouble())
+            format.putDouble("videoHeight", size.height.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoHeight == photoHeight?)
+            format.putDouble("videoWidth", size.width.toDouble()) // TODO: Revisit getAvailableCameraDevices (videoWidth == photoWidth?)
+            format.putBoolean("isHighestPhotoQualitySupported", isHighestPhotoQualitySupported)
+            format.putInt("maxISO", isoRange?.upper)
+            format.putInt("minISO", isoRange?.lower)
+            format.putDouble("fieldOfView", fieldOfView) // TODO: Revisit getAvailableCameraDevices (is fieldOfView accurate?)
+            format.putDouble("maxZoom", (zoomRange?.upper ?: maxScalerZoom).toDouble())
+            format.putArray("colorSpaces", colorSpaces)
+            format.putBoolean("supportsVideoHDR", false) // TODO: supportsVideoHDR
+            format.putBoolean("supportsPhotoHDR", supportsHdr)
+            format.putArray("frameRateRanges", frameRateRanges)
+            format.putString("autoFocusSystem", "none") // TODO: Revisit getAvailableCameraDevices (autoFocusSystem) (CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES or CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION)
+            format.putArray("videoStabilizationModes", videoStabilizationModes)
+            formats.pushMap(format)
+          }
+        }
         map.putArray("formats", formats)

android/src/main/java/com/mrousavy/camera/parsers/ImageFormat+String.kt (new file)

@@ -0,0 +1,48 @@
+package com.mrousavy.camera.parsers
+
+import android.graphics.ImageFormat
+
+/**
+ * Parses ImageFormat/PixelFormat int to a string representation useable for the TypeScript types.
+ */
+fun parseImageFormat(imageFormat: Int): String {
+  return when (imageFormat) {
+    ImageFormat.YUV_420_888 -> "yuv"
+    ImageFormat.YUV_422_888 -> "yuv"
+    ImageFormat.YUV_444_888 -> "yuv"
+    ImageFormat.JPEG -> "jpeg"
+    ImageFormat.DEPTH_JPEG -> "jpeg-depth"
+    ImageFormat.RAW_SENSOR -> "raw"
+    ImageFormat.RAW_PRIVATE -> "raw"
+    ImageFormat.HEIC -> "heic"
+    ImageFormat.PRIVATE -> "private"
+    ImageFormat.DEPTH16 -> "depth-16"
+    else -> "unknown"
+    /*
+    ImageFormat.UNKNOWN -> "TODOFILL"
+    ImageFormat.RGB_565 -> "TODOFILL"
+    ImageFormat.YV12 -> "TODOFILL"
+    ImageFormat.Y8 -> "TODOFILL"
+    ImageFormat.NV16 -> "TODOFILL"
+    ImageFormat.NV21 -> "TODOFILL"
+    ImageFormat.YUY2 -> "TODOFILL"
+    ImageFormat.FLEX_RGB_888 -> "TODOFILL"
+    ImageFormat.FLEX_RGBA_8888 -> "TODOFILL"
+    ImageFormat.RAW10 -> "TODOFILL"
+    ImageFormat.RAW12 -> "TODOFILL"
+    ImageFormat.DEPTH_POINT_CLOUD -> "TODOFILL"
+    @Suppress("DUPLICATE_LABEL_IN_WHEN")
+    PixelFormat.UNKNOWN -> "TODOFILL"
+    PixelFormat.TRANSPARENT -> "TODOFILL"
+    PixelFormat.TRANSLUCENT -> "TODOFILL"
+    PixelFormat.RGBX_8888 -> "TODOFILL"
+    PixelFormat.RGBA_F16 -> "TODOFILL"
+    PixelFormat.RGBA_8888 -> "TODOFILL"
+    PixelFormat.RGBA_1010102 -> "TODOFILL"
+    PixelFormat.OPAQUE -> "TODOFILL"
+    @Suppress("DUPLICATE_LABEL_IN_WHEN")
+    PixelFormat.RGB_565 -> "TODOFILL"
+    PixelFormat.RGB_888 -> "TODOFILL"
+    */
+  }
+}
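A quick, hypothetical usage example for the new parser (not part of the commit): mapping a few well-known ImageFormat constants to the string names consumed by the TypeScript ColorSpace type.

import android.graphics.ImageFormat
import com.mrousavy.camera.parsers.parseImageFormat

// Each constant maps to one of the ColorSpace string literals declared in CameraDevice.ts.
val names = listOf(ImageFormat.YUV_420_888, ImageFormat.JPEG, ImageFormat.RAW_SENSOR, ImageFormat.DEPTH16)
  .map { parseImageFormat(it) } // ["yuv", "jpeg", "raw", "depth-16"]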

CameraDevice.ts

@@ -46,14 +46,38 @@ export const parsePhysicalDeviceTypes = (
  * Indicates a format's color space.
  *
  * #### The following colorspaces are available on iOS:
- * * `"srgb"`: The sGRB color space (https://www.w3.org/Graphics/Color/srgb)
+ * * `"srgb"`: The sGRB color space.
  * * `"p3-d65"`: The P3 D65 wide color space which uses Illuminant D65 as the white point
  * * `"hlg-bt2020"`: The BT2020 wide color space which uses Illuminant D65 as the white point and Hybrid Log-Gamma as the transfer function
  *
+ * > See ["AVCaptureColorSpace"](https://developer.apple.com/documentation/avfoundation/avcapturecolorspace) for more information.
+ *
  * #### The following colorspaces are available on Android:
- * * `"yuv"`: The YCbCr color space.
+ * * `"yuv"`: The Multi-plane Android YCbCr color space. (YUV 420_888, 422_888 or 444_888)
+ * * `"jpeg"`: The compressed JPEG color space.
+ * * `"jpeg-depth"`: The compressed JPEG color space including depth data.
+ * * `"raw"`: The Camera's RAW sensor color space. (Single-channel Bayer-mosaic image, usually 16 bit)
+ * * `"heic"`: The compressed HEIC color space.
+ * * `"private"`: The Android private opaque image format. (The choices of the actual format and pixel data layout are entirely up to the device-specific and framework internal implementations, and may vary depending on use cases even for the same device. These buffers are not directly accessible to the application)
+ * * `"depth-16"`: The Android dense depth image format (16 bit)
+ * * `"unknown"`: Placeholder for an unknown image/pixel format. [Edit this file](https://github.com/cuvent/react-native-vision-camera/edit/main/android/src/main/java/com/mrousavy/camera/parsers/ImageFormat+String.kt) to add a name for the unknown format.
+ *
+ * > See ["Android Color Formats"](https://jbit.net/Android_Colors/) for more information.
  */
-export type ColorSpace = 'hlg-bt2020' | 'p3-d65' | 'srgb' | 'yuv';
+export type ColorSpace =
+  // ios
+  | 'hlg-bt2020'
+  | 'p3-d65'
+  | 'srgb'
+  // android
+  | 'yuv'
+  | 'jpeg'
+  | 'jpeg-depth'
+  | 'raw'
+  | 'heic'
+  | 'private'
+  | 'depth-16'
+  | 'unknown';
 /**
  * Indicates a format's autofocus system.
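To illustrate how the widened ColorSpace union can be consumed on the JavaScript side, here is a small hypothetical TypeScript helper (not part of the diff); it assumes a CameraDeviceFormat type whose colorSpaces array is filled by the native code shown above.

import type { CameraDeviceFormat, ColorSpace } from './CameraDevice';

// Keep only the formats that report the requested native color space.
function formatsWithColorSpace(formats: CameraDeviceFormat[], colorSpace: ColorSpace): CameraDeviceFormat[] {
  return formats.filter((format) => format.colorSpaces.includes(colorSpace));
}

// e.g. const yuvFormats = formatsWithColorSpace(device.formats, 'yuv');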