fix: Properly check HardwareBuffer usage flags before setting them (#2488)

* fix: Properly check `HardwareBuffer` usage flags before setting them

* fix: Use GPU flag if pixel format is NATIVE

* Update VideoPipeline.kt

* Add some logs

* fix: Properly convert ImageFormat to HardwareBufferFormat

* Update Podfile.lock

* fix: Add a safe `getHardwareBufferFormat` method

* Format
Marc Rousavy
2024-01-31 20:31:56 +01:00
committed by GitHub
parent fb96d000bc
commit f896831d4a
4 changed files with 101 additions and 13 deletions
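
The common thread of these commits: instead of unconditionally requesting CPU or GPU usage, ask the device first via `HardwareBuffer.isSupported()` and only pass flags it reports as allocatable, falling back to `0`. A minimal standalone sketch of that pattern follows; it is not the exact VisionCamera code, and the function name, parameter names, and candidate order are illustrative:

```kotlin
import android.hardware.HardwareBuffer
import android.media.ImageReader
import android.os.Build
import androidx.annotation.RequiresApi

// Illustrative only: pick the first usage-flag combination this device can actually allocate.
// `hardwareBufferFormat` must be a HardwareBuffer format constant, not an ImageFormat value.
@RequiresApi(Build.VERSION_CODES.Q)
fun chooseUsageFlags(width: Int, height: Int, hardwareBufferFormat: Int, needsCpuAccess: Boolean): Long {
  val gpu = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
  val cpu = HardwareBuffer.USAGE_CPU_READ_OFTEN
  // Prefer GPU-only when no CPU access is needed, otherwise try GPU+CPU, then CPU-only.
  val candidates = if (needsCpuAccess) listOf(gpu or cpu, cpu) else listOf(gpu)
  // isSupported() reports whether a HardwareBuffer with these parameters can be allocated at all.
  return candidates.firstOrNull { HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, it) } ?: 0L
}

// The result can then be passed straight into
// ImageReader.newInstance(width, height, imageFormat, maxImages, usage).
```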

File: VideoPipeline.kt

@@ -9,6 +9,7 @@ import android.os.Build
import android.util.Log
import android.view.Surface
import androidx.annotation.Keep
import androidx.annotation.RequiresApi
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.mrousavy.camera.frameprocessor.Frame
@@ -31,7 +32,7 @@ class VideoPipeline(
val height: Int,
val format: PixelFormat = PixelFormat.NATIVE,
private val isMirrored: Boolean = false,
- enableFrameProcessor: Boolean = false,
+ private val enableFrameProcessor: Boolean = false,
private val callback: CameraSession.Callback
) : SurfaceTexture.OnFrameAvailableListener,
Closeable {
@@ -80,10 +81,9 @@ class VideoPipeline(
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
Log.i(TAG, "Using API 29 for GPU ImageReader...")
- // If we are in PRIVATE, we just pass it to the GPU as efficiently as possible - so use GPU flag.
- // If we are in YUV/RGB/..., we probably want to access Frame data - so use CPU flag.
- val usage = if (format == ImageFormat.PRIVATE) HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE else HardwareBuffer.USAGE_CPU_READ_OFTEN
- imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usage)
+ val usageFlags = getRecommendedHardwareBufferFlags()
+ Log.i(TAG, "Using ImageReader flags: $usageFlags")
+ imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags)
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format)
} else {
Log.i(TAG, "Using legacy API for CPU ImageReader...")
@@ -103,7 +103,7 @@ class VideoPipeline(
if (hasOutputs) {
// If we have outputs (e.g. a RecordingSession), pass the frame along to the OpenGL pipeline
- imageWriter!!.queueInputImage(image)
+ imageWriter?.queueInputImage(image)
}
} catch (e: Throwable) {
Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e)
@@ -182,6 +182,52 @@ class VideoPipeline(
}
}
/**
* Get the recommended HardwareBuffer usage flags for creating ImageReader instances.
*
* Tries to use [HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE] if possible, [HardwareBuffer.USAGE_CPU_READ_OFTEN]
* (or a combination of both flags) if CPU access is needed ([enableFrameProcessor]), and falls back to `0` otherwise.
*/
@RequiresApi(Build.VERSION_CODES.Q)
@Suppress("LiftReturnOrAssignment")
private fun getRecommendedHardwareBufferFlags(): Long {
val cpuFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN
val gpuFlag = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
val bothFlags = gpuFlag or cpuFlag
if (format == PixelFormat.NATIVE) {
// We don't need CPU access, so we can use GPU optimized buffers
if (supportsHardwareBufferFlags(gpuFlag)) {
// We support GPU Buffers directly, so use the GPU flag
Log.i(TAG, "GPU HardwareBuffers are supported!")
return gpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
} else {
// We are using YUV or RGB formats, so we need CPU access on the Frame
if (supportsHardwareBufferFlags(bothFlags)) {
// We support both CPU and GPU flags!
Log.i(TAG, "GPU + CPU HardwareBuffers are supported!")
return bothFlags
} else if (supportsHardwareBufferFlags(cpuFlag)) {
// We only support a CPU read flag, that's fine
Log.i(TAG, "CPU HardwareBuffers are supported!")
return cpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
}
}
@RequiresApi(Build.VERSION_CODES.Q)
private fun supportsHardwareBufferFlags(flags: Long): Boolean {
val hardwareBufferFormat = format.toHardwareBufferFormat()
return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags)
}
private external fun getInputTextureId(): Int
private external fun onBeforeFrame()
private external fun onFrame(transformMatrix: FloatArray)
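
The log line added above prints the chosen usage flags as a raw Long ("Using ImageReader flags: ..."). If that value needs interpreting while debugging, a small hypothetical helper (not part of this commit) can decode the bit field, since the `USAGE_*` constants are bit flags:

```kotlin
import android.hardware.HardwareBuffer

// Hypothetical debugging helper: turn a usage-flag Long into a readable string.
fun describeUsageFlags(usage: Long): String {
  val names = mutableListOf<String>()
  if (usage and HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE) names.add("GPU_SAMPLED_IMAGE")
  if (usage and HardwareBuffer.USAGE_CPU_READ_OFTEN == HardwareBuffer.USAGE_CPU_READ_OFTEN) names.add("CPU_READ_OFTEN")
  return if (names.isEmpty()) "0 (no explicit usage flags)" else names.joinToString(" | ")
}
```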

File: PixelFormat.kt

@@ -4,6 +4,7 @@ import android.graphics.ImageFormat
import android.util.Log
import com.mrousavy.camera.core.InvalidTypeScriptUnionError
import com.mrousavy.camera.core.PixelFormatNotSupportedError
import com.mrousavy.camera.utils.HardwareBufferUtils
import com.mrousavy.camera.utils.ImageFormatUtils
enum class PixelFormat(override val unionValue: String) : JSUnionValue {
@@ -19,6 +20,11 @@ enum class PixelFormat(override val unionValue: String) : JSUnionValue {
else -> throw PixelFormatNotSupportedError(this.unionValue)
}
fun toHardwareBufferFormat(): Int {
val imageFormat = toImageFormat()
return HardwareBufferUtils.getHardwareBufferFormat(imageFormat)
}
companion object : JSUnionValue.Companion<PixelFormat> {
private const val TAG = "PixelFormat"
fun fromImageFormat(imageFormat: Int): PixelFormat =
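
The new `toHardwareBufferFormat()` exists because `HardwareBuffer.isSupported()` (used by `supportsHardwareBufferFlags()` above) takes a `HardwareBuffer` format constant, while the rest of the pipeline deals in `ImageFormat`/`PixelFormat` values. A rough usage sketch; the YUV example value is an assumption, since only `PixelFormat.NATIVE` appears in the diff itself:

```kotlin
import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.os.Build
import androidx.annotation.RequiresApi

// PixelFormat here is the library's enum shown above (import path omitted).
// Sketch: ImageFormat and HardwareBuffer formats live in separate constant namespaces,
// so isSupported() needs the converted value from toHardwareBufferFormat().
@RequiresApi(Build.VERSION_CODES.Q)
fun supportsCpuReadForYuv(width: Int, height: Int): Boolean {
  val pixelFormat = PixelFormat.fromImageFormat(ImageFormat.YUV_420_888)
  val hardwareBufferFormat = pixelFormat.toHardwareBufferFormat()
  return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, HardwareBuffer.USAGE_CPU_READ_OFTEN)
}
```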

File: HardwareBufferUtils.kt (new file)

@@ -0,0 +1,36 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.media.ImageReader
import android.os.Build
class HardwareBufferUtils {
companion object {
fun getHardwareBufferFormat(imageFormat: Int): Int {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
// Dynamically create an ImageReader with the target ImageFormat, and then
// get its HardwareBuffer format to see what it uses underneath.
val imageReader = ImageReader.newInstance(1, 1, imageFormat, 1, HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)
val format = imageReader.hardwareBufferFormat
imageReader.close()
return format
}
if (imageFormat == ImageFormat.PRIVATE) {
// PRIVATE formats are opaque; their actual HardwareBuffer equivalent is unknown,
// but in practice they are backed by either YUV 4:2:0 or RGB.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
// Assume YUV 4:2:0.
return HardwareBuffer.YCBCR_420_888
} else {
// Below API R, fall back to assuming RGB.
return HardwareBuffer.RGB_888
}
}
// According to PublicFormat.cpp in Android's codebase, the formats map 1:1 anyway.
// https://cs.android.com/android/platform/superproject/main/+/main:frameworks/native/libs/ui/PublicFormat.cpp
return imageFormat
}
}
}
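
To sanity-check what this helper resolves to on a given device, a throwaway log loop like the one below can be dropped into the app. The two formats listed are the ones the pipeline actually deals with; everything else here is illustrative and not part of the commit:

```kotlin
import android.graphics.ImageFormat
import android.util.Log
import com.mrousavy.camera.utils.HardwareBufferUtils

// Example-only check: print which HardwareBuffer format each ImageFormat resolves to on this device.
fun logResolvedHardwareBufferFormats() {
  val formats = mapOf(
    "PRIVATE" to ImageFormat.PRIVATE,
    "YUV_420_888" to ImageFormat.YUV_420_888
  )
  for ((name, imageFormat) in formats) {
    val bufferFormat = HardwareBufferUtils.getHardwareBufferFormat(imageFormat)
    Log.i("HardwareBufferUtils", "ImageFormat.$name -> HardwareBuffer format $bufferFormat")
  }
}
```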