fix: Properly check HardwareBuffer usage flags before setting them (#2488)
* fix: Properly check `HardwareBuffer` usage flags before setting them
* fix: Use GPU flag if pixel format is NATIVE
* Update VideoPipeline.kt
* Add some logs
* fix: Properly convert ImageFormat to HardwareBufferFormat
* Update Podfile.lock
* fix: Add a safe `getHardwareBufferFormat` method
* Format
This commit is contained in:
parent fb96d000bc
commit f896831d4a
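Before the per-file diffs, here is a minimal standalone sketch of the idea behind this fix: ask HardwareBuffer.isSupported() whether a usage-flag combination actually works for the target buffer format before handing it to ImageReader.newInstance(), instead of setting flags unconditionally. This is an illustration only, not code from this commit; the function name, the example image format, and the maxImages value are placeholders.

import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.media.ImageReader
import android.os.Build
import androidx.annotation.RequiresApi

@RequiresApi(Build.VERSION_CODES.Q)
fun createReaderWithCheckedFlags(width: Int, height: Int): ImageReader {
  // Example format only: a CPU-visible Frame format and its HardwareBuffer equivalent.
  val imageFormat = ImageFormat.YUV_420_888
  val hardwareBufferFormat = HardwareBuffer.YCBCR_420_888

  val cpu = HardwareBuffer.USAGE_CPU_READ_OFTEN
  val gpu = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE

  // Prefer GPU+CPU, then CPU only; otherwise fall back to no explicit usage flags (0),
  // which is the same fallback the commit's getRecommendedHardwareBufferFlags() uses.
  val usage = when {
    HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, gpu or cpu) -> gpu or cpu
    HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, cpu) -> cpu
    else -> 0L
  }

  // maxImages = 3 is a placeholder; the library uses its own MAX_IMAGES constant.
  return ImageReader.newInstance(width, height, imageFormat, 3, usage)
}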
VideoPipeline.kt

@@ -9,6 +9,7 @@ import android.os.Build
 import android.util.Log
 import android.view.Surface
 import androidx.annotation.Keep
+import androidx.annotation.RequiresApi
 import com.facebook.jni.HybridData
 import com.facebook.proguard.annotations.DoNotStrip
 import com.mrousavy.camera.frameprocessor.Frame
@@ -31,7 +32,7 @@ class VideoPipeline(
   val height: Int,
   val format: PixelFormat = PixelFormat.NATIVE,
   private val isMirrored: Boolean = false,
-  enableFrameProcessor: Boolean = false,
+  private val enableFrameProcessor: Boolean = false,
   private val callback: CameraSession.Callback
 ) : SurfaceTexture.OnFrameAvailableListener,
   Closeable {
@@ -80,10 +81,9 @@ class VideoPipeline(

     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
       Log.i(TAG, "Using API 29 for GPU ImageReader...")
-      // If we are in PRIVATE, we just pass it to the GPU as efficiently as possible - so use GPU flag.
-      // If we are in YUV/RGB/..., we probably want to access Frame data - so use CPU flag.
-      val usage = if (format == ImageFormat.PRIVATE) HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE else HardwareBuffer.USAGE_CPU_READ_OFTEN
-      imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usage)
+      val usageFlags = getRecommendedHardwareBufferFlags()
+      Log.i(TAG, "Using ImageReader flags: $usageFlags")
+      imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags)
       imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format)
     } else {
       Log.i(TAG, "Using legacy API for CPU ImageReader...")
@@ -103,7 +103,7 @@ class VideoPipeline(

       if (hasOutputs) {
         // If we have outputs (e.g. a RecordingSession), pass the frame along to the OpenGL pipeline
-        imageWriter!!.queueInputImage(image)
+        imageWriter?.queueInputImage(image)
       }
     } catch (e: Throwable) {
       Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e)
@@ -182,6 +182,52 @@ class VideoPipeline(
     }
   }

+  /**
+   * Get the recommended HardwareBuffer flags for creating ImageReader instances with.
+   *
+   * Tries to use [HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE] if possible, [HardwareBuffer.USAGE_CPU_READ_OFTEN]
+   * or a combination of both flags if CPU access is needed ([enableFrameProcessor]), and [0] otherwise.
+   */
+  @RequiresApi(Build.VERSION_CODES.Q)
+  @Suppress("LiftReturnOrAssignment")
+  private fun getRecommendedHardwareBufferFlags(): Long {
+    val cpuFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN
+    val gpuFlag = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
+    val bothFlags = gpuFlag or cpuFlag
+
+    if (format == PixelFormat.NATIVE) {
+      // We don't need CPU access, so we can use GPU optimized buffers
+      if (supportsHardwareBufferFlags(gpuFlag)) {
+        // We support GPU Buffers directly
+        Log.i(TAG, "GPU HardwareBuffers are supported!")
+        return gpuFlag
+      } else {
+        // no flags are supported - fall back to default
+        return 0
+      }
+    } else {
+      // We are using YUV or RGB formats, so we need CPU access on the Frame
+      if (supportsHardwareBufferFlags(bothFlags)) {
+        // We support both CPU and GPU flags!
+        Log.i(TAG, "GPU + CPU HardwareBuffers are supported!")
+        return bothFlags
+      } else if (supportsHardwareBufferFlags(cpuFlag)) {
+        // We only support a CPU read flag, that's fine
+        Log.i(TAG, "CPU HardwareBuffers are supported!")
+        return cpuFlag
+      } else {
+        // no flags are supported - fall back to default
+        return 0
+      }
+    }
+  }
+
+  @RequiresApi(Build.VERSION_CODES.Q)
+  private fun supportsHardwareBufferFlags(flags: Long): Boolean {
+    val hardwareBufferFormat = format.toHardwareBufferFormat()
+    return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags)
+  }
+
   private external fun getInputTextureId(): Int
   private external fun onBeforeFrame()
   private external fun onFrame(transformMatrix: FloatArray)
PixelFormat.kt

@@ -4,6 +4,7 @@ import android.graphics.ImageFormat
 import android.util.Log
 import com.mrousavy.camera.core.InvalidTypeScriptUnionError
 import com.mrousavy.camera.core.PixelFormatNotSupportedError
+import com.mrousavy.camera.utils.HardwareBufferUtils
 import com.mrousavy.camera.utils.ImageFormatUtils

 enum class PixelFormat(override val unionValue: String) : JSUnionValue {
@@ -19,6 +20,11 @@ enum class PixelFormat(override val unionValue: String) : JSUnionValue {
       else -> throw PixelFormatNotSupportedError(this.unionValue)
     }

+  fun toHardwareBufferFormat(): Int {
+    val imageFormat = toImageFormat()
+    return HardwareBufferUtils.getHardwareBufferFormat(imageFormat)
+  }
+
   companion object : JSUnionValue.Companion<PixelFormat> {
     private const val TAG = "PixelFormat"
     fun fromImageFormat(imageFormat: Int): PixelFormat =
HardwareBufferUtils.kt (new file)

@@ -0,0 +1,36 @@
+package com.mrousavy.camera.utils
+
+import android.graphics.ImageFormat
+import android.hardware.HardwareBuffer
+import android.media.ImageReader
+import android.os.Build
+
+class HardwareBufferUtils {
+  companion object {
+    fun getHardwareBufferFormat(imageFormat: Int): Int {
+      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
+        // Dynamically create an ImageReader with the target ImageFormat, and then
+        // get it's HardwareBuffer format to see what it uses underneath.
+        val imageReader = ImageReader.newInstance(1, 1, imageFormat, 1, HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)
+        val format = imageReader.hardwareBufferFormat
+        imageReader.close()
+        return format
+      }
+
+      if (imageFormat == ImageFormat.PRIVATE) {
+        // PRIVATE formats are opaque, their actual equivalent HardwareBuffer format is unknown.
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
+          // We can assume that YUV 4:2:0 or RGB is used.
+          return HardwareBuffer.YCBCR_420_888
+        } else {
+          // Maybe assume we are on RGB if we're not on API R or above...
+          return HardwareBuffer.RGB_888
+        }
+      }
+
+      // According to PublicFormat.cpp in Android's codebase, the formats map 1:1 anyways..
+      // https://cs.android.com/android/platform/superproject/main/+/main:frameworks/native/libs/ui/PublicFormat.cpp
+      return imageFormat
+    }
+  }
+}
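A hedged usage illustration of the helper added above, assuming it is called from API 29+ code: resolve the HardwareBuffer format the platform would actually use for a given ImageFormat. The calling function, the logging tag, and the chosen formats are examples, not part of this commit.

import android.graphics.ImageFormat
import android.util.Log
import com.mrousavy.camera.utils.HardwareBufferUtils

fun logHardwareBufferFormats() {
  // On API 33+ the result is read from a probe ImageReader; on older APIs it falls
  // back to the mappings shown in the diff above (YCBCR_420_888 / RGB_888 / 1:1).
  val yuv = HardwareBufferUtils.getHardwareBufferFormat(ImageFormat.YUV_420_888)
  val priv = HardwareBufferUtils.getHardwareBufferFormat(ImageFormat.PRIVATE)
  Log.i("HardwareBufferUtilsDemo", "YUV_420_888 -> $yuv, PRIVATE -> $priv")
}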
Podfile.lock

@@ -27,9 +27,9 @@ PODS:
   - libwebp/sharpyuv (1.3.2)
   - libwebp/webp (1.3.2):
     - libwebp/sharpyuv
-  - MMKV (1.3.2):
-    - MMKVCore (~> 1.3.2)
-  - MMKVCore (1.3.2)
+  - MMKV (1.3.3):
+    - MMKVCore (~> 1.3.3)
+  - MMKVCore (1.3.3)
   - RCT-Folly (2021.07.22.00):
     - boost
     - DoubleConversion
@@ -337,7 +337,7 @@ PODS:
   - react-native-mmkv (2.11.0):
     - MMKV (>= 1.2.13)
     - React-Core
-  - react-native-safe-area-context (4.8.0):
+  - react-native-safe-area-context (4.8.2):
     - React-Core
   - react-native-video (5.2.1):
     - React-Core
@@ -675,8 +675,8 @@ SPEC CHECKSUMS:
   hermes-engine: 9180d43df05c1ed658a87cc733dc3044cf90c00a
   libevent: 4049cae6c81cdb3654a443be001fb9bdceff7913
   libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
-  MMKV: f21593c0af4b3f2a0ceb8f820f28bb639ea22bb7
-  MMKVCore: 31b4cb83f8266467eef20a35b6d78e409a11060d
+  MMKV: f902fb6719da13c2ab0965233d8963a59416f911
+  MMKVCore: d26e4d3edd5cb8588c2569222cbd8be4231374e9
   RCT-Folly: 424b8c9a7a0b9ab2886ffe9c3b041ef628fd4fb1
   RCTRequired: 83bca1c184feb4d2e51c72c8369b83d641443f95
   RCTTypeSafety: 13c4a87a16d7db6cd66006ce9759f073402ef85b
@@ -695,7 +695,7 @@ SPEC CHECKSUMS:
   react-native-blur: cfdad7b3c01d725ab62a8a729f42ea463998afa2
   react-native-cameraroll: 4701ae7c3dbcd3f5e9e150ca17f250a276154b35
   react-native-mmkv: e97c0c79403fb94577e5d902ab1ebd42b0715b43
-  react-native-safe-area-context: d1c8161a1e9560f7066e8926a7d825eb57c5dab5
+  react-native-safe-area-context: 0ee144a6170530ccc37a0fd9388e28d06f516a89
   react-native-video: c26780b224543c62d5e1b2a7244a5cd1b50e8253
   react-native-worklets-core: a894d572639fcf37c6d284cc799882d25d00c93d
   React-NativeModulesApple: b6868ee904013a7923128892ee4a032498a1024a