feat: Add enableGpuBuffers property (#2557)

* Revert "fix: Fix VideoPipeline crash on Samsung (Disable `USAGE_GPU_SAMPLED_IMAGE` ImageReader) (#2555)"

This reverts commit ad33dd91b1.

* feat: Add `enableGpuBuffers` prop

* Create ImageWriter separately
Marc Rousavy, 2024-02-14 12:47:40 +01:00, committed by GitHub
parent 478688529b
commit 1a0bd8f7c2
10 changed files with 146 additions and 7 deletions

View File

@@ -70,6 +70,13 @@ Enable Buffer Compression ([`enableBufferCompression`](/docs/api/interfaces/Came
Note: When not using a `frameProcessor`, buffer compression is automatically enabled.
### GPU buffers
Enable GPU Buffer flags ([`enableGpuBuffers`](/docs/api/interfaces/CameraProps#enablegpubuffers)) to optimize the Video Pipeline for zero-copy buffer forwarding.
If this is enabled, the Video Pipeline can avoid an additional CPU -> GPU copy, resulting in better performance and efficiency.
Note: This only takes effect when using a `frameProcessor`.
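For example, a minimal sketch of opting in to GPU buffers (assuming `device` and `frameProcessor` come from the `useCameraDevice` and `useFrameProcessor` hooks):
```tsx
<Camera
  device={device}
  isActive={true}
  video={true}
  frameProcessor={frameProcessor}
  enableGpuBuffers={true}
  style={{ width: 500, height: 500 }}
/>
```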
### Video Stabilization
Video Stabilization requires additional overhead to start the algorithm, so disabling [`videoStabilizationMode`](/docs/api/interfaces/CameraProps#videostabilizationmode) can significantly speed up the Camera initialization time.
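For example, a sketch that explicitly opts out of stabilization (`"off"` is the mode value that skips the algorithm; omitting the prop should have the same effect):
```tsx
<Camera device={device} isActive={true} videoStabilizationMode="off" style={{ width: 500, height: 500 }} />
```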

View File

@@ -112,7 +112,7 @@ If you're experiencing build issues or runtime issues in VisionCamera, make sure
2. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information.
3. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs. There is useful information about the Frame Processor Runtime that will tell you if something goes wrong.
4. If your Frame Processor is not running, make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
-5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr` or `format` on the `<Camera>` component except for the required ones:
5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr`, `enableGpuBuffers` or `format` on the `<Camera>` component except for the required ones:
```tsx
<Camera device={device} isActive={true} style={{ width: 500, height: 500 }} />
```

View File

@@ -64,6 +64,7 @@ class CameraView(context: Context) :
var videoHdr = false
var photoHdr = false
var lowLightBoost: Boolean? = null // nullable bool
var enableGpuBuffers: Boolean = false
// other props
var isActive = false
@@ -160,7 +161,8 @@ class CameraView(context: Context) :
CameraConfiguration.Video(
videoHdr,
pixelFormat,
-enableFrameProcessor
enableFrameProcessor,
enableGpuBuffers
)
)
} else {

View File

@@ -84,6 +84,11 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
view.enableFpsGraph = enableFpsGraph
}
@ReactProp(name = "enableGpuBuffers")
fun setEnableGpuBuffers(view: CameraView, enableGpuBuffers: Boolean) {
view.enableGpuBuffers = enableGpuBuffers
}
@ReactProp(name = "videoStabilizationMode")
fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) {
val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode)

View File

@@ -44,7 +44,7 @@ data class CameraConfiguration(
// Output<T> types, those need to be comparable
data class CodeScanner(val codeTypes: List<CodeType>)
data class Photo(val enableHdr: Boolean)
-data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean)
data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean, val enableGpuBuffers: Boolean)
data class Audio(val nothing: Unit)
data class Preview(val surface: Surface)

View File

@@ -288,6 +288,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
video.config.pixelFormat,
isSelfie,
video.config.enableFrameProcessor,
video.config.enableGpuBuffers,
callback
)
val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr)

View File

@@ -9,6 +9,7 @@ import android.os.Build
import android.util.Log
import android.view.Surface
import androidx.annotation.Keep
import androidx.annotation.RequiresApi
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.mrousavy.camera.frameprocessor.Frame
@@ -32,6 +33,7 @@ class VideoPipeline(
val format: PixelFormat = PixelFormat.NATIVE,
private val isMirrored: Boolean = false,
private val enableFrameProcessor: Boolean = false,
private val enableGpuBuffers: Boolean = false,
private val callback: CameraSession.Callback
) : SurfaceTexture.OnFrameAvailableListener,
Closeable {
@@ -78,14 +80,25 @@
val format = getImageReaderFormat()
Log.i(TAG, "Using ImageReader round-trip (format: #$format)")
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
// Create ImageReader
if (enableGpuBuffers && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val usageFlags = getRecommendedHardwareBufferFlags()
Log.i(TAG, "Creating ImageReader with GPU-optimized usage flags: $usageFlags")
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags)
} else {
Log.i(TAG, "Creating ImageReader with default usage flags...")
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
}
// Create ImageWriter
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
Log.i(TAG, "Using ImageWriter with custom format (#$format)...")
Log.i(TAG, "Creating ImageWriter with format #$format...")
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format)
} else {
Log.i(TAG, "Using ImageWriter with default format...")
Log.i(TAG, "Creating ImageWriter with default format...")
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES)
}
imageReader!!.setOnImageAvailableListener({ reader ->
Log.i(TAG, "ImageReader::onImageAvailable!")
val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
@@ -181,6 +194,56 @@
}
}
/**
* Get the recommended HardwareBuffer flags for creating ImageReader instances with.
*
* Tries to use [HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE] if possible, combines it with
* [HardwareBuffer.USAGE_CPU_READ_OFTEN] if CPU access is needed (non-native [format]s), and falls back to `0` (default flags) otherwise.
*/
@RequiresApi(Build.VERSION_CODES.Q)
@Suppress("LiftReturnOrAssignment")
private fun getRecommendedHardwareBufferFlags(): Long {
val cpuFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN
val gpuFlag = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
val bothFlags = gpuFlag or cpuFlag
if (format == PixelFormat.NATIVE) {
// We don't need CPU access, so we can use GPU optimized buffers
if (supportsHardwareBufferFlags(gpuFlag)) {
// We support GPU Buffers directly
Log.i(TAG, "GPU HardwareBuffers are supported!")
return gpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
} else {
// We are using YUV or RGB formats, so we need CPU access on the Frame
if (supportsHardwareBufferFlags(bothFlags)) {
// We support both CPU and GPU flags!
Log.i(TAG, "GPU + CPU HardwareBuffers are supported!")
return bothFlags
} else if (supportsHardwareBufferFlags(cpuFlag)) {
// We only support a CPU read flag, that's fine
Log.i(TAG, "CPU HardwareBuffers are supported!")
return cpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
}
}
@RequiresApi(Build.VERSION_CODES.Q)
private fun supportsHardwareBufferFlags(flags: Long): Boolean {
val hardwareBufferFormat = format.toHardwareBufferFormat()
try {
return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags)
} catch (_: Throwable) {
return false
}
}
private external fun getInputTextureId(): Int
private external fun onBeforeFrame()
private external fun onFrame(transformMatrix: FloatArray)

View File

@@ -4,6 +4,7 @@ import android.graphics.ImageFormat
import android.util.Log
import com.mrousavy.camera.core.InvalidTypeScriptUnionError
import com.mrousavy.camera.core.PixelFormatNotSupportedError
import com.mrousavy.camera.utils.HardwareBufferUtils
import com.mrousavy.camera.utils.ImageFormatUtils
enum class PixelFormat(override val unionValue: String) : JSUnionValue {
@@ -19,6 +20,11 @@ enum class PixelFormat(override val unionValue: String) : JSUnionValue {
else -> throw PixelFormatNotSupportedError(this.unionValue)
}
fun toHardwareBufferFormat(): Int {
val imageFormat = toImageFormat()
return HardwareBufferUtils.getHardwareBufferFormat(imageFormat)
}
companion object : JSUnionValue.Companion<PixelFormat> {
private const val TAG = "PixelFormat"
fun fromImageFormat(imageFormat: Int): PixelFormat =

View File

@@ -0,0 +1,36 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.media.ImageReader
import android.os.Build
class HardwareBufferUtils {
companion object {
fun getHardwareBufferFormat(imageFormat: Int): Int {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
// Dynamically create an ImageReader with the target ImageFormat, and then
// get its HardwareBuffer format to see what it uses underneath.
val imageReader = ImageReader.newInstance(1, 1, imageFormat, 1, HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)
val format = imageReader.hardwareBufferFormat
imageReader.close()
return format
}
if (imageFormat == ImageFormat.PRIVATE) {
// PRIVATE formats are opaque; their actual equivalent HardwareBuffer format is unknown.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
// We can assume that YUV 4:2:0 or RGB is used.
return HardwareBuffer.YCBCR_420_888
} else {
// Below API R, assume RGB as a best guess...
return HardwareBuffer.RGB_888
}
}
// According to PublicFormat.cpp in Android's codebase, the formats map 1:1 anyway.
// https://cs.android.com/android/platform/superproject/main/+/main:frameworks/native/libs/ui/PublicFormat.cpp
return imageFormat
}
}
}

View File

@@ -183,10 +183,29 @@ export interface CameraProps extends ViewProps {
*
* @platform iOS
* @default
-* - true // if video={true} and frameProcessor={undefined}
* - true // if frameProcessor={undefined}
* - false // otherwise
*/
enableBufferCompression?: boolean
/**
* Enables or disables GPU-sampled buffers for the video stream. This only takes effect when using a {@linkcode frameProcessor}.
*
* When recording a Video ({@linkcode video}) while a Frame Processor is running ({@linkcode frameProcessor}),
* the {@linkcode Frame | Frames} will need to be forwarded to the Media Encoder.
*
* - When `enableGpuBuffers` is `false`, the Video Pipeline will use CPU buffers, causing an additional copy
* from the Frame Processor to the Media Encoder, which potentially results in increased latency.
* - When `enableGpuBuffers` is `true`, the Video Pipeline will use shared GPU buffers, which greatly increases
* its efficiency since an additional buffer copy is avoided.
* (See [`USAGE_GPU_SAMPLED_IMAGE`](https://developer.android.com/reference/android/hardware/HardwareBuffer#USAGE_GPU_SAMPLED_IMAGE))
*
* In general, it is recommended to set this to `true` if possible, as it can increase the performance and efficiency of the Video Pipeline.
* However, this is an experimental feature flag, so use it at your own risk.
*
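* @example
* A hypothetical usage sketch (assumes `device` and `frameProcessor` are created elsewhere, e.g. via the `useCameraDevice` and `useFrameProcessor` hooks):
* ```tsx
* <Camera
*   device={device}
*   isActive={true}
*   video={true}
*   frameProcessor={frameProcessor}
*   enableGpuBuffers={true}
* />
* ```
*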
* @platform Android
* @default false
*/
enableGpuBuffers?: boolean
/**
* Enables or disables low-light boost on this camera device.
*