feat: Use AHardwareBuffer* for frame.toArrayBuffer() (#1888)

* feat: Route images through `ImageWriter` into OpenGL pipeline

* fix: Use RGB format

* fix: Every device supports YUV, RGB and NATIVE

* Update VideoPipeline.kt

* log format

* Plug ImageReader between OpenGL pipeline

* Call Frame Processor

* Format

* Remove logs

* feat: Use `HardwareBuffer` for `toArrayBuffer()`

* Format
Marc Rousavy 2023-09-29 21:54:04 +02:00 committed by GitHub
parent 954b44810b
commit cf1952d8f7
5 changed files with 47 additions and 44 deletions
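
The core of the change: `frame.toArrayBuffer()` no longer copies from a direct `ByteBuffer` returned by Java. Instead, the C++ side asks `JFrame` for the frame's `AHardwareBuffer*`, locks it for CPU reads, copies the pixels into a cached JSI `ArrayBuffer`, and unlocks/releases it. A minimal standalone sketch of that NDK lock/copy/unlock pattern (the function name `copyFrameToVector` and the error handling are illustrative additions, not code from this commit):

#include <android/hardware_buffer.h>
#include <cstring>
#include <stdexcept>
#include <vector>

std::vector<uint8_t> copyFrameToVector(AHardwareBuffer* hardwareBuffer) {
  // Query width/height/stride of the GPU-backed buffer.
  AHardwareBuffer_Desc desc;
  AHardwareBuffer_describe(hardwareBuffer, &desc);

  // Same size computation as the diff below; stride is in pixels,
  // so this assumes a single-plane, 1-byte-per-pixel layout.
  size_t size = desc.height * desc.stride;

  // Map the buffer into CPU-readable memory (-1 = no fence to wait on).
  void* address = nullptr;
  int result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_MASK,
                                    -1, nullptr, &address);
  if (result != 0) {
    throw std::runtime_error("Failed to lock AHardwareBuffer for CPU access!");
  }

  std::vector<uint8_t> bytes(size);
  std::memcpy(bytes.data(), address, size);

  // Unmap again; nullptr means we don't need a release fence.
  AHardwareBuffer_unlock(hardwareBuffer, nullptr);
  return bytes;
}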

FrameHostObject.cpp

@@ -12,6 +12,9 @@
 #include <string>
 #include <vector>
+#include <android/hardware_buffer.h>
+#include <android/hardware_buffer_jni.h>
 namespace vision {
 using namespace facebook;
@@ -82,11 +85,13 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
   if (name == "toArrayBuffer") {
     jsi::HostFunctionType toArrayBuffer = [=](jsi::Runtime& runtime, const jsi::Value& thisArg, const jsi::Value* args,
                                               size_t count) -> jsi::Value {
-      auto buffer = this->frame->toByteBuffer();
-      if (!buffer->isDirect()) {
-        throw std::runtime_error("Failed to get byte content of Frame - array is not direct ByteBuffer!");
-      }
-      auto size = buffer->getDirectSize();
+      AHardwareBuffer* hardwareBuffer = this->frame->getHardwareBuffer();
+
+      AHardwareBuffer_Desc bufferDescription;
+      AHardwareBuffer_describe(hardwareBuffer, &bufferDescription);
+      __android_log_print(ANDROID_LOG_INFO, "Frame", "Buffer %i x %i @ %i", bufferDescription.width, bufferDescription.height,
+                          bufferDescription.stride);
+      size_t size = bufferDescription.height * bufferDescription.stride;

       static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
       if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
@@ -102,9 +107,17 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
         runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
       }
+      // Get CPU access to the HardwareBuffer (&buffer is a virtual temporary address)
+      void* buffer;
+      AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_MASK, -1, nullptr, &buffer);
       // directly write to C++ JSI ArrayBuffer
       auto destinationBuffer = arrayBuffer.data(runtime);
-      memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);
+      memcpy(destinationBuffer, buffer, sizeof(uint8_t) * size);
+      // Release HardwareBuffer again
+      AHardwareBuffer_unlock(hardwareBuffer, nullptr);
+      AHardwareBuffer_release(hardwareBuffer);
       return arrayBuffer;
     };
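
The hunk above reuses one `ArrayBuffer`, stored on the JS global as `__frameArrayBufferCache`, instead of allocating a new one per frame. A rough sketch of how such a cache can be built on top of JSI's `MutableBuffer` (the `VectorBuffer` class and `getCachedArrayBuffer` helper are illustrative names, not the VisionCamera implementation):

#include <jsi/jsi.h>
#include <memory>
#include <vector>

using namespace facebook;

// A simple heap-backed MutableBuffer that a jsi::ArrayBuffer can wrap.
class VectorBuffer : public jsi::MutableBuffer {
 public:
  explicit VectorBuffer(size_t size) : bytes_(size) {}
  size_t size() const override { return bytes_.size(); }
  uint8_t* data() override { return bytes_.data(); }

 private:
  std::vector<uint8_t> bytes_;
};

// Returns the ArrayBuffer cached on the JS global, creating it on first use.
jsi::ArrayBuffer getCachedArrayBuffer(jsi::Runtime& runtime, size_t size) {
  static constexpr auto kCachePropName = "__frameArrayBufferCache";
  if (!runtime.global().hasProperty(runtime, kCachePropName)) {
    auto mutableBuffer = std::make_shared<VectorBuffer>(size);
    jsi::ArrayBuffer arrayBuffer(runtime, mutableBuffer);
    runtime.global().setProperty(runtime, kCachePropName, arrayBuffer);
  }
  auto cached = runtime.global().getPropertyAsObject(runtime, kCachePropName).getArrayBuffer(runtime);
  // A real implementation would re-allocate here if `size` changed between frames.
  return cached;
}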

JFrame.cpp

@@ -4,6 +4,7 @@
 #include "JFrame.h"
+#include <android/hardware_buffer_jni.h>
 #include <fbjni/ByteBuffer.h>
 #include <fbjni/fbjni.h>
 #include <jni.h>
@@ -58,9 +59,10 @@ int JFrame::getBytesPerRow() const {
   return getBytesPerRowMethod(self());
 }
-local_ref<JByteBuffer> JFrame::toByteBuffer() const {
-  static const auto toByteBufferMethod = getClass()->getMethod<JByteBuffer()>("toByteBuffer");
-  return toByteBufferMethod(self());
+AHardwareBuffer* JFrame::getHardwareBuffer() const {
+  static const auto getHardwareBufferMethod = getClass()->getMethod<jobject()>("getHardwareBufferBoxed");
+  auto hardwareBuffer = getHardwareBufferMethod(self());
+  return AHardwareBuffer_fromHardwareBuffer(jni::Environment::current(), hardwareBuffer.get());
 }
 void JFrame::incrementRefCount() {
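
`JFrame::getHardwareBuffer()` converts the boxed Java `HardwareBuffer` with `AHardwareBuffer_fromHardwareBuffer`, which per the NDK documentation does not add a reference of its own; a caller that wants to keep the pointer alive beyond the JNI call pairs it with `AHardwareBuffer_acquire()` and a later `AHardwareBuffer_release()`. A hedged sketch of that ownership pattern (the helper name `getOwnedHardwareBuffer` is illustrative, not part of this commit):

#include <android/hardware_buffer.h>
#include <android/hardware_buffer_jni.h>
#include <jni.h>

// Convert a boxed android.hardware.HardwareBuffer into an AHardwareBuffer*
// and take one reference so it stays valid after this JNI call returns.
AHardwareBuffer* getOwnedHardwareBuffer(JNIEnv* env, jobject boxedHardwareBuffer) {
  AHardwareBuffer* buffer = AHardwareBuffer_fromHardwareBuffer(env, boxedHardwareBuffer);
  // The caller must balance this with AHardwareBuffer_release().
  AHardwareBuffer_acquire(buffer);
  return buffer;
}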

JFrame.h

@@ -4,6 +4,7 @@
 #pragma once
+#include <android/hardware_buffer.h>
 #include <fbjni/ByteBuffer.h>
 #include <fbjni/fbjni.h>
 #include <jni.h>
@@ -26,7 +27,7 @@ public:
   jlong getTimestamp() const;
   local_ref<JString> getOrientation() const;
   local_ref<JString> getPixelFormat() const;
-  local_ref<JByteBuffer> toByteBuffer() const;
+  AHardwareBuffer* getHardwareBuffer() const;
   void incrementRefCount();
   void decrementRefCount();
   void close();

CameraError.kt

@@ -50,13 +50,6 @@ class NoCameraDeviceError :
   )
 class PixelFormatNotSupportedError(format: String) :
   CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
-class PixelFormatNotSupportedInVideoPipelineError(format: String) :
-  CameraError(
-    "device",
-    "pixel-format-not-supported",
-    "The pixelFormat $format is currently not supported in the VideoPipeline! " +
-      "See this issue for more details ($4.000 bounty!): https://github.com/mrousavy/react-native-vision-camera/issues/1837"
-  )
 class CameraNotReadyError :
   CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
@@ -77,10 +70,8 @@ class CaptureAbortedError(wasImageCaptured: Boolean) :
   CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured")
 class UnknownCaptureError(wasImageCaptured: Boolean) :
   CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured")
 class RecorderError(name: String, extra: Int) :
   CameraError("capture", "recorder-error", "An error occured while recording a video! $name $extra")
 class NoRecordingInProgressError :
   CameraError("capture", "no-recording-in-progress", "There was no active video recording in progress! Did you call stopRecording() twice?")
 class RecordingInProgressError :
@@ -92,5 +83,9 @@ class RecordingInProgressError :
 class ViewNotFoundError(viewId: Int) :
   CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")
+class FrameProcessorsUnavailableError(reason: String) :
+  CameraError("system", "frame-processors-unavailable", "Frame Processors are unavailable! Reason: $reason")
+class HardwareBuffersNotAvailableError :
+  CameraError("system", "hardware-buffers-unavailable", "HardwareBuffers are only available on API 28 or higher!")
 class UnknownCameraError(cause: Throwable?) : CameraError("unknown", "unknown", cause?.message ?: "An unknown camera error occured.", cause)

Frame.java

@@ -1,8 +1,10 @@
 package com.mrousavy.camera.frameprocessor;
-import android.graphics.ImageFormat;
+import android.hardware.HardwareBuffer;
 import android.media.Image;
+import android.os.Build;
 import com.facebook.proguard.annotations.DoNotStrip;
+import com.mrousavy.camera.HardwareBuffersNotAvailableError;
 import com.mrousavy.camera.parsers.PixelFormat;
 import com.mrousavy.camera.parsers.Orientation;
@@ -14,6 +16,7 @@ public class Frame {
     private final long timestamp;
     private final Orientation orientation;
     private int refCount = 0;
+    private HardwareBuffer hardwareBuffer = null;
     public Frame(Image image, long timestamp, Orientation orientation, boolean isMirrored) {
         this.image = image;
@@ -89,34 +92,20 @@ public class Frame {
         return image.getPlanes()[0].getRowStride();
     }
-    private static ByteBuffer byteArrayCache;
     @SuppressWarnings("unused")
     @DoNotStrip
-    public ByteBuffer toByteBuffer() {
-        switch (image.getFormat()) {
-            case ImageFormat.YUV_420_888:
-                ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
-                ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
-                ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
-                int ySize = yBuffer.remaining();
-                int uSize = uBuffer.remaining();
-                int vSize = vBuffer.remaining();
-                int totalSize = ySize + uSize + vSize;
-                if (byteArrayCache != null) byteArrayCache.rewind();
-                if (byteArrayCache == null || byteArrayCache.remaining() != totalSize) {
-                    byteArrayCache = ByteBuffer.allocateDirect(totalSize);
-                }
-                byteArrayCache.put(yBuffer).put(uBuffer).put(vBuffer);
-                return byteArrayCache;
-            case ImageFormat.JPEG:
-                return image.getPlanes()[0].getBuffer();
-            default:
-                throw new RuntimeException("Cannot convert Frame with Format " + image.getFormat() + " to byte array!");
-        }
+    public Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError {
+        return getHardwareBuffer();
+    }
+
+    public HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError {
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) {
+            throw new HardwareBuffersNotAvailableError();
+        }
+        if (hardwareBuffer == null) {
+            hardwareBuffer = image.getHardwareBuffer();
+        }
+        return hardwareBuffer;
     }
     @SuppressWarnings("unused")
@@ -142,6 +131,9 @@ public class Frame {
     @SuppressWarnings("unused")
     @DoNotStrip
     private void close() {
+        if (hardwareBuffer != null) {
+            hardwareBuffer.close();
+        }
         image.close();
     }
 }