feat: Rewrite Android C++ part (VisionCameraProxy + JFrame) (#1661)

* First Android rewrite

* Rewrite Android C++ backend

* Pass `ReadableNativeMap`, fix build error

* fix: Fix FrameProcessor init

* Make a bunch of stuff const reference to avoid copies

* Indents

* Cleanup

* indents

* docs: Update Android docs

* Update CameraView.kt

* fix: Format C++ code
Author: Marc Rousavy
Date: 2023-07-22 00:15:11 +02:00
Committed by: GitHub
parent 44ed42d5d6
commit 86dd703c2b
45 changed files with 985 additions and 859 deletions
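
The diffs below replace the old name-based plugin API (`FrameProcessorPlugin(name)` plus `FrameProcessorPlugin.register()`) with a `FrameProcessorPluginRegistry`: plugins now implement `callback(Frame, ReadableNativeMap)` and are resolved by name through `VisionCameraProxy.getFrameProcessorPlugin(name, options)`. A minimal Kotlin sketch of a plugin under the new API — the `ExamplePlugin` class and the `"example_plugin"` name are hypothetical; the signatures come from the classes added in this commit:

import com.facebook.react.bridge.ReadableNativeMap
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
import com.mrousavy.camera.frameprocessor.FrameProcessorPluginRegistry

// Hypothetical plugin implementing the new callback(Frame, ReadableNativeMap?) signature
// (the old signature was callback(ImageProxy, Object[])).
class ExamplePlugin : FrameProcessorPlugin() {
  override fun callback(frame: Frame, params: ReadableNativeMap?): Any? {
    // Frame wraps the ImageProxy and exposes width/height/orientation/timestamp directly.
    return hashMapOf("width" to frame.width, "height" to frame.height)
  }
}

// Registration no longer goes through FrameProcessorPlugin.register(); the registry
// stores an initializer that receives the options map passed in from the JS side.
fun registerExamplePlugin() {
  FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin") { _ ->
    ExamplePlugin()
  }
}

Because getPlugin(name, options) runs the registered initializer at lookup time, each getFrameProcessorPlugin call can construct a freshly configured plugin instance instead of reusing one global registration list.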

View File

@@ -18,7 +18,7 @@ suspend fun CameraView.focus(pointMap: ReadableMap) {
// Getting the point from the previewView needs to be run on the UI thread
val point = withContext(coroutineScope.coroutineContext) {
-previewView.meteringPointFactory.createPoint(x.toFloat(), y.toFloat());
+previewView.meteringPointFactory.createPoint(x.toFloat(), y.toFloat())
}
val action = FocusMeteringAction.Builder(point, FocusMeteringAction.FLAG_AF or FocusMeteringAction.FLAG_AE)

View File

@@ -14,7 +14,7 @@ import java.io.FileOutputStream
import kotlinx.coroutines.guava.await
suspend fun CameraView.takeSnapshot(options: ReadableMap): WritableMap = coroutineScope {
-val camera = camera ?: throw com.mrousavy.camera.CameraNotReadyError()
+val camera = camera ?: throw CameraNotReadyError()
val enableFlash = options.getString("flash") == "on"
try {

View File

@@ -24,15 +24,16 @@ import androidx.lifecycle.*
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.*
import com.facebook.react.uimanager.events.RCTEventEmitter
-import com.mrousavy.camera.frameprocessor.FrameProcessorRuntimeManager
+import com.mrousavy.camera.frameprocessor.Frame
+import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin
+import com.mrousavy.camera.frameprocessor.FrameProcessorPluginRegistry
import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
import kotlinx.coroutines.guava.await
import java.lang.IllegalArgumentException
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import kotlin.math.floor
import kotlin.math.max
import kotlin.math.min
@@ -118,8 +119,9 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
internal var camera: Camera? = null
internal var imageCapture: ImageCapture? = null
internal var videoCapture: VideoCapture<Recorder>? = null
-private var imageAnalysis: ImageAnalysis? = null
+public var frameProcessor: FrameProcessor? = null
private var preview: Preview? = null
+private var imageAnalysis: ImageAnalysis? = null
internal var activeVideoRecording: Recording? = null
@@ -156,10 +158,7 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
private var minZoom: Float = 1f
private var maxZoom: Float = 1f
-@DoNotStrip
-private var mHybridData: HybridData? = null
-@Suppress("LiftReturnOrAssignment", "RedundantIf")
+@Suppress("RedundantIf")
internal val fallbackToSnapshot: Boolean
@SuppressLint("UnsafeOptInUsageError")
get() {
@@ -190,8 +189,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
}
init {
-mHybridData = initHybrid()
previewView = PreviewView(context)
previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
previewView.installHierarchyFitter() // If this is not called correctly, view finder will be black/blank
@@ -244,9 +241,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
imageAnalysis?.targetRotation = outputRotation
}
-private external fun initHybrid(): HybridData
-private external fun frameProcessorCallback(frame: ImageProxy)
override fun getLifecycle(): Lifecycle {
return lifecycleRegistry
}
@@ -461,8 +455,9 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
imageAnalysis = imageAnalysisBuilder.build().apply {
setAnalyzer(cameraExecutor) { image ->
// Call JS Frame Processor
-frameProcessorCallback(image)
-// frame gets closed in FrameHostObject implementation (JS ref counting)
+val frame = Frame(image)
+frameProcessor?.call(frame)
+// ...frame gets closed in FrameHostObject implementation via JS ref counting
}
}
useCases.add(imageAnalysis!!)

View File

@@ -3,16 +3,11 @@ package com.mrousavy.camera
import android.Manifest
import android.content.Context
import android.content.pm.PackageManager
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.os.Build
import android.util.Log
import android.util.Size
import androidx.camera.core.CameraSelector
import androidx.camera.extensions.ExtensionMode
import androidx.camera.extensions.ExtensionsManager
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.video.QualitySelector
import androidx.core.content.ContextCompat
import com.facebook.react.bridge.*
import com.facebook.react.module.annotations.ReactModule
@@ -20,8 +15,9 @@ import com.facebook.react.modules.core.PermissionAwareActivity
import com.facebook.react.modules.core.PermissionListener
import com.facebook.react.uimanager.UIManagerHelper
import com.facebook.react.bridge.ReactApplicationContext
+import com.mrousavy.camera.frameprocessor.VisionCameraInstaller
import java.util.concurrent.ExecutorService
-import com.mrousavy.camera.frameprocessor.FrameProcessorRuntimeManager
+import com.mrousavy.camera.frameprocessor.VisionCameraProxy
import com.mrousavy.camera.parsers.*
import com.mrousavy.camera.utils.*
import kotlinx.coroutines.*
@@ -30,38 +26,21 @@ import java.util.concurrent.Executors
@ReactModule(name = CameraViewModule.TAG)
@Suppress("unused")
-class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBaseJavaModule(reactContext) {
+class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJavaModule(reactContext) {
companion object {
const val TAG = "CameraView"
var RequestCode = 10
-fun parsePermissionStatus(status: Int): String {
-return when (status) {
-PackageManager.PERMISSION_DENIED -> "denied"
-PackageManager.PERMISSION_GRANTED -> "authorized"
-else -> "not-determined"
-}
-}
}
var frameProcessorThread: ExecutorService = Executors.newSingleThreadExecutor()
private val coroutineScope = CoroutineScope(Dispatchers.Default) // TODO: or Dispatchers.Main?
-private var frameProcessorManager: FrameProcessorRuntimeManager? = null
-private fun cleanup() {
-if (coroutineScope.isActive) {
-coroutineScope.cancel("CameraViewModule has been destroyed.")
-}
-}
-override fun onCatalystInstanceDestroy() {
-super.onCatalystInstanceDestroy()
-cleanup()
-}
override fun invalidate() {
super.invalidate()
-cleanup()
+frameProcessorThread.shutdown()
+if (coroutineScope.isActive) {
+coroutineScope.cancel("CameraViewModule has been destroyed.")
+}
}
override fun getName(): String {
@@ -75,6 +54,18 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
return view ?: throw ViewNotFoundError(viewId)
}
+@ReactMethod(isBlockingSynchronousMethod = true)
+fun installFrameProcessorBindings(): Boolean {
+return try {
+val proxy = VisionCameraProxy(reactApplicationContext, frameProcessorThread)
+VisionCameraInstaller.install(proxy)
+true
+} catch (e: Error) {
+Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
+false
+}
+}
@ReactMethod
fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
coroutineScope.launch {
@@ -151,18 +142,6 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
}
}
-@ReactMethod(isBlockingSynchronousMethod = true)
-fun installFrameProcessorBindings(): Boolean {
-try {
-frameProcessorManager = FrameProcessorRuntimeManager(reactApplicationContext, frameProcessorThread)
-frameProcessorManager!!.installBindings()
-return true
-} catch (e: Error) {
-Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
-return false
-}
-}
@ReactMethod
fun getAvailableCameraDevices(promise: Promise) {
coroutineScope.launch {

View File

@@ -43,12 +43,12 @@ class ParallelVideoProcessingNotSupportedError(cause: Throwable) : CameraError("
"See https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices#the-supportsparallelvideoprocessing-prop for more information.", cause)
class FpsNotContainedInFormatError(fps: Int) : CameraError("format", "invalid-fps", "The given FPS were not valid for the currently selected format. Make sure you select a format which `frameRateRanges` includes $fps FPS!")
-class HdrNotContainedInFormatError() : CameraError(
+class HdrNotContainedInFormatError : CameraError(
"format", "invalid-hdr",
"The currently selected format does not support HDR capture! " +
"Make sure you select a format which `frameRateRanges` includes `supportsPhotoHDR`!"
)
-class LowLightBoostNotContainedInFormatError() : CameraError(
+class LowLightBoostNotContainedInFormatError : CameraError(
"format", "invalid-low-light-boost",
"The currently selected format does not support low-light boost (night mode)! " +
"Make sure you select a format which includes `supportsLowLightBoost`."

View File

@@ -0,0 +1,120 @@
package com.mrousavy.camera.frameprocessor;
import android.annotation.SuppressLint;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.media.Image;
import androidx.camera.core.ImageProxy;
import com.facebook.proguard.annotations.DoNotStrip;
import java.nio.ByteBuffer;
public class Frame {
private final ImageProxy imageProxy;
public Frame(ImageProxy imageProxy) {
this.imageProxy = imageProxy;
}
public ImageProxy getImageProxy() {
return imageProxy;
}
@SuppressWarnings("unused")
@DoNotStrip
public int getWidth() {
return imageProxy.getWidth();
}
@SuppressWarnings("unused")
@DoNotStrip
public int getHeight() {
return imageProxy.getHeight();
}
@SuppressWarnings("unused")
@DoNotStrip
public boolean getIsValid() {
try {
@SuppressLint("UnsafeOptInUsageError")
Image image = imageProxy.getImage();
if (image == null) return false;
// will throw an exception if the image is already closed
image.getCropRect();
// no exception thrown, image must still be valid.
return true;
} catch (Exception e) {
// exception thrown, image has already been closed.
return false;
}
}
@SuppressWarnings("unused")
@DoNotStrip
public boolean getIsMirrored() {
Matrix matrix = imageProxy.getImageInfo().getSensorToBufferTransformMatrix();
// TODO: Figure out how to get isMirrored from ImageProxy
return false;
}
@SuppressWarnings("unused")
@DoNotStrip
public long getTimestamp() {
return imageProxy.getImageInfo().getTimestamp();
}
@SuppressWarnings("unused")
@DoNotStrip
public String getOrientation() {
int rotation = imageProxy.getImageInfo().getRotationDegrees();
if (rotation >= 45 && rotation < 135)
return "landscapeRight";
if (rotation >= 135 && rotation < 225)
return "portraitUpsideDown";
if (rotation >= 225 && rotation < 315)
return "landscapeLeft";
return "portrait";
}
@SuppressWarnings("unused")
@DoNotStrip
public int getPlanesCount() {
return imageProxy.getPlanes().length;
}
@SuppressWarnings("unused")
@DoNotStrip
public int getBytesPerRow() {
return imageProxy.getPlanes()[0].getRowStride();
}
private static byte[] byteArrayCache;
@SuppressWarnings("unused")
@DoNotStrip
public byte[] toByteArray() {
switch (imageProxy.getFormat()) {
case ImageFormat.YUV_420_888:
ByteBuffer yBuffer = imageProxy.getPlanes()[0].getBuffer();
ByteBuffer vuBuffer = imageProxy.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int vuSize = vuBuffer.remaining();
if (byteArrayCache == null || byteArrayCache.length != ySize + vuSize) {
byteArrayCache = new byte[ySize + vuSize];
}
yBuffer.get(byteArrayCache, 0, ySize);
vuBuffer.get(byteArrayCache, ySize, vuSize);
return byteArrayCache;
default:
throw new RuntimeException("Cannot convert Frame with Format " + imageProxy.getFormat() + " to byte array!");
}
}
@SuppressWarnings("unused")
@DoNotStrip
private void close() {
imageProxy.close();
}
}

View File

@@ -0,0 +1,27 @@
package com.mrousavy.camera.frameprocessor;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.facebook.jni.HybridData;
import com.facebook.proguard.annotations.DoNotStrip;
/**
* Represents a JS Frame Processor
*/
@SuppressWarnings("JavaJniMissingFunction") // we're using fbjni.
public final class FrameProcessor {
/**
* Call the JS Frame Processor function with the given Frame
*/
public native void call(Frame frame);
@DoNotStrip
@Keep
private final HybridData mHybridData;
public FrameProcessor(HybridData hybridData) {
mHybridData = hybridData;
}
}

View File

@@ -3,8 +3,8 @@ package com.mrousavy.camera.frameprocessor;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
-import androidx.camera.core.ImageProxy;
import com.facebook.proguard.annotations.DoNotStrip;
+import com.facebook.react.bridge.ReadableNativeMap;
/**
* Declares a Frame Processor Plugin.
@@ -12,42 +12,14 @@ import com.facebook.proguard.annotations.DoNotStrip;
@DoNotStrip
@Keep
public abstract class FrameProcessorPlugin {
-private final @NonNull String mName;
/**
* The actual Frame Processor plugin callback. Called for every frame the ImageAnalyzer receives.
-* @param image The CameraX ImageProxy. Don't call .close() on this, as VisionCamera handles that.
+* @param frame The Frame from the Camera. Don't call .close() on this, as VisionCamera handles that.
* @return You can return any primitive, map or array you want. See the
* <a href="https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors-plugins-overview#types">Types</a>
* table for a list of supported types.
*/
@DoNotStrip
@Keep
-public abstract @Nullable Object callback(@NonNull ImageProxy image, @NonNull Object[] params);
-/**
-* Initializes the native plugin part.
-* @param name Specifies the Frame Processor Plugin's name in the Runtime.
-* The actual name in the JS Runtime will be prefixed with two underscores (`__`)
-*/
-protected FrameProcessorPlugin(@NonNull String name) {
-mName = name;
-}
-/**
-* Get the user-defined name of the Frame Processor Plugin.
-*/
-@DoNotStrip
-@Keep
-public @NonNull String getName() {
-return mName;
-}
-/**
-* Registers the given plugin in the Frame Processor Runtime.
-* @param plugin An instance of a plugin.
-*/
-public static void register(@NonNull FrameProcessorPlugin plugin) {
-FrameProcessorRuntimeManager.Companion.addPlugin(plugin);
-}
+public abstract @Nullable Object callback(@NonNull Frame frame, @Nullable ReadableNativeMap params);
}

View File

@@ -0,0 +1,38 @@
package com.mrousavy.camera.frameprocessor;
import androidx.annotation.Keep;
import androidx.annotation.Nullable;
import com.facebook.proguard.annotations.DoNotStrip;
import com.facebook.react.bridge.ReadableNativeMap;
import java.util.HashMap;
import java.util.Map;
@DoNotStrip
@Keep
public class FrameProcessorPluginRegistry {
private static final Map<String, PluginInitializer> Plugins = new HashMap<>();
@DoNotStrip
@Keep
public static void addFrameProcessorPlugin(String name, PluginInitializer pluginInitializer) {
assert !Plugins.containsKey(name) : "Tried to add a Frame Processor Plugin with a name that already exists! " +
"Either choose unique names, or remove the unused plugin. Name: ";
Plugins.put(name, pluginInitializer);
}
@DoNotStrip
@Keep
public static FrameProcessorPlugin getPlugin(String name, ReadableNativeMap options) {
PluginInitializer initializer = Plugins.get(name);
if (initializer == null) {
return null;
}
return initializer.initializePlugin(options);
}
public interface PluginInitializer {
FrameProcessorPlugin initializePlugin(@Nullable ReadableNativeMap options);
}
}

View File

@@ -1,98 +0,0 @@
package com.mrousavy.camera.frameprocessor;
import android.annotation.SuppressLint;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.media.Image;
import androidx.annotation.Keep;
import androidx.camera.core.ImageProxy;
import com.facebook.proguard.annotations.DoNotStrip;
import java.nio.ByteBuffer;
@SuppressWarnings("unused") // used through JNI
@DoNotStrip
@Keep
public class ImageProxyUtils {
@SuppressLint("UnsafeOptInUsageError")
@DoNotStrip
@Keep
public static boolean isImageProxyValid(ImageProxy imageProxy) {
try {
Image image = imageProxy.getImage();
if (image == null) return false;
// will throw an exception if the image is already closed
image.getCropRect();
// no exception thrown, image must still be valid.
return true;
} catch (Exception e) {
// exception thrown, image has already been closed.
return false;
}
}
@DoNotStrip
@Keep
public static boolean isImageProxyMirrored(ImageProxy imageProxy) {
Matrix matrix = imageProxy.getImageInfo().getSensorToBufferTransformMatrix();
// TODO: Figure out how to get isMirrored from ImageProxy
return false;
}
@DoNotStrip
@Keep
public static String getOrientation(ImageProxy imageProxy) {
int rotation = imageProxy.getImageInfo().getRotationDegrees();
if (rotation >= 45 && rotation < 135)
return "landscapeRight";
if (rotation >= 135 && rotation < 225)
return "portraitUpsideDown";
if (rotation >= 225 && rotation < 315)
return "landscapeLeft";
return "portrait";
}
@DoNotStrip
@Keep
public static long getTimestamp(ImageProxy imageProxy) {
return imageProxy.getImageInfo().getTimestamp();
}
@DoNotStrip
@Keep
public static int getPlanesCount(ImageProxy imageProxy) {
return imageProxy.getPlanes().length;
}
@DoNotStrip
@Keep
public static int getBytesPerRow(ImageProxy imageProxy) {
return imageProxy.getPlanes()[0].getRowStride();
}
private static byte[] byteArrayCache;
@DoNotStrip
@Keep
public static byte[] toByteArray(ImageProxy imageProxy) {
switch (imageProxy.getFormat()) {
case ImageFormat.YUV_420_888:
ByteBuffer yBuffer = imageProxy.getPlanes()[0].getBuffer();
ByteBuffer vuBuffer = imageProxy.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int vuSize = vuBuffer.remaining();
if (byteArrayCache == null || byteArrayCache.length != ySize + vuSize) {
byteArrayCache = new byte[ySize + vuSize];
}
yBuffer.get(byteArrayCache, 0, ySize);
vuBuffer.get(byteArrayCache, ySize, vuSize);
return byteArrayCache;
default:
throw new RuntimeException("Cannot convert Frame with Format " + imageProxy.getFormat() + " to byte array!");
}
}
}

View File

@@ -0,0 +1,6 @@
package com.mrousavy.camera.frameprocessor;
@SuppressWarnings("JavaJniMissingFunction") // we use fbjni
public class VisionCameraInstaller {
public static native void install(VisionCameraProxy proxy);
}

View File

@@ -5,6 +5,7 @@ import androidx.annotation.Keep
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.ReactApplicationContext
+import com.facebook.react.bridge.ReadableNativeMap
import com.facebook.react.turbomodule.core.CallInvokerHolderImpl
import com.facebook.react.uimanager.UIManagerHelper
import com.mrousavy.camera.CameraView
@@ -12,12 +13,11 @@ import com.mrousavy.camera.ViewNotFoundError
import java.lang.ref.WeakReference
import java.util.concurrent.ExecutorService
@Suppress("KotlinJniMissingFunction") // I use fbjni, Android Studio is not smart enough to realize that.
class FrameProcessorRuntimeManager(context: ReactApplicationContext, frameProcessorThread: ExecutorService) {
companion object {
const val TAG = "FrameProcessorRuntime"
private val Plugins: ArrayList<FrameProcessorPlugin> = ArrayList()
@Suppress("KotlinJniMissingFunction") // we use fbjni.
class VisionCameraProxy(context: ReactApplicationContext, frameProcessorThread: ExecutorService) {
companion object {
const val TAG = "VisionCameraProxy"
init {
try {
System.loadLibrary("VisionCamera")
@@ -26,52 +26,53 @@ class FrameProcessorRuntimeManager(context: ReactApplicationContext, frameProces
throw e
}
}
-fun addPlugin(plugin: FrameProcessorPlugin) {
-Plugins.add(plugin)
-}
}
@DoNotStrip
-private var mHybridData: HybridData? = null
-private var mContext: WeakReference<ReactApplicationContext>? = null
-private var mScheduler: VisionCameraScheduler? = null
+@Keep
+private var mHybridData: HybridData
+private var mContext: WeakReference<ReactApplicationContext>
+private var mScheduler: VisionCameraScheduler
init {
val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl
val jsRuntimeHolder = context.javaScriptContextHolder.get()
mScheduler = VisionCameraScheduler(frameProcessorThread)
mContext = WeakReference(context)
-mHybridData = initHybrid(jsRuntimeHolder, jsCallInvokerHolder, mScheduler!!)
+mHybridData = initHybrid(jsRuntimeHolder, jsCallInvokerHolder, mScheduler)
}
@Suppress("unused")
@DoNotStrip
@Keep
-fun findCameraViewById(viewId: Int): CameraView {
+private fun findCameraViewById(viewId: Int): CameraView {
Log.d(TAG, "Finding view $viewId...")
-val ctx = mContext?.get()
+val ctx = mContext.get()
val view = if (ctx != null) UIManagerHelper.getUIManager(ctx, viewId)?.resolveView(viewId) as CameraView? else null
Log.d(TAG, if (view != null) "Found view $viewId!" else "Couldn't find view $viewId!")
return view ?: throw ViewNotFoundError(viewId)
}
-fun installBindings() {
-Log.i(TAG, "Installing JSI Bindings on JS Thread...")
-installJSIBindings()
-Log.i(TAG, "Installing Frame Processor Plugins...")
-Plugins.forEach { plugin ->
-registerPlugin(plugin)
-}
-Log.i(TAG, "Successfully installed ${Plugins.count()} Frame Processor Plugins!")
+@DoNotStrip
+@Keep
+fun setFrameProcessor(viewId: Int, frameProcessor: FrameProcessor) {
+val view = findCameraViewById(viewId)
+view.frameProcessor = frameProcessor
+}
+@DoNotStrip
+@Keep
+fun removeFrameProcessor(viewId: Int) {
+val view = findCameraViewById(viewId)
+view.frameProcessor = null
+}
+@DoNotStrip
+@Keep
+fun getFrameProcessorPlugin(name: String, options: ReadableNativeMap): FrameProcessorPlugin {
+return FrameProcessorPluginRegistry.getPlugin(name, options)
+}
// private C++ funcs
private external fun initHybrid(
-jsContext: Long,
-jsCallInvokerHolder: CallInvokerHolderImpl,
-scheduler: VisionCameraScheduler
+jsContext: Long,
+jsCallInvokerHolder: CallInvokerHolderImpl,
+scheduler: VisionCameraScheduler
): HybridData
-private external fun registerPlugin(plugin: FrameProcessorPlugin)
-private external fun installJSIBindings()
}

View File

@@ -0,0 +1,11 @@
package com.mrousavy.camera.parsers
import android.content.pm.PackageManager
fun parsePermissionStatus(status: Int): String {
return when (status) {
PackageManager.PERMISSION_DENIED -> "denied"
PackageManager.PERMISSION_GRANTED -> "authorized"
else -> "not-determined"
}
}