feat: Frame Processors for Android (#196)
* Create android gradle build setup
* Fix `prefab` config
* Add `pickFirst **/*.so` to example build.gradle
* fix REA path
* cache gradle builds
* Update validate-android.yml
* Create Native Proxy
* Copy REA header
* implement ctor
* Rename CameraViewModule -> FrameProcessorRuntimeManager
* init FrameProcessorRuntimeManager
* fix name
* Update FrameProcessorRuntimeManager.h
* format
* Create AndroidErrorHandler.h
* Initialize runtime and install JSI funcs
* Update FrameProcessorRuntimeManager.cpp
* Update CameraViewModule.kt
* Make CameraView hybrid C++ class to find view & set frame processor
* Update FrameProcessorRuntimeManager.cpp
* pass function by rvalue
* pass by const &&
* extract hermes and JSC REA
* pass `FOR_HERMES`
* correctly prepare JSC and Hermes
* Update CMakeLists.txt
* add missing hermes include
* clean up imports
* Create JImageProxy.h
* pass ImageProxy to JNI as `jobject`
* try use `JImageProxy` C++ wrapper type
* Use `local_ref<JImageProxy>`
* Create `JImageProxyHostObject` for JSI interop
* debug call to frame processor
* Unset frame processor
* Fix CameraView native part not being registered
* close image
* use `jobject` instead of `JImageProxy` for now :(
* fix hermes build error
* Set enable FP callback
* fix JNI call
* Update CameraView.cpp
* Get Format
* Create plugin abstract
* Make `FrameProcessorPlugin` a hybrid object
* Register plugin CXX
* Call `registerPlugin`
* Catch
* remove JSI
* Create sample QR code plugin
* register plugins
* Fix missing JNI binding
* Add `mHybridData`
* prefix name with two underscores (`__`)
* Update CameraPage.tsx
* wrap `ImageProxy` in host object
* Use `jobject` for HO box
* Update JImageProxy.h
* reinterpret jobject
* Try using `JImageProxy` instead of `jobject`
* Update JImageProxy.h
* get bytes per row and plane count
* Update CameraView.cpp
* Return base
* add some docs and JNI JSI conversion
* indent
* Convert JSI value to JNI jobject
* using namespace facebook
* Try using class
* Use plain old Object[]
* Try convert JNI -> JSI
* fix decl
* fix bool init
* Correctly link folly
* Update CMakeLists.txt
* Convert Map to Object
* Use folly for Map and Array
* Return `alias_ref<jobject>` instead of raw `jobject`
* fix JNI <-> JSI conversion
* Update JSIJNIConversion.cpp
* Log parameters
* fix params index offset
* add more test cases
* Update FRAME_PROCESSORS_CREATE_OVERVIEW.mdx
* fix types
* Rename to example plugin
* remove support for hashmap
* Try use HashMap iterable fbjni binding
* try using JReadableArray/JReadableMap
* Fix list return values
* Update JSIJNIConversion.cpp
* Update JSIJNIConversion.cpp
* (iOS) Rename ObjC QR Code Plugin to Example Plugin
* Rename Swift plugin QR -> Example
* Update ExamplePluginSwift.swift
* Fix Map/Dictionary logging format
* Update ExampleFrameProcessorPlugin.m
* Reconfigure session if frame processor changed
* Handle use-cases via `maxUseCasesCount`
* Don't crash app on `configureSession` error
* Document "use-cases"
* Update DEVICES.mdx
* fix merge
* Make `const &`
* iOS: Automatically enable `video` if a `frameProcessor` is set
* Update CameraView.cpp
* fix docs
* Automatically fallback to snapshot capture if `supportsParallelVideoProcessing` is false.
* Fix lookup
* Update CameraView.kt
* Implement `frameProcessorFps`
* Finalize Frame Processor Plugin Hybrid
* Update CameraViewModule.kt
* Support `flash` on `takeSnapshot()`
* Update docs
* Add docs
* Update CameraPage.tsx
* Attribute NonNull
* remove unused imports
* Add Android docs for Frame Processors
* Make JNI HashMap <-> JSI Object conversion faster directly access `toHashMap` instead of going through java
* add todo
* Always run `prepareJSC` and `prepareHermes`
* switch jsc and hermes
* Specify ndkVersion `21.4.7075529`
* Update gradle.properties
* Update gradle.properties
* Create .aar
* Correctly prepare android package
* Update package.json
* Update package.json
* remove `prefab` build feature
* split
* Add docs for registering the FP plugin
* Add step for dep
* Update CaptureButton.tsx
* Move to `reanimated-headers/`
* Exclude reanimated-headers from cpplint
* disable `build/include_order` rule
* cpplint fixes
* perf: Make `JSIJNIConversion` a `namespace` instead of `class`
* Ignore runtime/references for `convert` funcs
* Build Android .aar in CI
* Run android build script only on `prepack`
* Update package.json
* Update package.json
* Update build-android-npm-package.sh
* Move to `yarn build`
* Also install node_modules in example step
* Update validate-android.yml
* sort imports
* fix torch
* Run ImageAnalysis on `FrameProcessorThread`
* Update Errors.kt
* Add clean android script
* Upgrade reanimated to 2.3.0-alpha.1
* Revert "Upgrade reanimated to 2.3.0-alpha.1"
  This reverts commit c1d3bed5e03728d0b5e335a359524ff4f56f5035.
* ⚠️ TEMP FIX: hotfix reanimated build.gradle
* Update CameraView+TakeSnapshot.kt
* ⚠️ TEMP FIX: Disable ktlint action for now
* Update clean.sh
* Set max heap size to 4g
* rebuild lockfiles
* Update Podfile.lock
* rename
* Build lib .aar before example/
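For context, a custom Frame Processor Plugin written against the Android API added in this PR is registered once on the native side, before the React context initializes and the plugin list is frozen. The sketch below is illustrative only: `MyExamplePlugin` is a hypothetical plugin (a matching implementation sketch follows the `FrameProcessorPlugin` class further down), and registering from `Application.onCreate()` is one possible place, not something this commit prescribes.

// Illustrative sketch (not part of this commit): register a Frame Processor Plugin at app startup,
// before FrameProcessorRuntimeManager initializes and freezes the plugin list.
import android.app.Application
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin

class MainApplication : Application() {
  override fun onCreate() {
    super.onCreate()
    FrameProcessorPlugin.register(MyExamplePlugin()) // `MyExamplePlugin` is hypothetical, sketched below
  }
}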
@@ -17,8 +17,13 @@ import kotlin.system.measureTimeMillis
@SuppressLint("UnsafeOptInUsageError")
suspend fun CameraView.takePhoto(options: ReadableMap): WritableMap = coroutineScope {
if (fallbackToSnapshot) {
Log.i(CameraView.TAG, "takePhoto() called, but falling back to Snapshot because 1 use-case is already occupied.")
return@coroutineScope takeSnapshot(options)
}

val startFunc = System.nanoTime()
Log.d(CameraView.TAG, "takePhoto() called")
Log.i(CameraView.TAG, "takePhoto() called")
if (imageCapture == null) {
if (photo == true) {
throw CameraNotReadyError()
@@ -11,32 +11,48 @@ import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.withContext
import java.io.File
import java.io.FileOutputStream
import kotlinx.coroutines.guava.await

suspend fun CameraView.takeSnapshot(options: ReadableMap): WritableMap = coroutineScope {
val bitmap = this@takeSnapshot.previewView.bitmap ?: throw CameraNotReadyError()
val camera = camera ?: throw com.mrousavy.camera.CameraNotReadyError()
val enableFlash = options.getString("flash") == "on"

val quality = if (options.hasKey("quality")) options.getInt("quality") else 100

val file: File
val exif: ExifInterface
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
file = File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
FileOutputStream(file).use { stream ->
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, stream)
try {
if (enableFlash) {
camera.cameraControl.enableTorch(true).await()
}

val bitmap = this@takeSnapshot.previewView.bitmap ?: throw CameraNotReadyError()

val quality = if (options.hasKey("quality")) options.getInt("quality") else 100

val file: File
val exif: ExifInterface
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
file = File.createTempFile("mrousavy", ".jpg", context.cacheDir).apply { deleteOnExit() }
FileOutputStream(file).use { stream ->
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, stream)
}
exif = ExifInterface(file)
}

val map = Arguments.createMap()
map.putString("path", file.absolutePath)
map.putInt("width", bitmap.width)
map.putInt("height", bitmap.height)
map.putBoolean("isRawPhoto", false)

val skipMetadata =
if (options.hasKey("skipMetadata")) options.getBoolean("skipMetadata") else false
val metadata = if (skipMetadata) null else exif.buildMetadataMap()
map.putMap("metadata", metadata)

return@coroutineScope map
} finally {
if (enableFlash) {
// reset to `torch` property
camera.cameraControl.enableTorch(this@takeSnapshot.torch == "on")
}
exif = ExifInterface(file)
}

val map = Arguments.createMap()
map.putString("path", file.absolutePath)
map.putInt("width", bitmap.width)
map.putInt("height", bitmap.height)
map.putBoolean("isRawPhoto", false)

val skipMetadata = if (options.hasKey("skipMetadata")) options.getBoolean("skipMetadata") else false
val metadata = if (skipMetadata) null else exif.buildMetadataMap()
map.putMap("metadata", metadata)

return@coroutineScope map
}
@@ -21,6 +21,8 @@ import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import androidx.lifecycle.*
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.*
import com.facebook.react.uimanager.events.RCTEventEmitter
import com.mrousavy.camera.utils.*
@@ -82,11 +84,14 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
var torch = "off"
var zoom = 0.0 // in percent
var enableZoomGesture = false
var frameProcessorFps = 1.0

// private properties
private val reactContext: ReactContext
get() = context as ReactContext

private var enableFrameProcessor = false

@Suppress("JoinDeclarationAndAssignment")
internal val previewView: PreviewView
private val cameraExecutor = Executors.newSingleThreadExecutor()
@@ -96,6 +101,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
internal var camera: Camera? = null
internal var imageCapture: ImageCapture? = null
internal var videoCapture: VideoCapture? = null
internal var imageAnalysis: ImageAnalysis? = null

private val scaleGestureListener: ScaleGestureDetector.SimpleOnScaleGestureListener
private val scaleGestureDetector: ScaleGestureDetector
@@ -107,7 +113,42 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
private var minZoom: Float = 1f
private var maxZoom: Float = 1f

@DoNotStrip
private var mHybridData: HybridData?

@Suppress("LiftReturnOrAssignment", "RedundantIf")
internal val fallbackToSnapshot: Boolean
@SuppressLint("UnsafeOptInUsageError")
get() {
if (video != true && !enableFrameProcessor) {
// Both use-cases are disabled, so `photo` is the only use-case anyways. Don't need to fallback here.
return false
}
cameraId?.let { cameraId ->
val cameraManger = reactContext.getSystemService(Context.CAMERA_SERVICE) as? CameraManager
cameraManger?.let {
val characteristics = cameraManger.getCameraCharacteristics(cameraId)
val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
// Camera only supports a single use-case at a time
return true
} else {
if (video == true && enableFrameProcessor) {
// Camera supports max. 2 use-cases, but both are occupied by `frameProcessor` and `video`
return true
} else {
// Camera supports max. 2 use-cases and only one is occupied (either `frameProcessor` or `video`), so we can add `photo`
return false
}
}
}
}
return false
}

init {
mHybridData = initHybrid()

previewView = PreviewView(context)
previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
previewView.installHierarchyFitter() // If this is not called correctly, view finder will be black/blank
@@ -144,6 +185,28 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
})
}

fun finalize() {
mHybridData?.resetNative()
}

private external fun initHybrid(): HybridData
private external fun frameProcessorCallback(frame: ImageProxy)

@Suppress("unused")
@DoNotStrip
fun setEnableFrameProcessor(enable: Boolean) {
Log.d(TAG, "Set enable frame processor: $enable")
val before = enableFrameProcessor
enableFrameProcessor = enable

if (before != enable) {
// reconfigure session if frame processor was added/removed to adjust use-cases.
GlobalScope.launch(Dispatchers.Main) {
configureSession()
}
}
}

override fun getLifecycle(): Lifecycle {
return lifecycleRegistry
}
@@ -245,6 +308,10 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
val videoCaptureBuilder = VideoCapture.Builder()
.setTargetRotation(rotation)
val imageAnalysisBuilder = ImageAnalysis.Builder()
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.setTargetRotation(rotation)
.setBackgroundExecutor(CameraViewModule.FrameProcessorThread)

if (format == null) {
// let CameraX automatically find best resolution for the target aspect ratio
@@ -311,11 +378,10 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
}
}

val preview = previewBuilder.build()

// Unbind use cases before rebinding
videoCapture = null
imageCapture = null
imageAnalysis = null
cameraProvider.unbindAll()

// Bind use cases to camera
@@ -325,9 +391,32 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
useCases.add(videoCapture!!)
}
if (photo == true) {
imageCapture = imageCaptureBuilder.build()
useCases.add(imageCapture!!)
if (fallbackToSnapshot) {
Log.i(TAG, "Tried to add photo use-case (`photo={true}`) but the Camera device only supports " +
"a single use-case at a time. Falling back to Snapshot capture.")
} else {
imageCapture = imageCaptureBuilder.build()
useCases.add(imageCapture!!)
}
}
if (enableFrameProcessor) {
var lastCall = System.currentTimeMillis() - 1000
val intervalMs = (1.0 / frameProcessorFps) * 1000.0
imageAnalysis = imageAnalysisBuilder.build().apply {
setAnalyzer(cameraExecutor, { image ->
val now = System.currentTimeMillis()
if (now - lastCall > intervalMs) {
lastCall = now
Log.d(TAG, "Calling Frame Processor...")
frameProcessorCallback(image)
}
image.close()
})
}
useCases.add(imageAnalysis!!)
}

val preview = previewBuilder.build()
camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, *useCases.toTypedArray())
preview.setSurfaceProvider(previewView.surfaceProvider)
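The `frameProcessorFps` throttle in the analyzer above boils down to a simple interval gate: for example, `frameProcessorFps = 5.0` gives `intervalMs = 200.0`, so at most one Frame Processor call every 200 ms. A simplified, standalone restatement of that logic (not additional commit code):

// Same gate as in the ImageAnalysis analyzer above: skip frames until `intervalMs` has elapsed.
class FrameProcessorThrottle(frameProcessorFps: Double) {
  private val intervalMs = (1.0 / frameProcessorFps) * 1000.0 // e.g. 5.0 fps -> 200.0 ms
  private var lastCall = System.currentTimeMillis() - 1000

  fun shouldCall(now: Long = System.currentTimeMillis()): Boolean {
    if (now - lastCall > intervalMs) {
      lastCall = now
      return true
    }
    return false
  }
}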
@@ -338,11 +427,18 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
Log.i(TAG_PERF, "Session configured in $duration ms! Camera: ${camera!!}")
invokeOnInitialized()
} catch (exc: Throwable) {
throw when (exc) {
val error = when (exc) {
is CameraError -> exc
is IllegalArgumentException -> InvalidCameraDeviceError(exc)
is IllegalArgumentException -> {
if (exc.message?.contains("too many use cases") == true) {
ParallelVideoProcessingNotSupportedError(exc)
} else {
InvalidCameraDeviceError(exc)
}
}
else -> UnknownCameraError(exc)
}
invokeOnError(error)
}
}
@@ -381,7 +477,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
const val TAG = "CameraView"
const val TAG_PERF = "CameraView.performance"

private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video")
private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video", "frameProcessorFps")

private val arrayListOfZoom = arrayListOf("zoom")
}
@@ -1,7 +1,6 @@
package com.mrousavy.camera

import android.util.Log
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.common.MapBuilder
import com.facebook.react.uimanager.SimpleViewManager
@@ -81,6 +80,13 @@ class CameraViewManager : SimpleViewManager<CameraView>() {
view.fps = if (fps > 0) fps else null
}

@ReactProp(name = "frameProcessorFps", defaultDouble = 1.0)
fun setFrameProcessorFps(view: CameraView, frameProcessorFps: Double) {
if (view.frameProcessorFps != frameProcessorFps)
addChangedPropToTransaction(view, "frameProcessorFps")
view.frameProcessorFps = frameProcessorFps
}

@ReactProp(name = "hdr")
fun setHdr(view: CameraView, hdr: Boolean?) {
if (view.hdr != hdr)
@@ -17,8 +17,11 @@ import androidx.core.content.ContextCompat
import com.facebook.react.bridge.*
import com.facebook.react.modules.core.PermissionAwareActivity
import com.facebook.react.modules.core.PermissionListener
import com.mrousavy.camera.frameprocessor.FrameProcessorRuntimeManager
import com.mrousavy.camera.parsers.*
import com.mrousavy.camera.utils.*
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import kotlinx.coroutines.*
import kotlinx.coroutines.guava.await
@@ -26,6 +29,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
companion object {
const val REACT_CLASS = "CameraView"
var RequestCode = 10
val FrameProcessorThread: ExecutorService = Executors.newSingleThreadExecutor()

fun parsePermissionStatus(status: Int): String {
return when (status) {
@@ -36,6 +40,23 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
}
}

private var frameProcessorManager: FrameProcessorRuntimeManager? = null

override fun initialize() {
super.initialize()
FrameProcessorThread.execute {
frameProcessorManager = FrameProcessorRuntimeManager(reactApplicationContext)
reactApplicationContext.runOnJSQueueThread {
frameProcessorManager!!.installJSIBindings()
}
}
}

override fun onCatalystInstanceDestroy() {
super.onCatalystInstanceDestroy()
frameProcessorManager?.destroy()
}

override fun getName(): String {
return REACT_CLASS
}
@@ -73,7 +94,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
onRecordCallback(null, map)
} catch (error: Throwable) {
val map = makeErrorMap("capture/unknown", "An unknown error occured while trying to start a video recording!", error)
val map = makeErrorMap("capture/unknown", "An unknown error occurred while trying to start a video recording!", error)
onRecordCallback(null, map)
}
}
@@ -149,6 +170,8 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
val supportsHdr = hdrExtension.isExtensionAvailable(cameraSelector)
val nightExtension = NightImageCaptureExtender.create(imageCaptureBuilder)
val supportsLowLightBoost = nightExtension.isExtensionAvailable(cameraSelector)
// see https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture
val supportsParallelVideoProcessing = hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY && hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED

val fieldOfView = characteristics.getFieldOfView()
@@ -160,7 +183,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
map.putBoolean("hasFlash", hasFlash)
map.putBoolean("hasTorch", hasFlash)
map.putBoolean("isMultiCam", isMultiCam)
map.putBoolean("supportsPhotoAndVideoCapture", hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY)
map.putBoolean("supportsParallelVideoProcessing", supportsParallelVideoProcessing)
map.putBoolean("supportsRawCapture", supportsRawCapture)
map.putBoolean("supportsDepthCapture", supportsDepthCapture)
map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)
@@ -37,6 +37,9 @@ class InvalidTypeScriptUnionError(unionName: String, unionValue: String) : Camer
class NoCameraDeviceError : CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
class InvalidCameraDeviceError(cause: Throwable) : CameraError("device", "invalid-device", "The given Camera device could not be found for use-case binding!", cause)
class ParallelVideoProcessingNotSupportedError(cause: Throwable) : CameraError("device", "parallel-video-processing-not-supported", "The given LEGACY Camera device does not support parallel " +
"video processing (`video={true}` + `frameProcessor={...}`). Disable either `video` or `frameProcessor`. To find out if a device supports parallel video processing, check the `supportsParallelVideoProcessing` property on the CameraDevice. " +
"See https://mrousavy.github.io/react-native-vision-camera/docs/guides/lifecycle#the-supportsparallelvideoprocessing-prop for more information.", cause)

class FpsNotContainedInFormatError(fps: Int) : CameraError("format", "invalid-fps", "The given FPS were not valid for the currently selected format. Make sure you select a format which `frameRateRanges` includes $fps FPS!")
class HdrNotContainedInFormatError() : CameraError(
@@ -0,0 +1,54 @@
package com.mrousavy.camera.frameprocessor;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.ImageProxy;
import com.facebook.jni.HybridData;

/**
* Declares a Frame Processor Plugin.
*/
public abstract class FrameProcessorPlugin {
static {
System.loadLibrary("VisionCamera");
}

@SuppressWarnings({"FieldCanBeLocal", "unused"})
private final HybridData mHybridData;

/**
* The actual Frame Processor plugin callback. Called for every frame the ImageAnalyzer receives.
* @param image The CameraX ImageProxy. Don't call .close() on this, as VisionCamera handles that.
* @return You can return any primitive, map or array you want. See the
* <a href="https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors-plugins-overview#types">Types</a>
* table for a list of supported types.
*/
public abstract @Nullable Object callback(@NonNull ImageProxy image, @NonNull Object[] params);

/**
* Initializes the native plugin part.
* @param name Specifies the Frame Processor Plugin's name in the Runtime.
* The actual name in the JS Runtime will be prefixed with two underscores (`__`)
*/
protected FrameProcessorPlugin(@NonNull String name) {
mHybridData = initHybrid(name);
}

@Override
protected void finalize() throws Throwable {
super.finalize();
if (mHybridData != null) {
mHybridData.resetNative();
}
}

private native @NonNull HybridData initHybrid(@NonNull String name);

/**
* Registers the given plugin in the Frame Processor Runtime.
* @param plugin An instance of a plugin.
*/
public static void register(@NonNull FrameProcessorPlugin plugin) {
FrameProcessorRuntimeManager.Companion.getPlugins().add(plugin);
}
}
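A concrete plugin implementing the abstract class above might look like the following Kotlin sketch. The plugin name "example_plugin" and the return value are illustrative; as the Javadoc notes, the callback must not close the ImageProxy, and the name is exposed to JS with a `__` prefix (here `__example_plugin`).

// Illustrative sketch (not part of this commit): a minimal Frame Processor Plugin in Kotlin.
import androidx.camera.core.ImageProxy
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin

class MyExamplePlugin : FrameProcessorPlugin("example_plugin") {
  override fun callback(image: ImageProxy, params: Array<Any>): Any? {
    // Runs on the Frame Processor thread for every analyzed frame (rate-limited by `frameProcessorFps`).
    // Do not call image.close() here - VisionCamera closes the frame after the callback returns.
    return "${image.width}x${image.height}" // see the linked Types table for other supported return types
  }
}

Registering it once at startup (e.g. FrameProcessorPlugin.register(MyExamplePlugin()), as sketched near the top) makes it callable from a frame processor as __example_plugin.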
@@ -0,0 +1,75 @@
package com.mrousavy.camera.frameprocessor

import android.util.Log
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.turbomodule.core.CallInvokerHolderImpl
import com.mrousavy.camera.CameraView
import com.mrousavy.camera.ViewNotFoundError
import com.swmansion.reanimated.Scheduler
import java.lang.ref.WeakReference

class FrameProcessorRuntimeManager(context: ReactApplicationContext) {
companion object {
const val TAG = "FrameProcessorRuntime"
private var HasRegisteredPlugins = false
val Plugins: ArrayList<FrameProcessorPlugin> = ArrayList()
get() {
if (HasRegisteredPlugins) {
throw Error("Tried to access Frame Processor Plugin list, " +
"but plugins have already been registered (list is frozen now!).")
}
return field
}

init {
System.loadLibrary("reanimated")
System.loadLibrary("VisionCamera")
}
}

@DoNotStrip
private var mHybridData: HybridData?
private var mContext: WeakReference<ReactApplicationContext>?
private var mScheduler: Scheduler?

init {
val holder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl
mScheduler = Scheduler(context)
mContext = WeakReference(context)
mHybridData = initHybrid(context.javaScriptContextHolder.get(), holder, mScheduler!!)
initializeRuntime()

Log.i(TAG, "Installing Frame Processor Plugins...")
Plugins.forEach { plugin ->
registerPlugin(plugin)
}
Log.i(TAG, "Successfully installed ${Plugins.count()} Frame Processor Plugins!")
HasRegisteredPlugins = true
}

fun destroy() {
mScheduler?.deactivate()
mHybridData?.resetNative()
}

fun findCameraViewById(viewId: Int): CameraView {
Log.d(TAG, "finding view $viewId...")
val view = mContext?.get()?.currentActivity?.findViewById<CameraView>(viewId)
Log.d(TAG, "found view $viewId! is null: ${view == null}")
return view ?: throw ViewNotFoundError(viewId)
}

// private C++ funcs
private external fun initHybrid(
jsContext: Long,
jsCallInvokerHolder: CallInvokerHolderImpl,
scheduler: Scheduler
): HybridData?
private external fun initializeRuntime()
private external fun registerPlugin(plugin: FrameProcessorPlugin)

// public C++ funcs
external fun installJSIBindings()
}
@@ -14,7 +14,6 @@ import java.io.FileOutputStream
import java.nio.ByteBuffer
import kotlin.system.measureTimeMillis


// TODO: Fix this flip() function (this outputs a black image)
fun flip(imageBytes: ByteArray, imageWidth: Int): ByteArray {
// separate out the sub arrays