chore: Move onFrame into Callback on Android (#2458)

* Separate to onFrame

* Restructure FP

* Move lib loading into `CameraViewModule`
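In practice this means `VideoPipeline` no longer holds its own `FrameProcessor` reference: it forwards every frame to the `CameraSession.Callback` it receives in its constructor, and `CameraView` (which implements that callback) decides whether a Frame Processor is attached. The following self-contained Kotlin sketch illustrates the pattern with simplified stand-in types, not the real VisionCamera classes:

```kotlin
// Stand-in types for illustration only; the real Frame, FrameProcessor and
// pipeline classes live under com.mrousavy.camera.* and are more involved.
class Frame

fun interface FrameProcessor {
  fun call(frame: Frame)
}

// Mirrors the shape of CameraSession.Callback (only the members used here).
interface Callback {
  fun onFrame(frame: Frame)
  fun onError(error: Throwable)
}

// The pipeline no longer owns a FrameProcessor; it only notifies its callback.
class VideoPipeline(private val callback: Callback) {
  fun onFrameAvailable(frame: Frame) = callback.onFrame(frame)
}

// The view implements the callback and decides whether a Frame Processor runs.
class CameraView : Callback {
  var frameProcessor: FrameProcessor? = null

  override fun onFrame(frame: Frame) {
    frameProcessor?.call(frame)
  }

  override fun onError(error: Throwable) {
    println("Camera error: ${error.message}")
  }
}

fun main() {
  val view = CameraView()
  view.frameProcessor = FrameProcessor { println("processing a frame") }
  VideoPipeline(view).onFrameAvailable(Frame())
}
```

The real classes differ in detail (threading, frame ref-counting, additional callback members), but the ownership direction is the same: the pipeline talks to a callback, never to a Frame Processor directly.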
Marc Rousavy 2024-01-30 14:17:32 +01:00 committed by GitHub
parent 02bc8a979c
commit af14f912fb
6 changed files with 30 additions and 55 deletions

CameraView.kt

@@ -14,6 +14,7 @@ import com.mrousavy.camera.core.CameraSession
 import com.mrousavy.camera.core.CodeScannerFrame
 import com.mrousavy.camera.core.PreviewView
 import com.mrousavy.camera.extensions.installHierarchyFitter
+import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
 import com.mrousavy.camera.types.CameraDeviceFormat
 import com.mrousavy.camera.types.CodeScannerOptions
@@ -39,7 +40,7 @@ import kotlinx.coroutines.launch
 class CameraView(context: Context) :
   FrameLayout(context),
   CoroutineScope,
-  CameraSession.CameraSessionCallback {
+  CameraSession.Callback {
   companion object {
     const val TAG = "CameraView"
   }
@@ -96,10 +97,6 @@ class CameraView(context: Context) :
   private var currentConfigureCall: Long = System.currentTimeMillis()

   internal var frameProcessor: FrameProcessor? = null
-    set(value) {
-      field = value
-      cameraSession.frameProcessor = frameProcessor
-    }

   override val coroutineContext: CoroutineContext = CameraQueues.cameraQueue.coroutineDispatcher
@@ -230,6 +227,10 @@ class CameraView(context: Context) :
     }
   }

+  override fun onFrame(frame: Frame) {
+    frameProcessor?.call(frame)
+  }
+
   override fun onError(error: Throwable) {
     invokeOnError(error)
   }

CameraViewModule.kt

@@ -26,6 +26,17 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   companion object {
     const val TAG = "CameraView"
     var sharedRequestCode = 10
+
+    init {
+      try {
+        // Load the native part of VisionCamera.
+        // Includes the OpenGL VideoPipeline, as well as Frame Processor JSI bindings
+        System.loadLibrary("VisionCamera")
+      } catch (e: UnsatisfiedLinkError) {
+        Log.e(VisionCameraProxy.TAG, "Failed to load VisionCamera C++ library!", e)
+        throw e
+      }
+    }
   }

   private val coroutineScope = CoroutineScope(Dispatchers.Default) // TODO: or Dispatchers.Main?
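One reason this consolidation works: `System.loadLibrary` is ignored when the same library was already loaded by the same class loader, so a single `init` block in `CameraViewModule`'s companion object covers every native consumer, and the per-class loaders removed from `VideoPipeline` and `VisionCameraProxy` below become unnecessary. A minimal sketch of the load-once pattern; the `NativeLibs` object is a hypothetical stand-in, not part of VisionCamera:

```kotlin
// Hypothetical stand-in for a single load-once entry point; in VisionCamera the
// equivalent init block sits in CameraViewModule's companion object (see above).
object NativeLibs {
  init {
    try {
      // Loads the native part of VisionCamera (OpenGL VideoPipeline and the
      // Frame Processor JSI bindings). A later call from the same class loader
      // would be a no-op, so one load here is sufficient.
      System.loadLibrary("VisionCamera")
    } catch (e: UnsatisfiedLinkError) {
      // Fail fast: without the native library, the C++-backed features cannot work.
      System.err.println("Failed to load VisionCamera C++ library: $e")
      throw e
    }
  }

  // Referencing this object (e.g. NativeLibs.ensureLoaded()) triggers the init
  // block exactly once per process.
  fun ensureLoaded() = Unit
}
```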

CameraSession.kt

@@ -37,6 +37,7 @@ import com.mrousavy.camera.extensions.getPreviewTargetSize
 import com.mrousavy.camera.extensions.getVideoSizes
 import com.mrousavy.camera.extensions.openCamera
 import com.mrousavy.camera.extensions.setZoom
+import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
 import com.mrousavy.camera.types.Flash
 import com.mrousavy.camera.types.Orientation
@@ -55,7 +56,7 @@ import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.sync.Mutex
 import kotlinx.coroutines.sync.withLock

-class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: CameraSessionCallback) :
+class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) :
   CameraManager.AvailabilityCallback(),
   Closeable,
   CoroutineScope {
@@ -383,7 +384,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
           size.height,
           video.config.pixelFormat,
           isSelfie,
-          video.config.enableFrameProcessor
+          video.config.enableFrameProcessor,
+          callback
         )
         val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr)
         outputs.add(output)
@@ -615,7 +617,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
   private fun updateVideoOutputs() {
     val videoOutput = videoOutput ?: return
     Log.i(TAG, "Updating Video Outputs...")
-    videoOutput.videoPipeline.setFrameProcessorOutput(frameProcessor)
     videoOutput.videoPipeline.setRecordingSessionOutput(recording)
   }
@@ -720,8 +721,9 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     }
   }

-  interface CameraSessionCallback {
+  interface Callback {
     fun onError(error: Throwable)
+    fun onFrame(frame: Frame)
     fun onInitialized()
     fun onStarted()
     fun onStopped()
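For callers, the visible change in this file is the constructor: `CameraSession` now takes a `Callback` (formerly `CameraSessionCallback`) and hands it down to the `VideoPipeline` it creates. A small wiring sketch, assuming only the constructor signature shown above; the helper function and its name are illustrative, not part of the library:

```kotlin
import android.content.Context
import android.hardware.camera2.CameraManager
import com.mrousavy.camera.core.CameraSession

// Illustrative helper (not part of VisionCamera): builds a CameraSession with
// the new Callback parameter; frames and errors arrive through that callback.
fun createSession(context: Context, callback: CameraSession.Callback): CameraSession {
  val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
  return CameraSession(context, cameraManager, callback)
}
```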

CodeScannerPipeline.kt

@@ -14,7 +14,7 @@ class CodeScannerPipeline(
   val size: Size,
   val format: Int,
   val configuration: CameraConfiguration.CodeScanner,
-  val callback: CameraSession.CameraSessionCallback
+  val callback: CameraSession.Callback
 ) : Closeable {
   companion object {
     // We want to have a buffer of 2 images, but we always only acquire one.

VideoPipeline.kt

@@ -31,26 +31,13 @@ class VideoPipeline(
   val height: Int,
   val format: PixelFormat = PixelFormat.NATIVE,
   private val isMirrored: Boolean = false,
-  enableFrameProcessor: Boolean = false
+  enableFrameProcessor: Boolean = false,
+  private val callback: CameraSession.Callback
 ) : SurfaceTexture.OnFrameAvailableListener,
   Closeable {
   companion object {
     private const val MAX_IMAGES = 3
     private const val TAG = "VideoPipeline"
-
-    init {
-      try {
-        System.loadLibrary("VisionCamera")
-      } catch (e: UnsatisfiedLinkError) {
-        Log.e(
-          TAG,
-          "Failed to load VisionCamera C++ library! " +
-            "OpenGL GPU VideoPipeline cannot be used.",
-          e
-        )
-        throw e
-      }
-    }
   }

   @DoNotStrip
@@ -60,16 +47,13 @@ class VideoPipeline(
   private var transformMatrix = FloatArray(16)
   private var isActive = true

-  // Output 1
-  private var frameProcessor: FrameProcessor? = null
-  // Output 2
-  private var recordingSession: RecordingSession? = null
-
   // Input
   private val surfaceTexture: SurfaceTexture
   val surface: Surface

+  // Output
+  private var recordingSession: RecordingSession? = null
+
   // If Frame Processors are enabled, we go through ImageReader first before we go thru OpenGL
   private var imageReader: ImageReader? = null
   private var imageWriter: ImageWriter? = null
@@ -115,7 +99,7 @@ class VideoPipeline(
       frame.incrementRefCount()
       try {
-        frameProcessor?.call(frame)
+        callback.onFrame(frame)

         if (hasOutputs) {
           // If we have outputs (e.g. a RecordingSession), pass the frame along to the OpenGL pipeline
@@ -141,7 +125,6 @@ class VideoPipeline(
     isActive = false
     imageWriter?.close()
     imageReader?.close()
-    frameProcessor = null
     recordingSession = null
     surfaceTexture.release()
   }
@@ -180,20 +163,6 @@ class VideoPipeline(
       else -> ImageFormat.PRIVATE
     }

-  /**
-   * Configures the Pipeline to also call the given [FrameProcessor] (or null).
-   */
-  fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
-    synchronized(this) {
-      if (frameProcessor != null) {
-        Log.i(TAG, "Setting $width x $height FrameProcessor Output...")
-      } else {
-        Log.i(TAG, "Removing FrameProcessor Output...")
-      }
-      this.frameProcessor = frameProcessor
-    }
-  }
-
   /**
    * Configures the Pipeline to also write Frames to a Surface from a `MediaRecorder` (or null)
    */

VisionCameraProxy.kt

@@ -17,14 +17,6 @@ import java.lang.ref.WeakReference
 class VisionCameraProxy(context: ReactApplicationContext) {
   companion object {
     const val TAG = "VisionCameraProxy"
-    init {
-      try {
-        System.loadLibrary("VisionCamera")
-      } catch (e: UnsatisfiedLinkError) {
-        Log.e(TAG, "Failed to load VisionCamera C++ library!", e)
-        throw e
-      }
-    }
   }

   @DoNotStrip