feat: Sync Frame Processors (plus `runAsync` and `runAtTargetFps`) (#1472)
Before, Frame Processors ran on a separate Thread. After, Frame Processors run fully synchronously and always at the same FPS as the Camera.

Two new functions have been introduced:

* `runAtTargetFps(fps: number, func: () => void)`: Runs the given code as often as the given `fps`, effectively throttling its calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held for as long as the function takes to execute.

You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`).

You can use `runAsync` to run a heavy algorithm asynchronously, so that the Camera is not blocked while your algorithm runs. This is useful if your main sync processor draws something, and your async processor is doing some image analysis on the side.

You can also combine both functions. Examples:

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAtTargetFps(10, () => {
    'worklet'
    console.log("I'm running at 10 FPS!")
  })
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAsync(frame, () => {
    'worklet'
    console.log("I'm running on another Thread, I can block for longer!")
  })
}, [])
```

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")

  runAtTargetFps(10, () => {
    'worklet'
    runAsync(frame, () => {
      'worklet'
      console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
    })
  })
}, [])
```
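For a concrete version of the face-detection use case mentioned above (Camera at 60 FPS, detection at ~25 FPS, heavy work off the camera thread), both functions can be combined as follows. This is only a sketch: `detectFaces` stands in for whatever Frame Processor Plugin you actually use and is not part of this commit.

```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  // Runs synchronously for every Camera frame (e.g. 60 FPS).
  runAtTargetFps(25, () => {
    'worklet'
    // Throttled to roughly 25 calls per second.
    runAsync(frame, () => {
      'worklet'
      // Runs on a separate Thread; the Frame stays alive until this function returns.
      const faces = detectFaces(frame) // hypothetical plugin, not part of this commit
      console.log(`Detected ${faces.length} face(s)`)
    })
  })
}, [])
```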
```diff
@@ -31,17 +31,6 @@ fun CameraView.invokeOnError(error: Throwable) {
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", event)
 }
 
-fun CameraView.invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: Double, suggestedFps: Double) {
-  Log.e(CameraView.TAG, "invokeOnFrameProcessorPerformanceSuggestionAvailable(suggestedFps: $suggestedFps):")
-
-  val event = Arguments.createMap()
-  val type = if (suggestedFps > currentFps) "can-use-higher-fps" else "should-use-lower-fps"
-  event.putString("type", type)
-  event.putDouble("suggestedFrameProcessorFps", suggestedFps)
-  val reactContext = context as ReactContext
-  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraPerformanceSuggestionAvailable", event)
-}
-
 fun CameraView.invokeOnViewReady() {
   val event = Arguments.createMap()
   val reactContext = context as ReactContext
```
```diff
@@ -25,7 +25,6 @@ import com.facebook.jni.HybridData
 import com.facebook.proguard.annotations.DoNotStrip
 import com.facebook.react.bridge.*
 import com.facebook.react.uimanager.events.RCTEventEmitter
-import com.mrousavy.camera.frameprocessor.FrameProcessorPerformanceDataCollector
 import com.mrousavy.camera.frameprocessor.FrameProcessorRuntimeManager
 import com.mrousavy.camera.utils.*
 import kotlinx.coroutines.*
@@ -103,13 +102,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
       field = value
       setOnTouchListener(if (value) touchEventListener else null)
     }
-  var frameProcessorFps = 1.0
-    set(value) {
-      field = value
-      actualFrameProcessorFps = if (value == -1.0) 30.0 else value
-      lastFrameProcessorPerformanceEvaluation = System.currentTimeMillis()
-      frameProcessorPerformanceDataCollector.clear()
-    }
 
   // private properties
   private var isMounted = false
@@ -166,16 +158,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
   private var minZoom: Float = 1f
   private var maxZoom: Float = 1f
 
-  private var actualFrameProcessorFps = 30.0
-  private val frameProcessorPerformanceDataCollector = FrameProcessorPerformanceDataCollector()
-  private var lastSuggestedFrameProcessorFps = 0.0
-  private var lastFrameProcessorPerformanceEvaluation = System.currentTimeMillis()
-  private val isReadyForNewEvaluation: Boolean
-    get() {
-      val lastPerformanceEvaluationElapsedTime = System.currentTimeMillis() - lastFrameProcessorPerformanceEvaluation
-      return lastPerformanceEvaluationElapsedTime > 1000
-    }
-
   @DoNotStrip
   private var mHybridData: HybridData? = null
 
@@ -480,21 +462,8 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
     Log.i(TAG, "Adding ImageAnalysis use-case...")
     imageAnalysis = imageAnalysisBuilder.build().apply {
      setAnalyzer(cameraExecutor, { image ->
-        val now = System.currentTimeMillis()
-        val intervalMs = (1.0 / actualFrameProcessorFps) * 1000.0
-        if (now - lastFrameProcessorCall > intervalMs) {
-          lastFrameProcessorCall = now
-
-          val perfSample = frameProcessorPerformanceDataCollector.beginPerformanceSampleCollection()
-          frameProcessorCallback(image)
-          perfSample.endPerformanceSampleCollection()
-        }
-        image.close()
-
-        if (isReadyForNewEvaluation) {
-          // last evaluation was more than a second ago, evaluate again
-          evaluateNewPerformanceSamples()
-        }
+        // Call JS Frame Processor
+        frameProcessorCallback(image)
      })
     }
     useCases.add(imageAnalysis!!)
@@ -526,22 +495,4 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
       }
     }
   }
-
-  private fun evaluateNewPerformanceSamples() {
-    lastFrameProcessorPerformanceEvaluation = System.currentTimeMillis()
-    val maxFrameProcessorFps = 30 // TODO: Get maxFrameProcessorFps from ImageAnalyser
-    val averageFps = 1.0 / frameProcessorPerformanceDataCollector.averageExecutionTimeSeconds
-    val suggestedFrameProcessorFps = floor(min(averageFps, maxFrameProcessorFps.toDouble()))
-
-    if (frameProcessorFps == -1.0) {
-      // frameProcessorFps="auto"
-      actualFrameProcessorFps = suggestedFrameProcessorFps
-    } else {
-      // frameProcessorFps={someCustomFpsValue}
-      if (suggestedFrameProcessorFps != lastSuggestedFrameProcessorFps && suggestedFrameProcessorFps != frameProcessorFps) {
-        invokeOnFrameProcessorPerformanceSuggestionAvailable(frameProcessorFps, suggestedFrameProcessorFps)
-        lastSuggestedFrameProcessorFps = suggestedFrameProcessorFps
-      }
-    }
-  }
 }
```
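The removed analyzer code throttled the JS call by elapsed time (skip unless more than `1000 / actualFrameProcessorFps` milliseconds have passed) and derived its FPS suggestion from the average execution time. The snippet below is not VisionCamera's actual `runAtTargetFps` implementation, just a plain-JS illustration of the same arithmetic the deleted Kotlin code used (and that the new JS-side throttle presumably performs as well).

```js
// Skip the callback unless at least 1000 / targetFps milliseconds have elapsed
// since the last accepted call - the same check the removed Kotlin analyzer did.
function makeThrottle(targetFps, callback) {
  const intervalMs = 1000 / targetFps
  let lastCall = 0
  return () => {
    const now = Date.now()
    if (now - lastCall > intervalMs) {
      lastCall = now
      callback()
    }
  }
}

// The removed evaluateNewPerformanceSamples() derived its suggestion the same way:
// an average execution time of 0.05 s yields 1 / 0.05 = 20 FPS,
// clamped to the 30 FPS maximum and floored.
const suggestedFps = Math.floor(Math.min(1 / 0.05, 30)) // 20
```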
```diff
@@ -27,7 +27,6 @@ class CameraViewManager(reactContext: ReactApplicationContext) : ViewGroupManage
       .put("cameraViewReady", MapBuilder.of("registrationName", "onViewReady"))
       .put("cameraInitialized", MapBuilder.of("registrationName", "onInitialized"))
       .put("cameraError", MapBuilder.of("registrationName", "onError"))
-      .put("cameraPerformanceSuggestionAvailable", MapBuilder.of("registrationName", "onFrameProcessorPerformanceSuggestionAvailable"))
       .build()
   }
 
@@ -108,13 +107,6 @@ class CameraViewManager(reactContext: ReactApplicationContext) : ViewGroupManage
     view.fps = if (fps > 0) fps else null
   }
 
-  @ReactProp(name = "frameProcessorFps", defaultDouble = 1.0)
-  fun setFrameProcessorFps(view: CameraView, frameProcessorFps: Double) {
-    if (view.frameProcessorFps != frameProcessorFps)
-      addChangedPropToTransaction(view, "frameProcessorFps")
-    view.frameProcessorFps = frameProcessorFps
-  }
-
   @ReactProp(name = "hdr")
   fun setHdr(view: CameraView, hdr: Boolean?) {
     if (view.hdr != hdr)
```
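Since the `frameProcessorFps` prop and the `cameraPerformanceSuggestionAvailable` event are dropped from the Android view manager here, per-use-case throttling moves into the frame processor itself. A rough migration sketch is shown below; the `<Camera>` props other than `frameProcessor` and `frameProcessorFps` are illustrative, not part of this diff.

```js
// Before: throttled natively via a prop
// <Camera device={device} isActive={true} frameProcessor={frameProcessor} frameProcessorFps={10} />

// After: throttle inside the worklet instead
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  runAtTargetFps(10, () => {
    'worklet'
    console.log('Throttled to ~10 FPS')
  })
}, [])
// <Camera device={device} isActive={true} frameProcessor={frameProcessor} />
```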
```diff
@@ -1,38 +0,0 @@
-package com.mrousavy.camera.frameprocessor
-
-data class PerformanceSampleCollection(val endPerformanceSampleCollection: () -> Unit)
-
-// keep a maximum of `maxSampleSize` historical performance data samples cached.
-private const val maxSampleSize = 15
-
-class FrameProcessorPerformanceDataCollector {
-  private var counter = 0
-  private var performanceSamples: ArrayList<Double> = ArrayList()
-
-  val averageExecutionTimeSeconds: Double
-    get() = performanceSamples.average()
-
-  fun beginPerformanceSampleCollection(): PerformanceSampleCollection {
-    val begin = System.currentTimeMillis()
-
-    return PerformanceSampleCollection {
-      val end = System.currentTimeMillis()
-      val seconds = (end - begin) / 1_000.0
-
-      val index = counter % maxSampleSize
-
-      if (performanceSamples.size > index) {
-        performanceSamples[index] = seconds
-      } else {
-        performanceSamples.add(seconds)
-      }
-
-      counter++
-    }
-  }
-
-  fun clear() {
-    counter = 0
-    performanceSamples.clear()
-  }
-}
```