feat: Sync Frame Processors (plus runAsync and runAtTargetFps) (#1472)
Before, Frame Processors ran on a separate Thread.
After, Frame Processors run fully synchronously, always at the same FPS as the Camera.
Two new functions have been introduced:
* `runAtTargetFps(fps: number, func: () => void)`: Runs the given function at most as often as the given `fps`, effectively throttling its calls.
* `runAsync(frame: Frame, func: () => void)`: Runs the given function on a separate Thread for Frame Processing. A strong reference to the Frame is held as long as the function takes to execute.
You can use `runAtTargetFps` to throttle calls to a specific API (e.g. if your Camera is running at 60 FPS, but you only want to run face detection at ~25 FPS, use `runAtTargetFps(25, ...)`).
You can use `runAsync` to run a heavy algorithm asynchronously, so that the Camera is not blocked while your algorithm runs. This is useful if your main synchronous processor draws something and your async processor does some image analysis on the side.
You can also combine both functions.
Examples:
```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
  runAtTargetFps(10, () => {
    'worklet'
    console.log("I'm running at 10 FPS!")
  })
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
  runAsync(frame, () => {
    'worklet'
    console.log("I'm running on another Thread, I can block for longer!")
  })
}, [])
```
```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  console.log("I'm running at 60 FPS!")
  runAtTargetFps(10, () => {
    'worklet'
    runAsync(frame, () => {
      'worklet'
      console.log("I'm running on another Thread at 10 FPS, I can block for longer!")
    })
  })
}, [])
```
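For the face-detection use case described above, you can combine both helpers: throttle detection to ~25 FPS with `runAtTargetFps` and move it off the Camera Thread with `runAsync`. This is a sketch only; `detectFaces` is a hypothetical Frame Processor Plugin and not part of this commit:
```js
const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  // runs synchronously for every Frame (e.g. at 60 FPS)
  runAtTargetFps(25, () => {
    'worklet'
    // runs at most ~25 times per second
    runAsync(frame, () => {
      'worklet'
      // runs on a separate Thread, so a slow plugin does not block the Camera
      const faces = detectFaces(frame) // hypothetical plugin, shown for illustration
      console.log(`Detected ${faces.length} faces`)
    })
  })
}, [])
```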
On iOS, the commit removes the old FPS-throttling and performance-evaluation logic from the capture delegate, so the Frame Processor is now called synchronously for every frame. The relevant diff hunks:
```diff
@@ -190,8 +190,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
  }

  public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
    // Video Recording runs in the same queue
    if isRecording {
      // Write Video / Audio frame to file
      guard let recordingSession = recordingSession else {
        invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
        return
@@ -211,54 +211,9 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
    }

    if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
      // check if last frame was x nanoseconds ago, effectively throttling FPS
      let frameTime = UInt64(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds * 1_000_000_000.0)
      let lastFrameProcessorCallElapsedTime = frameTime - lastFrameProcessorCall
      let secondsPerFrame = 1.0 / actualFrameProcessorFps
      let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0
      if lastFrameProcessorCallElapsedTime >= UInt64(nanosecondsPerFrame) {
        if !isRunningFrameProcessor {
          // we're not in the middle of executing the Frame Processor, so prepare for next call.
          CameraQueues.frameProcessorQueue.async {
            self.isRunningFrameProcessor = true

            let perfSample = self.frameProcessorPerformanceDataCollector.beginPerformanceSampleCollection()
            let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
            frameProcessor(frame)
            perfSample.endPerformanceSampleCollection()

            self.isRunningFrameProcessor = false
          }
          lastFrameProcessorCall = frameTime
        } else {
          // we're still in the middle of executing a Frame Processor for a previous frame, so a frame was dropped.
          ReactLogger.log(level: .warning, message: "The Frame Processor took so long to execute that a frame was dropped.")
        }
      }

      if isReadyForNewEvaluation {
        // last evaluation was more than 1sec ago, evaluate again
        evaluateNewPerformanceSamples()
      }
    }
  }

  private func evaluateNewPerformanceSamples() {
    lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
    guard let videoDevice = videoDeviceInput?.device else { return }
    guard frameProcessorPerformanceDataCollector.hasEnoughData else { return }

    let maxFrameProcessorFps = Double(videoDevice.activeVideoMinFrameDuration.timescale) * Double(videoDevice.activeVideoMinFrameDuration.value)
    let averageFps = 1.0 / frameProcessorPerformanceDataCollector.averageExecutionTimeSeconds
    let suggestedFrameProcessorFps = max(floor(min(averageFps, maxFrameProcessorFps)), 1)

    if frameProcessorFps.intValue == -1 {
      // frameProcessorFps="auto"
      actualFrameProcessorFps = suggestedFrameProcessorFps
    } else {
      // frameProcessorFps={someCustomFpsValue}
      invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: frameProcessorFps.doubleValue,
                                                           suggestedFps: suggestedFrameProcessorFps)
      // Call the JavaScript Frame Processor func (worklet)
      let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
      frameProcessor(frame)
    }
  }

@@ -270,11 +225,6 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
    }
  }

  private var isReadyForNewEvaluation: Bool {
    let lastPerformanceEvaluationElapsedTime = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorPerformanceEvaluation.uptimeNanoseconds
    return lastPerformanceEvaluationElapsedTime > 1_000_000_000
  }

  /**
   Gets the orientation of the CameraView's images (CMSampleBuffers).
   */
```
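For reference, the throttling gate in the removed native code boils down to a simple elapsed-time comparison against `1 / fps`. A minimal JavaScript sketch of that idea (a standalone illustration, not VisionCamera's actual `runAtTargetFps` implementation) could look like this:
```js
// Minimal sketch of the elapsed-time throttle used by the removed native code.
// Not VisionCamera's actual runAtTargetFps implementation.
function createFpsThrottle(targetFps) {
  const nanosecondsPerFrame = 1_000_000_000 / targetFps
  let lastCallNanoseconds = 0

  return (callback) => {
    // Date.now() is in milliseconds; convert to nanoseconds to mirror the native code.
    const nowNanoseconds = Date.now() * 1_000_000
    if (nowNanoseconds - lastCallNanoseconds >= nanosecondsPerFrame) {
      lastCallNanoseconds = nowNanoseconds
      callback()
    }
    // otherwise: skip this invocation, effectively dropping it for this callback
  }
}

// Usage: the callback runs at most ~10 times per second.
const runAt10Fps = createFpsThrottle(10)
setInterval(() => runAt10Fps(() => console.log('throttled call')), 16)
```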