fix: Fix UI Thread race condition in setFrameProcessor(...) (#265)

* fix: Fix UI Thread race condition in `setFrameProcessor(...)`

* Revert "fix: Fix UI Thread race condition in `setFrameProcessor(...)`"

This reverts commit 9c524e123cff6843d7d11db602a5027d1bb06b4b.

* Use `setImmediate` to call `setFrameProcessor(...)`

* Fix frame processor order of applying

* Add `enableFrameProcessor` prop that defines if a FP is added

* rename constant

* Implement `enableFrameProcessor` prop for Android and make `frameProcessorFps` faster

* link to troubleshooting guide

* Update TROUBLESHOOTING.mdx

* Add logs for use-cases

* fix log

* set initial frame processor in `onLayout` instead of `componentDidMount`
Marc Rousavy 2021-07-12 15:16:03 +02:00 committed by GitHub
parent 7acae0c8a8
commit 4b4ea0ff33
11 changed files with 81 additions and 99 deletions
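The last bullet above is the heart of the fix: instead of attaching the initial frame processor in `componentDidMount` (which on Android raced against native view creation and was papered over with a 300 ms timeout), the `Camera` component now waits for the native view's first `onLayout` event and attaches it there. A minimal TypeScript sketch of that pattern, outside the real component and with hypothetical names (`FrameProcessorSketch`, `attach`):

import React from 'react';
import { View, LayoutChangeEvent, ViewProps } from 'react-native';

type Frame = unknown; // stand-in for the library's Frame type

interface SketchProps extends ViewProps {
  frameProcessor?: (frame: Frame) => void;
  children?: React.ReactNode;
}

// Hypothetical, stripped-down component: only the layout-based attach logic is shown.
export class FrameProcessorSketch extends React.PureComponent<SketchProps> {
  private isNativeViewMounted = false;

  private onLayout = (event: LayoutChangeEvent): void => {
    if (!this.isNativeViewMounted) {
      // First layout pass: the native view exists now, so attaching the
      // frame processor can no longer race against native view creation.
      this.isNativeViewMounted = true;
      if (this.props.frameProcessor != null) this.attach(this.props.frameProcessor);
    }
    this.props.onLayout?.(event);
  };

  private attach(frameProcessor: (frame: Frame) => void): void {
    // In the real component this resolves the native view handle and calls
    // the JSI binding; here it is intentionally left empty.
  }

  render(): React.ReactNode {
    return <View onLayout={this.onLayout}>{this.props.children}</View>;
  }
}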

View File

@@ -30,8 +30,7 @@ void CameraView::registerNatives() {
 void CameraView::frameProcessorCallback(const alias_ref<JImageProxy::javaobject>& frame) {
   if (frameProcessor_ == nullptr) {
-    __android_log_write(ANDROID_LOG_WARN, TAG, "Frame Processor is null!");
-    setEnableFrameProcessor(false);
+    __android_log_write(ANDROID_LOG_WARN, TAG, "Called Frame Processor callback, but `frameProcessor` is null!");
     return;
   }
@@ -45,24 +44,12 @@ void CameraView::frameProcessorCallback(const alias_ref<JImageProxy::javaobject>
   }
 }
-void CameraView::setEnableFrameProcessor(bool enable) {
-  if (enable) {
-    __android_log_write(ANDROID_LOG_INFO, TAG, "Enabling Frame Processor Callback...");
-  } else {
-    __android_log_write(ANDROID_LOG_INFO, TAG, "Disabling Frame Processor Callback...");
-  }
-  static const auto javaMethod = javaPart_->getClass()->getMethod<void(bool)>("setEnableFrameProcessor");
-  javaMethod(javaPart_.get(), enable);
-}
 void CameraView::setFrameProcessor(const FrameProcessor&& frameProcessor) {
   frameProcessor_ = frameProcessor;
-  setEnableFrameProcessor(true);
 }
 void vision::CameraView::unsetFrameProcessor() {
   frameProcessor_ = nullptr;
-  setEnableFrameProcessor(false);
 }
 } // namespace vision

View File

@@ -26,7 +26,6 @@ class CameraView : public jni::HybridClass<CameraView> {
   // TODO: Use template<> to avoid heap allocation for std::function<>
   void setFrameProcessor(const FrameProcessor&& frameProcessor);
   void unsetFrameProcessor();
-  void setEnableFrameProcessor(bool enable);
  private:
   friend HybridBase;

View File

@@ -71,6 +71,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   var photo: Boolean? = null
   var video: Boolean? = null
   var audio: Boolean? = null
+  var enableFrameProcessor = false
   // props that require format reconfiguring
   var format: ReadableMap? = null
   var fps: Int? = null
@@ -88,8 +89,6 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   private val reactContext: ReactContext
     get() = context as ReactContext
-  private var enableFrameProcessor = false
   @Suppress("JoinDeclarationAndAssignment")
   internal val previewView: PreviewView
   private val cameraExecutor = Executors.newSingleThreadExecutor()
@@ -99,7 +98,10 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   internal var camera: Camera? = null
   internal var imageCapture: ImageCapture? = null
   internal var videoCapture: VideoCapture? = null
-  internal var imageAnalysis: ImageAnalysis? = null
+  private var imageAnalysis: ImageAnalysis? = null
+  private var lastFrameProcessorCall = System.currentTimeMillis()
   private var extensionsManager: ExtensionsManager? = null
   private val scaleGestureListener: ScaleGestureDetector.SimpleOnScaleGestureListener
@@ -191,26 +193,6 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
   private external fun initHybrid(): HybridData
   private external fun frameProcessorCallback(frame: ImageProxy)
-  @Suppress("unused")
-  @DoNotStrip
-  fun setEnableFrameProcessor(enable: Boolean) {
-    Log.d(TAG, "Set enable frame processor: $enable")
-    val before = enableFrameProcessor
-    enableFrameProcessor = enable
-    if (before != enable) {
-      // reconfigure session if frame processor was added/removed to adjust use-cases.
-      GlobalScope.launch(Dispatchers.Main) {
-        try {
-          configureSession()
-        } catch (e: Throwable) {
-          Log.e(TAG, "Failed to configure session after setting frame processor! ${e.message}")
-          invokeOnError(e)
-        }
-      }
-    }
-  }
   override fun getLifecycle(): Lifecycle {
     return lifecycleRegistry
   }
@@ -383,6 +365,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
     // Bind use cases to camera
     val useCases = ArrayList<UseCase>()
     if (video == true) {
+      Log.i(TAG, "Adding VideoCapture use-case...")
      videoCapture = videoCaptureBuilder.build()
      useCases.add(videoCapture!!)
     }
@@ -391,18 +374,19 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
         Log.i(TAG, "Tried to add photo use-case (`photo={true}`) but the Camera device only supports " +
           "a single use-case at a time. Falling back to Snapshot capture.")
       } else {
+        Log.i(TAG, "Adding ImageCapture use-case...")
         imageCapture = imageCaptureBuilder.build()
         useCases.add(imageCapture!!)
       }
     }
     if (enableFrameProcessor) {
-      var lastCall = System.currentTimeMillis() - 1000
-      val intervalMs = (1.0 / frameProcessorFps) * 1000.0
+      Log.i(TAG, "Adding ImageAnalysis use-case...")
       imageAnalysis = imageAnalysisBuilder.build().apply {
         setAnalyzer(cameraExecutor, { image ->
           val now = System.currentTimeMillis()
-          if (now - lastCall > intervalMs) {
-            lastCall = now
+          val intervalMs = (1.0 / frameProcessorFps) * 1000.0
+          if (now - lastFrameProcessorCall > intervalMs) {
+            lastFrameProcessorCall = now
             frameProcessorCallback(image)
           }
           image.close()
@@ -477,7 +461,7 @@ class CameraView(context: Context) : FrameLayout(context), LifecycleOwner {
     const val TAG = "CameraView"
     const val TAG_PERF = "CameraView.performance"
-    private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video", "frameProcessorFps")
+    private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "fps", "hdr", "lowLightBoost", "photo", "video", "enableFrameProcessor")
     private val arrayListOfZoom = arrayListOf("zoom")
   }
 }
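Two details of the `frameProcessorFps` handling also change in the hunks above: the throttle interval is now recomputed from `frameProcessorFps` on every analyzed frame instead of once when the session is configured, and `frameProcessorFps` is dropped from `propsThatRequireSessionReconfiguration`, so changing it no longer tears the session down. The throttle itself is just an interval check; a small TypeScript sketch of the same logic, with hypothetical helper names (`makeFrameThrottle`, `getFps`, `deliver`):

// Sketch of the per-frame throttle used above: at getFps() === 5,
// intervalMs is 200, so at most one frame per 200 ms is delivered.
function makeFrameThrottle<T>(getFps: () => number, deliver: (frame: T) => void): (frame: T) => void {
  let lastCall = Date.now();
  return (frame: T): void => {
    const now = Date.now();
    const intervalMs = (1.0 / getFps()) * 1000.0; // re-read the fps on every frame
    if (now - lastCall > intervalMs) {
      lastCall = now;
      deliver(frame);
    }
    // the caller still closes/releases the frame afterwards, delivered or not
  };
}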

View File

@@ -43,6 +43,13 @@ class CameraViewManager : SimpleViewManager<CameraView>() {
     view.audio = audio
   }
+  @ReactProp(name = "enableFrameProcessor")
+  fun setEnableFrameProcessor(view: CameraView, enableFrameProcessor: Boolean) {
+    if (view.enableFrameProcessor != enableFrameProcessor)
+      addChangedPropToTransaction(view, "enableFrameProcessor")
+    view.enableFrameProcessor = enableFrameProcessor
+  }
+
   @ReactProp(name = "enableDepthData")
   fun setEnableDepthData(view: CameraView, enableDepthData: Boolean) {
     if (view.enableDepthData != enableDepthData)

View File

@@ -37,6 +37,7 @@ Before opening an issue, make sure you try the following:
 5. Press **Create Bridging Header** when promted.
 5. If you're having runtime issues, check the logs in Xcode to find out more. In Xcode, go to **View** > **Debug Area** > **Activate Console** (<kbd>⇧</kbd>+<kbd>⌘</kbd>+<kbd>C</kbd>).
    * For errors without messages, there's often an error code attached. Look up the error code on [osstatus.com](https://www.osstatus.com) to get more information about a specific error.
+6. If your Frame Processor is not running, make sure you check the native Xcode logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
 ## Android
@@ -65,6 +66,7 @@ Before opening an issue, make sure you try the following:
    ```
 5. If you're having runtime issues, check the logs in Android Studio/Logcat to find out more. In Android Studio, go to **View** > **Tool Windows** > **Logcat** (<kbd>⌘</kbd>+<kbd>6</kbd>) or run `adb logcat` in Terminal.
 6. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/camera.camera-1#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information.
+7. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs to find out why. Also make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI.
 ## Issues

View File

@ -20,9 +20,9 @@
#import "JSConsoleHelper.h" #import "JSConsoleHelper.h"
#ifdef VISION_CAMERA_DISABLE_FRAME_PROCESSORS #ifdef VISION_CAMERA_DISABLE_FRAME_PROCESSORS
static bool enableFrameProcessors = false; static bool VISION_CAMERA_ENABLE_FRAME_PROCESSORS = false;
#else #else
static bool enableFrameProcessors = true; static bool VISION_CAMERA_ENABLE_FRAME_PROCESSORS = true;
#endif #endif
@interface CameraBridge: RCTViewManager @interface CameraBridge: RCTViewManager

View File

@ -125,7 +125,7 @@ extension CameraView {
captureSession.removeOutput(videoOutput) captureSession.removeOutput(videoOutput)
self.videoOutput = nil self.videoOutput = nil
} }
if video?.boolValue == true { if video?.boolValue == true || enableFrameProcessor {
ReactLogger.log(level: .info, message: "Adding Video Data output...") ReactLogger.log(level: .info, message: "Adding Video Data output...")
videoOutput = AVCaptureVideoDataOutput() videoOutput = AVCaptureVideoDataOutput()
guard captureSession.canAddOutput(videoOutput!) else { guard captureSession.canAddOutput(videoOutput!) else {

View File

@ -25,7 +25,8 @@ private let propsThatRequireReconfiguration = ["cameraId",
"enablePortraitEffectsMatteDelivery", "enablePortraitEffectsMatteDelivery",
"preset", "preset",
"photo", "photo",
"video"] "video",
"enableFrameProcessor"]
private let propsThatRequireDeviceReconfiguration = ["fps", private let propsThatRequireDeviceReconfiguration = ["fps",
"hdr", "hdr",
"lowLightBoost", "lowLightBoost",
@ -47,6 +48,7 @@ public final class CameraView: UIView {
@objc var photo: NSNumber? // nullable bool @objc var photo: NSNumber? // nullable bool
@objc var video: NSNumber? // nullable bool @objc var video: NSNumber? // nullable bool
@objc var audio: NSNumber? // nullable bool @objc var audio: NSNumber? // nullable bool
@objc var enableFrameProcessor = false
// props that require format reconfiguring // props that require format reconfiguring
@objc var format: NSDictionary? @objc var format: NSDictionary?
@objc var fps: NSNumber? @objc var fps: NSNumber?

View File

@ -31,6 +31,7 @@ RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enableFrameProcessor, BOOL);
// device format // device format
RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary); RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber); RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);

View File

@ -23,7 +23,7 @@ final class CameraViewManager: RCTViewManager {
#endif #endif
// Install Frame Processor bindings and setup Runtime // Install Frame Processor bindings and setup Runtime
if enableFrameProcessors { if VISION_CAMERA_ENABLE_FRAME_PROCESSORS {
CameraQueues.frameProcessorQueue.async { CameraQueues.frameProcessorQueue.async {
self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge) self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge)
self.bridge.runOnJS { self.bridge.runOnJS {

View File

@@ -1,5 +1,13 @@
 import React from 'react';
-import { requireNativeComponent, NativeModules, NativeSyntheticEvent, findNodeHandle, NativeMethods, Platform } from 'react-native';
+import {
+  requireNativeComponent,
+  NativeModules,
+  NativeSyntheticEvent,
+  findNodeHandle,
+  NativeMethods,
+  Platform,
+  LayoutChangeEvent,
+} from 'react-native';
 import type { CameraDevice } from './CameraDevice';
 import type { ErrorWithCause } from './CameraError';
 import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
@@ -21,6 +29,7 @@ interface OnErrorEvent {
 }
 type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
   cameraId: string;
+  enableFrameProcessor: boolean;
   onInitialized?: (event: NativeSyntheticEvent<void>) => void;
   onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
 };
@@ -63,25 +72,21 @@ if (CameraModule == null) console.error("Camera: Native Module 'CameraView' was
  * @component
  */
 export class Camera extends React.PureComponent<CameraProps> {
-  /**
-   * @internal
-   */
+  /** @internal */
   static displayName = 'Camera';
-  /**
-   * @internal
-   */
+  /** @internal */
   displayName = Camera.displayName;
   private lastFrameProcessor: ((frame: Frame) => void) | undefined;
+  private isNativeViewMounted = false;
   private readonly ref: React.RefObject<RefType>;
-  /**
-   * @internal
-   */
+  /** @internal */
   constructor(props: CameraProps) {
     super(props);
     this.onInitialized = this.onInitialized.bind(this);
     this.onError = this.onError.bind(this);
+    this.onLayout = this.onLayout.bind(this);
     this.ref = React.createRef<RefType>();
     this.lastFrameProcessor = undefined;
   }
@@ -331,13 +336,14 @@ export class Camera extends React.PureComponent<CameraProps> {
   //#endregion
   //#region Lifecycle
-  /**
-   * @internal
-   */
+  /** @internal */
   private assertFrameProcessorsEnabled(): void {
     // @ts-expect-error JSI functions aren't typed
-    if (global.setFrameProcessor == null || global.unsetFrameProcessor == null)
-      throw new Error('Frame Processors are not enabled. Make sure you install react-native-reanimated 2.2.0 or above!');
+    if (global.setFrameProcessor == null || global.unsetFrameProcessor == null) {
+      throw new Error(
+        'Frame Processors are not enabled. See https://mrousavy.github.io/react-native-vision-camera/docs/guides/troubleshooting',
+      );
+    }
   }
   private setFrameProcessor(frameProcessor: (frame: Frame) => void): void {
@@ -352,52 +358,45 @@ export class Camera extends React.PureComponent<CameraProps> {
     global.unsetFrameProcessor(this.handle);
   }
-  /**
-   * @internal
-   */
-  componentWillUnmount(): void {
-    if (this.lastFrameProcessor != null || this.props.frameProcessor != null) this.unsetFrameProcessor();
-  }
-
-  /**
-   * @internal
-   */
-  componentDidMount(): void {
-    if (this.props.frameProcessor != null) {
-      if (Platform.OS === 'android') {
-        // on Android the View is not fully mounted yet (`findViewById` returns null), so we wait 300ms.
-        setTimeout(() => {
-          if (this.props.frameProcessor != null) this.setFrameProcessor(this.props.frameProcessor);
-        }, 300);
-      } else {
-        // on other platforms (iOS) the View we can assume that the View is immediatelly available.
-        this.setFrameProcessor(this.props.frameProcessor);
-      }
-    }
-  }
-
-  /**
-   * @internal
-   */
+  private onLayout(event: LayoutChangeEvent): void {
+    if (!this.isNativeViewMounted) {
+      this.isNativeViewMounted = true;
+      if (this.props.frameProcessor != null) {
+        // user passed a `frameProcessor` but we didn't set it yet because the native view was not mounted yet. set it now.
+        this.setFrameProcessor(this.props.frameProcessor);
+        this.lastFrameProcessor = this.props.frameProcessor;
+      }
+    }
+    this.props.onLayout?.(event);
+  }
+
+  /** @internal */
   componentDidUpdate(): void {
-    if (this.props.frameProcessor !== this.lastFrameProcessor) {
+    if (!this.isNativeViewMounted) return;
+    const frameProcessor = this.props.frameProcessor;
+    if (frameProcessor !== this.lastFrameProcessor) {
       // frameProcessor argument identity changed. Update native to reflect the change.
-      if (this.props.frameProcessor != null) this.setFrameProcessor(this.props.frameProcessor);
+      if (frameProcessor != null) this.setFrameProcessor(frameProcessor);
       else this.unsetFrameProcessor();
-      this.lastFrameProcessor = this.props.frameProcessor;
+      this.lastFrameProcessor = frameProcessor;
+    }
+  }
+
+  /** @internal */
+  componentWillUnmount(): void {
+    if (this.lastFrameProcessor != null || this.props.frameProcessor != null) {
+      this.unsetFrameProcessor();
+      this.lastFrameProcessor = undefined;
     }
   }
   //#endregion
-  /**
-   * @internal
-   */
+  /** @internal */
   public render(): React.ReactNode {
     // We remove the big `device` object from the props because we only need to pass `cameraId` to native.
-    const { device, video: enableVideo, frameProcessor, ...props } = this.props;
-    // on iOS, enabling a frameProcessor requires `video` to be `true`. On Android, it doesn't.
-    const video = Platform.OS === 'ios' ? frameProcessor != null || enableVideo : enableVideo;
+    const { device, frameProcessor, ...props } = this.props;
     return (
       <NativeCameraView
         {...props}
@@ -405,7 +404,8 @@ export class Camera extends React.PureComponent<CameraProps> {
         ref={this.ref}
         onInitialized={this.onInitialized}
         onError={this.onError}
-        video={video}
+        enableFrameProcessor={frameProcessor != null}
+        onLayout={this.onLayout}
       />
     );
   }
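Taken together, the TypeScript side no longer rewrites the `video` prop on iOS; it simply passes `enableFrameProcessor={frameProcessor != null}` down to native, and each platform decides which capture outputs or use-cases that flag requires. From an app's point of view nothing changes in how a frame processor is declared; a usage sketch, assuming the library's `useCameraDevices`/`useFrameProcessor` hooks and `frameProcessorFps` prop as documented for this release line:

import React from 'react';
import { StyleSheet } from 'react-native';
import { Camera, useCameraDevices, useFrameProcessor } from 'react-native-vision-camera';

export function ScannerScreen(): React.ReactElement | null {
  const devices = useCameraDevices();
  const device = devices.back;

  // Worklet that runs for every frame the native side delivers (throttled by frameProcessorFps).
  const frameProcessor = useFrameProcessor((frame) => {
    'worklet';
    console.log(`Frame: ${frame.width}x${frame.height}`);
  }, []);

  if (device == null) return null;
  return (
    <Camera
      style={StyleSheet.absoluteFill}
      device={device}
      isActive={true}
      // `video` is only needed for recording; the native `enableFrameProcessor`
      // flag is derived from this prop alone.
      frameProcessor={frameProcessor}
      frameProcessorFps={5}
    />
  );
}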