diff --git a/.github/workflows/build-android.yml b/.github/workflows/build-android.yml index ca207b4..e6d39ea 100644 --- a/.github/workflows/build-android.yml +++ b/.github/workflows/build-android.yml @@ -30,11 +30,11 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Setup JDK 11 + - name: Setup JDK 17 uses: actions/setup-java@v4 with: distribution: 'zulu' - java-version: 11 + java-version: 17 java-package: jdk - name: Get yarn cache directory path @@ -74,11 +74,11 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Setup JDK 11 + - name: Setup JDK 17 uses: actions/setup-java@v4 with: distribution: 'zulu' - java-version: 11 + java-version: 17 java-package: jdk - name: Get yarn cache directory path diff --git a/.github/workflows/build-ios.yml b/.github/workflows/build-ios.yml index 9c4d499..8067991 100644 --- a/.github/workflows/build-ios.yml +++ b/.github/workflows/build-ios.yml @@ -54,7 +54,7 @@ jobs: working-directory: package/example/ios - name: Restore Pods cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: package/example/ios/Pods key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} @@ -113,7 +113,7 @@ jobs: working-directory: package/example/ios - name: Restore Pods cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: package/example/ios/Pods key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} diff --git a/.github/workflows/compress-images.yml b/.github/workflows/compress-images.yml new file mode 100644 index 0000000..c12d854 --- /dev/null +++ b/.github/workflows/compress-images.yml @@ -0,0 +1,33 @@ +name: Compress Images (docs) +on: + pull_request: + # Run Image Actions when JPG, JPEG, PNG or WebP files are added or changed. + # See https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions#onpushpull_requestpaths for reference. 
+ paths: + - ".github/workflows/compress-images.yml" + - "**.jpg" + - "**.jpeg" + - "**.png" + - "**.webp" + +jobs: + compress-images: + # Only run on Pull Requests within the same repository, and not from forks. + if: github.event.pull_request.head.repo.full_name == github.repository + name: πŸ—‚ Compress images + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + + - name: Compress Images + uses: calibreapp/image-actions@main + with: + # The `GITHUB_TOKEN` is automatically generated by GitHub and scoped only to the repository that is currently running the action. By default, the action can’t update Pull Requests initiated from forked repositories. + # See https://docs.github.com/en/actions/reference/authentication-in-a-workflow and https://help.github.com/en/articles/virtual-environments-for-github-actions#token-permissions + githubToken: ${{ secrets.GITHUB_TOKEN }} + ignorePaths: "e2e/**" + jpegQuality: "80" + jpegProgressive: false + pngQuality: "80" + webpQuality: "80" diff --git a/README.md b/README.md index 2ae50a2..39dceed 100644 --- a/README.md +++ b/README.md @@ -47,6 +47,15 @@ cd ios && pod install To see VisionCamera in action, check out [ShadowLens](https://mrousavy.com/projects/shadowlens)! +
+ + + + + + +
+ ### Example ```tsx diff --git a/docs/docs/guides/CODE_SCANNING.mdx b/docs/docs/guides/CODE_SCANNING.mdx index 01f8c96..d59ef8f 100644 --- a/docs/docs/guides/CODE_SCANNING.mdx +++ b/docs/docs/guides/CODE_SCANNING.mdx @@ -130,4 +130,23 @@ The Code Scanner will call your [`onCodeScanned`](/docs/api/interfaces/CodeScann
+## UPC-A vs EAN-13 codes + +UPC-A is a special case to handle if you need to cater for it. Android's SDK officially supports UPC-A but iOS does not, instead they handle the code as EAN-13. Since EAN-13 is a superset of UPC-A, with an extra 0 digit at the front. + +This means, the `upc-a` types are reported under the `ean-13` umbrella type on iOS: + +```jsx +const codeScanner = useCodeScanner({ + codeTypes: ['upc-a'], // <-- βœ… We configure for 'upc-a' types + onCodeScanned: (codes) => { + for (const code of codes) { + console.log(code.type); // <-- ❌ On iOS, we receive 'ean-13' + } + } +}) +``` + +You will need to keep this in mind and do the conversion from EAN-13 to UPC-A yourself. This can be done by removing the front `0` digit from the code to get a UPC-A code. + #### πŸš€ Next section: [Frame Processors](frame-processors) diff --git a/docs/docs/guides/FORMATS.mdx b/docs/docs/guides/FORMATS.mdx index e2efed1..bdfd092 100644 --- a/docs/docs/guides/FORMATS.mdx +++ b/docs/docs/guides/FORMATS.mdx @@ -34,12 +34,13 @@ To understand a bit more about camera formats, you first need to understand a fe * 4k Photos, 1080p Videos, 240 FPS (ultra high FPS/slow motion) * 720p Photos, 720p Videos, 30 FPS (smaller buffers/e.g. faster face detection) * Each app has different requirements, so the format filtering is up to you. +* The `videoResolution` and `videoAspectRatio` options also affect the preview, as preview is also running in the video stream. To get all available formats, simply use the `CameraDevice`'s [`formats` property](/docs/api/interfaces/CameraDevice#formats). These are a [CameraFormat's](/docs/api/interfaces/CameraDeviceFormat) props: - [`photoHeight`](/docs/api/interfaces/CameraDeviceFormat#photoheight)/[`photoWidth`](/docs/api/interfaces/CameraDeviceFormat#photoWidth): The resolution that will be used for taking photos. Choose a format with your desired resolution. 
-- [`videoHeight`](/docs/api/interfaces/CameraDeviceFormat#videoheight)/[`videoWidth`](/docs/api/interfaces/CameraDeviceFormat#videoWidth): The resolution that will be used for recording videos. Choose a format with your desired resolution. -- [`minFps`](/docs/api/interfaces/CameraDeviceFormat#minfps)/[`maxFps`](/docs/api/interfaces/CameraDeviceFormat#maxfps): A range of possible values for the `fps` property. For example, if your format has `minFps: 1` and `maxFps: 60`, you can either use `fps={30}`, `fps={60}` or any other value in between for recording videos. +- [`videoHeight`](/docs/api/interfaces/CameraDeviceFormat#videoheight)/[`videoWidth`](/docs/api/interfaces/CameraDeviceFormat#videoWidth): The resolution that will be used for recording videos and streaming into frame processors. This also affects the preview's aspect ratio. Choose a format with your desired resolution. +- [`minFps`](/docs/api/interfaces/CameraDeviceFormat#minfps)/[`maxFps`](/docs/api/interfaces/CameraDeviceFormat#maxfps): A range of possible values for the `fps` property. For example, if your format has `minFps: 1` and `maxFps: 60`, you can either use `fps={30}`, `fps={60}` or any other value in between for recording videos and streaming into frame processors. - [`videoStabilizationModes`](/docs/api/interfaces/CameraDeviceFormat#videostabilizationmodes): All supported Video Stabilization Modes, digital and optical. If this specific format contains your desired [`VideoStabilizationMode`](/docs/api/#videostabilizationmode), you can pass it to your `` via the [`videoStabilizationMode` property](/docs/api/interfaces/CameraProps#videoStabilizationMode). - [`pixelFormats`](/docs/api/interfaces/CameraDeviceFormat#pixelformats): All supported Pixel Formats. If this specific format contains your desired [`PixelFormat`](/docs/api/#PixelFormat), you can pass it to your `` via the [`pixelFormat` property](/docs/api/interfaces/CameraProps#pixelFormat). 
- [`supportsVideoHdr`](/docs/api/interfaces/CameraDeviceFormat#supportsvideohdr): Whether this specific format supports true 10-bit HDR for video capture. If this is `true`, you can enable `videoHdr` on your ``. @@ -61,6 +62,7 @@ You can either find a matching format manually by looping through your `CameraDe ```ts const device = ... const format = useCameraFormat(device, [ + { videoAspectRatio: 16 / 9 }, { videoResolution: { width: 3048, height: 2160 } }, { fps: 60 } ]) @@ -72,6 +74,7 @@ const format = useCameraFormat(device, [ ```ts const device = ... const format = getCameraFormat(device, [ + { videoAspectRatio: 16 / 9 }, { videoResolution: { width: 3048, height: 2160 } }, { fps: 60 } ]) diff --git a/docs/docs/guides/FRAME_PROCESSORS.mdx b/docs/docs/guides/FRAME_PROCESSORS.mdx index 31b36a1..755c510 100644 --- a/docs/docs/guides/FRAME_PROCESSORS.mdx +++ b/docs/docs/guides/FRAME_PROCESSORS.mdx @@ -89,7 +89,8 @@ Additionally, you can also directly access the Frame's pixel data using [`toArra const frameProcessor = useFrameProcessor((frame) => { 'worklet' if (frame.pixelFormat === 'rgb') { - const data = frame.toArrayBuffer() + const buffer = frame.toArrayBuffer() + const data = new Uint8Array(buffer) console.log(`Pixel at 0,0: RGB(${data[0]}, ${data[1]}, ${data[2]})`) } }, []) diff --git a/docs/docs/guides/FRAME_PROCESSOR_PLUGINS.mdx b/docs/docs/guides/FRAME_PROCESSOR_PLUGINS.mdx index 9a777bd..a425609 100644 --- a/docs/docs/guides/FRAME_PROCESSOR_PLUGINS.mdx +++ b/docs/docs/guides/FRAME_PROCESSOR_PLUGINS.mdx @@ -33,6 +33,7 @@ cd ios && pod install * [xulihang/**vision-camera-dynamsoft-barcode-reader**](https://github.com/xulihang/vision-camera-dynamsoft-barcode-reader): A plugin to read barcodes using Dynamsoft Barcode Reader. * [xulihang/**vision-camera-dynamsoft-label-recognizer**](https://github.com/xulihang/vision-camera-dynamsoft-label-recognizer): A plugin to recognize text on labels, MRZ passports, etc. using Dynamsoft Label Recognizer. 
* [tony-xlh/**vision-camera-dynamsoft-document-normalizer**](https://github.com/tony-xlh/vision-camera-dynamsoft-document-normalizer): A plugin to scan documents using Dynamsoft Document Normalizer with features like document border detection and perspective transformation. +* [tony-xlh/**vision-camera-cropper**](https://github.com/tony-xlh/vision-camera-cropper): A plugin to crop frames and save frames to files or as base64. * [aarongrider/**vision-camera-ocr**](https://github.com/aarongrider/vision-camera-ocr): A plugin to detect text in real time using MLKit Text Detector (OCR). * [yasintorun/**vision-camera-base64**](https://github.com/yasintorun/vision-camera-base64): A plugin to convert a Frame to a base64 string. @@ -40,7 +41,7 @@ cd ios && pod install - +* [nonam4/**react-native-vision-camera-face-detector**](https://github.com/nonam4/react-native-vision-camera-face-detector): A V3 frame processor plugin to detect faces using MLKit Vision Face Detector. @@ -50,7 +51,7 @@ cd ios && pod install

-Click here to add your Frame Processor Plugin to this list! +Click here to add your Frame Processor Plugin to this list!


diff --git a/docs/docs/guides/PERFORMANCE.mdx b/docs/docs/guides/PERFORMANCE.mdx index c943f97..8e2434f 100644 --- a/docs/docs/guides/PERFORMANCE.mdx +++ b/docs/docs/guides/PERFORMANCE.mdx @@ -70,6 +70,13 @@ Enable Buffer Compression ([`enableBufferCompression`](/docs/api/interfaces/Came Note: When not using a `frameProcessor`, buffer compression is automatically enabled. +### GPU buffers + +Enable GPU Buffer flags ([`enableGpuBuffers`](/docs/api/interfaces/CameraProps#enablegpubuffers)) to optimize the Video Pipeline for zero-copy buffer forwarding. +If this is enabled, the Video Pipeline can avoid an additional CPU -> GPU copy, resulting in better performance and more efficiency. + +Note: This only takes effect when using a `frameProcessor`. + ### Video Stabilization Video Stabilization requires additional overhead to start the algorithm, so disabling [`videoStabilizationMode`](/docs/api/interfaces/CameraProps#videostabilizationmode) can significantly speed up the Camera initialization time. diff --git a/docs/docs/guides/SHADOW_LENS.mdx b/docs/docs/guides/SHADOW_LENS.mdx index 47e6a0a..bcc0d1c 100644 --- a/docs/docs/guides/SHADOW_LENS.mdx +++ b/docs/docs/guides/SHADOW_LENS.mdx @@ -21,10 +21,10 @@ import useBaseUrl from '@docusaurus/useBaseUrl' **Download now:**
- + - +
diff --git a/docs/docs/guides/TROUBLESHOOTING.mdx b/docs/docs/guides/TROUBLESHOOTING.mdx index 18398c5..c068c3f 100644 --- a/docs/docs/guides/TROUBLESHOOTING.mdx +++ b/docs/docs/guides/TROUBLESHOOTING.mdx @@ -112,7 +112,7 @@ If you're experiencing build issues or runtime issues in VisionCamera, make sure 2. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information. 3. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs. There is useful information about the Frame Processor Runtime that will tell you if something goes wrong. 4. If your Frame Processor is not running, make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI. -5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr` or `format` on the `` component except for the required ones: +5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr`, `enableGpuBuffers` or `format` on the `` component except for the required ones: ```tsx ``` diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 89d3ef2..66fc859 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -25,7 +25,7 @@ module.exports = { }, announcementBar: { id: 'shadowlens', - content: 'ShadowLens is out!!! πŸ₯³πŸ₯³ Download the iOS app to see VisionCamera in action: apps.apple.com/shadowlens', + content: 'ShadowLens is out!!! 
πŸ₯³πŸ₯³ See VisionCamera in action: Download ShadowLens for iOS or Download ShadowLens for Android', backgroundColor: '#e39600', textColor: '#ffffff', isCloseable: false, diff --git a/docs/static/img/googleplay.svg b/docs/static/img/googleplay.svg new file mode 100644 index 0000000..68b707a --- /dev/null +++ b/docs/static/img/googleplay.svg @@ -0,0 +1,2 @@ + +image/svg+xml diff --git a/docs/static/img/playstore.png b/docs/static/img/playstore.png deleted file mode 100644 index 131f3ac..0000000 Binary files a/docs/static/img/playstore.png and /dev/null differ diff --git a/package/android/build.gradle b/package/android/build.gradle index 8a79120..88ea6a5 100644 --- a/package/android/build.gradle +++ b/package/android/build.gradle @@ -46,6 +46,10 @@ def safeExtGet(prop, fallback) { rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback } +def safeExtGetBool(prop, fallback) { + Boolean.parseBoolean("${safeExtGet(prop, fallback)}") +} + def reactNativeArchitectures() { def value = project.getProperties().get("reactNativeArchitectures") return value ? value.split(",") : ["armeabi-v7a", "x86", "x86_64", "arm64-v8a"] @@ -68,10 +72,10 @@ static def findNodeModules(baseDir) { def nodeModules = findNodeModules(projectDir) -def hasWorklets = !safeExtGet("VisionCamera_disableFrameProcessors", false) && findProject(":react-native-worklets-core") != null +def hasWorklets = !safeExtGetBool('VisionCamera_disableFrameProcessors', false) && findProject(":react-native-worklets-core") != null logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? 
"enabled" : "disabled"}!") -def enableCodeScanner = safeExtGet("VisionCamera_enableCodeScanner", false) +def enableCodeScanner = safeExtGetBool('VisionCamera_enableCodeScanner', false) repositories { google() diff --git a/package/android/src/main/cpp/MutableJByteBuffer.cpp b/package/android/src/main/cpp/MutableJByteBuffer.cpp index b4d5695..e472da3 100644 --- a/package/android/src/main/cpp/MutableJByteBuffer.cpp +++ b/package/android/src/main/cpp/MutableJByteBuffer.cpp @@ -14,7 +14,7 @@ MutableJByteBuffer::MutableJByteBuffer(jni::alias_ref byteBuff _byteBuffer = jni::make_global(byteBuffer); } -MutableJByteBuffer::~MutableJByteBuffer() noexcept { +MutableJByteBuffer::~MutableJByteBuffer() { // Hermes GC might destroy HostObjects on an arbitrary Thread which might not be // connected to the JNI environment. To make sure fbjni can properly destroy // the Java method, we connect to a JNI environment first. diff --git a/package/android/src/main/cpp/OpenGLRenderer.cpp b/package/android/src/main/cpp/OpenGLRenderer.cpp index 3768399..e9d6bd3 100644 --- a/package/android/src/main/cpp/OpenGLRenderer.cpp +++ b/package/android/src/main/cpp/OpenGLRenderer.cpp @@ -29,10 +29,11 @@ OpenGLRenderer::OpenGLRenderer(std::shared_ptr context, ANativeWi } OpenGLRenderer::~OpenGLRenderer() { + __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGLRenderer..."); + destroy(); if (_outputSurface != nullptr) { ANativeWindow_release(_outputSurface); } - destroy(); } void OpenGLRenderer::destroy() { diff --git a/package/android/src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp b/package/android/src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp index 474d0dd..46f94f8 100644 --- a/package/android/src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp +++ b/package/android/src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp @@ -34,7 +34,7 @@ JSharedArray::JSharedArray(const jni::alias_ref& javaThis, const #else jsi::Runtime& runtime = 
*proxy->cthis()->getJSRuntime(); #endif - __android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping Java ByteBuffer with size %i...", byteBuffer->getDirectSize()); + __android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping Java ByteBuffer with size %zu...", byteBuffer->getDirectSize()); _byteBuffer = jni::make_global(byteBuffer); _size = _byteBuffer->getDirectSize(); diff --git a/package/android/src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp b/package/android/src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp index 77a28a6..816b0af 100644 --- a/package/android/src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp +++ b/package/android/src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp @@ -15,6 +15,7 @@ TSelf JVisionCameraScheduler::initHybrid(jni::alias_ref jThis) { } void JVisionCameraScheduler::dispatchAsync(const std::function& job) { + std::unique_lock lock(_mutex); // 1. add job to queue _jobs.push(job); scheduleTrigger(); diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt b/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt index 7061f0b..3384ef2 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt @@ -5,6 +5,8 @@ import com.facebook.react.bridge.Arguments import com.facebook.react.bridge.ReactContext import com.facebook.react.bridge.WritableMap import com.facebook.react.uimanager.events.RCTEventEmitter +import com.facebook.react.uimanager.UIManagerHelper +import com.facebook.react.uimanager.events.Event import com.google.mlkit.vision.barcode.common.Barcode import com.mrousavy.camera.core.CameraError import com.mrousavy.camera.core.CodeScannerFrame @@ -12,26 +14,30 @@ import com.mrousavy.camera.core.UnknownCameraError import com.mrousavy.camera.core.code import com.mrousavy.camera.types.CodeType import java.io.File +import 
com.mrousavy.camera.types.* fun CameraView.invokeOnInitialized() { Log.i(CameraView.TAG, "invokeOnInitialized()") - val reactContext = context as ReactContext - reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraInitialized", null) + val surfaceId = UIManagerHelper.getSurfaceId(this) + val event = CameraInitializedEvent(surfaceId, id) + this.sendEvent(event) } fun CameraView.invokeOnStarted() { Log.i(CameraView.TAG, "invokeOnStarted()") - val reactContext = context as ReactContext - reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStarted", null) + val surfaceId = UIManagerHelper.getSurfaceId(this) + val event = CameraStartedEvent(surfaceId, id) + this.sendEvent(event) } fun CameraView.invokeOnStopped() { Log.i(CameraView.TAG, "invokeOnStopped()") - val reactContext = context as ReactContext - reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStopped", null) + val surfaceId = UIManagerHelper.getSurfaceId(this) + val event = CameraStoppedEvent(surfaceId, id) + this.sendEvent(event) } fun CameraView.invokeOnChunkReady(filepath: File, index: Int) { @@ -47,24 +53,27 @@ fun CameraView.invokeOnError(error: Throwable) { Log.e(CameraView.TAG, "invokeOnError(...):") error.printStackTrace() - val cameraError = when (error) { - is CameraError -> error - else -> UnknownCameraError(error) - } - val event = Arguments.createMap() - event.putString("code", cameraError.code) - event.putString("message", cameraError.message) + val cameraError = + when (error) { + is CameraError -> error + else -> UnknownCameraError(error) + } + val data = Arguments.createMap() + data.putString("code", cameraError.code) + data.putString("message", cameraError.message) cameraError.cause?.let { cause -> - event.putMap("cause", errorToMap(cause)) + data.putMap("cause", errorToMap(cause)) } - val reactContext = context as ReactContext - reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", 
event) + + val surfaceId = UIManagerHelper.getSurfaceId(this) + val event = CameraErrorEvent(surfaceId, id, data) + this.sendEvent(event) } fun CameraView.invokeOnViewReady() { - val event = Arguments.createMap() - val reactContext = context as ReactContext - reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraViewReady", event) + val surfaceId = UIManagerHelper.getSurfaceId(this) + val event = CameraViewReadyEvent(surfaceId, id) + this.sendEvent(event) } fun CameraView.invokeOnCodeScanned(barcodes: List, scannerFrame: CodeScannerFrame) { @@ -97,14 +106,23 @@ fun CameraView.invokeOnCodeScanned(barcodes: List, scannerFrame: CodeSc codes.pushMap(code) } - val event = Arguments.createMap() - event.putArray("codes", codes) + val data = Arguments.createMap() + data.putArray("codes", codes) val codeScannerFrame = Arguments.createMap() codeScannerFrame.putInt("width", scannerFrame.width) codeScannerFrame.putInt("height", scannerFrame.height) - event.putMap("frame", codeScannerFrame) + data.putMap("frame", codeScannerFrame) + + val surfaceId = UIManagerHelper.getSurfaceId(this) + val event = CameraCodeScannedEvent(surfaceId, id, data) + this.sendEvent(event) +} + +private fun CameraView.sendEvent(event: Event<*>) { val reactContext = context as ReactContext - reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraCodeScanned", event) + val dispatcher = + UIManagerHelper.getEventDispatcherForReactTag(reactContext, id) + dispatcher?.dispatchEvent(event) } private fun errorToMap(error: Throwable): WritableMap { diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraView+TakePhoto.kt b/package/android/src/main/java/com/mrousavy/camera/CameraView+TakePhoto.kt index 7ba6775..c09ae3f 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraView+TakePhoto.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraView+TakePhoto.kt @@ -30,9 +30,12 @@ suspend fun CameraView.takePhoto(optionsMap: 
ReadableMap): WritableMap { val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced" val flash = options["flash"] as? String ?: "off" - val enableAutoRedEyeReduction = options["enableAutoRedEyeReduction"] == true val enableAutoStabilization = options["enableAutoStabilization"] == true val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true + val enablePrecapture = options["enablePrecapture"] as? Boolean ?: false + + // TODO: Implement Red Eye Reduction + options["enableAutoRedEyeReduction"] val flashMode = Flash.fromUnionValue(flash) val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization) @@ -41,8 +44,8 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap { qualityPrioritizationMode, flashMode, enableShutterSound, - enableAutoRedEyeReduction, enableAutoStabilization, + enablePrecapture, orientation ) diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraView.kt b/package/android/src/main/java/com/mrousavy/camera/CameraView.kt index 204258c..5849573 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraView.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraView.kt @@ -4,9 +4,9 @@ import android.annotation.SuppressLint import android.content.Context import android.hardware.camera2.CameraManager import android.util.Log +import android.view.Gravity import android.view.ScaleGestureDetector import android.widget.FrameLayout -import com.facebook.react.bridge.ReadableMap import com.google.mlkit.vision.barcode.common.Barcode import com.mrousavy.camera.core.CameraConfiguration import com.mrousavy.camera.core.CameraQueues @@ -48,23 +48,23 @@ class CameraView(context: Context) : // props that require reconfiguring var cameraId: String? = null var enableDepthData = false - var enableHighQualityPhotos: Boolean? = null var enablePortraitEffectsMatteDelivery = false // use-cases - var photo: Boolean? = null - var video: Boolean? 
= null - var audio: Boolean? = null + var photo = false + var video = false + var audio = false var enableFrameProcessor = false var pixelFormat: PixelFormat = PixelFormat.NATIVE // props that require format reconfiguring - var format: ReadableMap? = null + var format: CameraDeviceFormat? = null var fps: Int? = null var videoStabilizationMode: VideoStabilizationMode? = null var videoHdr = false var photoHdr = false - var lowLightBoost: Boolean? = null // nullable bool + var lowLightBoost = false + var enableGpuBuffers = false // other props var isActive = false @@ -72,7 +72,7 @@ class CameraView(context: Context) : var zoom: Float = 1f // in "factor" var exposure: Double = 1.0 var orientation: Orientation = Orientation.PORTRAIT - var enableZoomGesture: Boolean = false + var enableZoomGesture = false set(value) { field = value updateZoomGesture() @@ -82,7 +82,7 @@ class CameraView(context: Context) : previewView.resizeMode = value field = value } - var enableFpsGraph: Boolean = false + var enableFpsGraph = false set(value) { field = value updateFpsGraph() @@ -110,21 +110,26 @@ class CameraView(context: Context) : clipToOutline = true cameraSession = CameraSession(context, cameraManager, this) previewView = cameraSession.createPreviewView(context) + previewView.layoutParams = LayoutParams( + LayoutParams.MATCH_PARENT, + LayoutParams.MATCH_PARENT, + Gravity.CENTER + ) addView(previewView) } override fun onAttachedToWindow() { + super.onAttachedToWindow() if (!isMounted) { isMounted = true invokeOnViewReady() } update() - super.onAttachedToWindow() } override fun onDetachedFromWindow() { - update() super.onDetachedFromWindow() + update() } fun destroy() { @@ -149,19 +154,20 @@ class CameraView(context: Context) : config.cameraId = cameraId // Photo - if (photo == true) { + if (photo) { config.photo = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Photo(photoHdr)) } else { config.photo = CameraConfiguration.Output.Disabled.create() } // Video/Frame 
Processor - if (video == true || enableFrameProcessor) { + if (video || enableFrameProcessor) { config.video = CameraConfiguration.Output.Enabled.create( CameraConfiguration.Video( videoHdr, pixelFormat, - enableFrameProcessor + enableFrameProcessor, + enableGpuBuffers ) ) } else { @@ -169,7 +175,7 @@ class CameraView(context: Context) : } // Audio - if (audio == true) { + if (audio) { config.audio = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Audio(Unit)) } else { config.audio = CameraConfiguration.Output.Disabled.create() @@ -189,12 +195,7 @@ class CameraView(context: Context) : config.orientation = orientation // Format - val format = format - if (format != null) { - config.format = CameraDeviceFormat.fromJSValue(format) - } else { - config.format = null - } + config.format = format // Side-Props config.fps = fps diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraViewManager.kt b/package/android/src/main/java/com/mrousavy/camera/CameraViewManager.kt index 13bf33a..2c9f579 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraViewManager.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraViewManager.kt @@ -5,6 +5,7 @@ import com.facebook.react.common.MapBuilder import com.facebook.react.uimanager.ThemedReactContext import com.facebook.react.uimanager.ViewGroupManager import com.facebook.react.uimanager.annotations.ReactProp +import com.mrousavy.camera.types.CameraDeviceFormat import com.mrousavy.camera.types.CodeScannerOptions import com.mrousavy.camera.types.Orientation import com.mrousavy.camera.types.PixelFormat @@ -45,17 +46,17 @@ class CameraViewManager : ViewGroupManager() { } @ReactProp(name = "photo") - fun setPhoto(view: CameraView, photo: Boolean?) { + fun setPhoto(view: CameraView, photo: Boolean) { view.photo = photo } @ReactProp(name = "video") - fun setVideo(view: CameraView, video: Boolean?) 
{ + fun setVideo(view: CameraView, video: Boolean) { view.video = video } @ReactProp(name = "audio") - fun setAudio(view: CameraView, audio: Boolean?) { + fun setAudio(view: CameraView, audio: Boolean) { view.audio = audio } @@ -66,8 +67,12 @@ class CameraViewManager : ViewGroupManager() { @ReactProp(name = "pixelFormat") fun setPixelFormat(view: CameraView, pixelFormat: String?) { - val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) - view.pixelFormat = newPixelFormat + if (pixelFormat != null) { + val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) + view.pixelFormat = newPixelFormat + } else { + view.pixelFormat = PixelFormat.NATIVE + } } @ReactProp(name = "enableDepthData") @@ -85,15 +90,19 @@ class CameraViewManager : ViewGroupManager() { view.enableFpsGraph = enableFpsGraph } - @ReactProp(name = "videoStabilizationMode") - fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { - val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) - view.videoStabilizationMode = newMode + @ReactProp(name = "enableGpuBuffers") + fun setEnableGpuBuffers(view: CameraView, enableGpuBuffers: Boolean) { + view.enableGpuBuffers = enableGpuBuffers } - @ReactProp(name = "enableHighQualityPhotos") - fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) { - view.enableHighQualityPhotos = enableHighQualityPhotos + @ReactProp(name = "videoStabilizationMode") + fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { + if (videoStabilizationMode != null) { + val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) + view.videoStabilizationMode = newMode + } else { + view.videoStabilizationMode = null + } } @ReactProp(name = "enablePortraitEffectsMatteDelivery") @@ -103,13 +112,22 @@ class CameraViewManager : ViewGroupManager() { @ReactProp(name = "format") fun setFormat(view: CameraView, format: ReadableMap?) 
{ - view.format = format + if (format != null) { + val newFormat = CameraDeviceFormat.fromJSValue(format) + view.format = newFormat + } else { + view.format = null + } } @ReactProp(name = "resizeMode") - fun setResizeMode(view: CameraView, resizeMode: String) { - val newMode = ResizeMode.fromUnionValue(resizeMode) - view.resizeMode = newMode + fun setResizeMode(view: CameraView, resizeMode: String?) { + if (resizeMode != null) { + val newMode = ResizeMode.fromUnionValue(resizeMode) + view.resizeMode = newMode + } else { + view.resizeMode = ResizeMode.COVER + } } // TODO: Change when TurboModules release. @@ -120,30 +138,34 @@ class CameraViewManager : ViewGroupManager() { view.fps = if (fps > 0) fps else null } - @ReactProp(name = "photoHdr", defaultBoolean = false) + @ReactProp(name = "photoHdr") fun setPhotoHdr(view: CameraView, photoHdr: Boolean) { view.photoHdr = photoHdr } - @ReactProp(name = "videoHdr", defaultBoolean = false) + @ReactProp(name = "videoHdr") fun setVideoHdr(view: CameraView, videoHdr: Boolean) { view.videoHdr = videoHdr } @ReactProp(name = "lowLightBoost") - fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) { + fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean) { view.lowLightBoost = lowLightBoost } - @ReactProp(name = "isActive", defaultBoolean = false) + @ReactProp(name = "isActive") fun setIsActive(view: CameraView, isActive: Boolean) { view.isActive = isActive } @ReactProp(name = "torch") - fun setTorch(view: CameraView, torch: String) { - val newMode = Torch.fromUnionValue(torch) - view.torch = newMode + fun setTorch(view: CameraView, torch: String?) { + if (torch != null) { + val newMode = Torch.fromUnionValue(torch) + view.torch = newMode + } else { + view.torch = Torch.OFF + } } @ReactProp(name = "zoom") @@ -158,14 +180,22 @@ class CameraViewManager : ViewGroupManager() { @ReactProp(name = "orientation") fun setOrientation(view: CameraView, orientation: String?) 
{ - val newMode = Orientation.fromUnionValue(orientation) - view.orientation = newMode + if (orientation != null) { + val newMode = Orientation.fromUnionValue(orientation) + view.orientation = newMode + } else { + view.orientation = Orientation.PORTRAIT + } } @ReactProp(name = "codeScannerOptions") - fun setCodeScanner(view: CameraView, codeScannerOptions: ReadableMap) { - val newCodeScannerOptions = CodeScannerOptions(codeScannerOptions) - view.codeScannerOptions = newCodeScannerOptions + fun setCodeScanner(view: CameraView, codeScannerOptions: ReadableMap?) { + if (codeScannerOptions != null) { + val newCodeScannerOptions = CodeScannerOptions.fromJSValue(codeScannerOptions) + view.codeScannerOptions = newCodeScannerOptions + } else { + view.codeScannerOptions = null + } } companion object { diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt index a948b41..593d40a 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt @@ -44,7 +44,7 @@ data class CameraConfiguration( // Output types, those need to be comparable data class CodeScanner(val codeTypes: List) data class Photo(val enableHdr: Boolean) - data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean) + data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean, val enableGpuBuffers: Boolean) data class Audio(val nothing: Unit) data class Preview(val surface: Surface) @@ -67,7 +67,7 @@ data class CameraConfiguration( } data class Difference( - // Input Camera (cameraId, isActive) + // Input Camera (cameraId) val deviceChanged: Boolean, // Outputs & Session (Photo, Video, CodeScanner, HDR, Format) val outputsChanged: Boolean, @@ -75,14 +75,17 @@ data class CameraConfiguration( val 
sidePropsChanged: Boolean, // (isActive) changed val isActiveChanged: Boolean - ) + ) { + val hasChanges: Boolean + get() = deviceChanged || outputsChanged || sidePropsChanged || isActiveChanged + } companion object { fun copyOf(other: CameraConfiguration?): CameraConfiguration = other?.copy() ?: CameraConfiguration() fun difference(left: CameraConfiguration?, right: CameraConfiguration): Difference { // input device - val deviceChanged = left?.cameraId != right.cameraId || left?.isActive != right.isActive + val deviceChanged = left?.cameraId != right.cameraId // outputs val outputsChanged = deviceChanged || @@ -101,7 +104,7 @@ data class CameraConfiguration( left.videoStabilizationMode != right.videoStabilizationMode || left.exposure != right.exposure - val isActiveChanged = left?.isActive != right.isActive + val isActiveChanged = sidePropsChanged || left?.isActive != right.isActive return Difference( deviceChanged, diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt index 2f86c72..028cbf6 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt @@ -1,17 +1,24 @@ package com.mrousavy.camera.core +import android.content.res.Resources import android.graphics.ImageFormat import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraExtensionCharacteristics import android.hardware.camera2.CameraManager import android.hardware.camera2.CameraMetadata import android.os.Build +import android.util.Log import android.util.Range import android.util.Size +import android.util.SizeF +import android.view.SurfaceHolder import com.facebook.react.bridge.Arguments import com.facebook.react.bridge.ReadableArray import com.facebook.react.bridge.ReadableMap +import com.mrousavy.camera.extensions.bigger import 
com.mrousavy.camera.extensions.getPhotoSizes import com.mrousavy.camera.extensions.getVideoSizes +import com.mrousavy.camera.extensions.smaller import com.mrousavy.camera.extensions.toJSValue import com.mrousavy.camera.types.AutoFocusSystem import com.mrousavy.camera.types.DeviceType @@ -20,65 +27,127 @@ import com.mrousavy.camera.types.LensFacing import com.mrousavy.camera.types.Orientation import com.mrousavy.camera.types.PixelFormat import com.mrousavy.camera.types.VideoStabilizationMode +import com.mrousavy.camera.utils.CamcorderProfileUtils import kotlin.math.atan2 import kotlin.math.sqrt -class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String) { - val characteristics = cameraManager.getCameraCharacteristics(cameraId) - val hardwareLevel = HardwareLevel.fromCameraCharacteristics(characteristics) - val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0) - val extensions = getSupportedExtensions() +class CameraDeviceDetails(private val cameraManager: CameraManager, val cameraId: String) { + companion object { + private const val TAG = "CameraDeviceDetails" + + fun getMaximumPreviewSize(): Size { + // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap + // According to the Android Developer documentation, PREVIEW streams can have a resolution + // of up to the phone's display's resolution, with a maximum of 1920x1080. 
+ val display1080p = Size(1920, 1080) + val displaySize = Size( + Resources.getSystem().displayMetrics.widthPixels, + Resources.getSystem().displayMetrics.heightPixels + ) + val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller + + return if (isHighResScreen) display1080p else displaySize + } + } + + val characteristics by lazy { cameraManager.getCameraCharacteristics(cameraId) } + val hardwareLevel by lazy { HardwareLevel.fromCameraCharacteristics(characteristics) } + val capabilities by lazy { characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0) } + val extensions by lazy { getSupportedExtensions() } // device characteristics - val isMultiCam = capabilities.contains(11) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA - val supportsDepthCapture = capabilities.contains(8) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT - val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) - val supportsLowLightBoost = extensions.contains(4) // TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT - val lensFacing = LensFacing.fromCameraCharacteristics(characteristics) - val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false - val focalLengths = - characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) - // 35mm is the film standard sensor size - ?: floatArrayOf(35f) - val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!! - val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! 
- val minFocusDistance = getMinFocusDistanceCm() - val name = ( - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { - characteristics.get(CameraCharacteristics.INFO_VERSION) - } else { - null - } - ) ?: "$lensFacing ($cameraId)" + val isMultiCam by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) } + val supportsDepthCapture by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) } + val supportsRawCapture by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) } + val supportsLowLightBoost by lazy { + extensions.contains(CameraExtensionCharacteristics.EXTENSION_NIGHT) && + modes.contains(CameraCharacteristics.CONTROL_MODE_USE_SCENE_MODE) + } + val lensFacing by lazy { LensFacing.fromCameraCharacteristics(characteristics) } + val hasFlash by lazy { characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false } + val focalLengths by lazy { + // 35mm is the film standard sensor size + characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f) + } + val sensorSize by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE) ?: SizeF(0f, 0f) } + val activeSize + get() = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! 
+ val sensorOrientation by lazy { + val degrees = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0 + return@lazy Orientation.fromRotationDegrees(degrees) + } + val minFocusDistance by lazy { getMinFocusDistanceCm() } + val name by lazy { + val info = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION) else null + return@lazy info ?: "$lensFacing ($cameraId)" + } // "formats" (all possible configurations for this device) - val zoomRange = ( - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { + val maxDigitalZoom by lazy { characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f } + val zoomRange by lazy { + val range = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) } else { null } - ) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f) - val physicalDevices = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && characteristics.physicalCameraIds.isNotEmpty()) { - characteristics.physicalCameraIds - } else { - setOf(cameraId) + return@lazy range ?: Range(1f, maxDigitalZoom) } - val minZoom = zoomRange.lower.toDouble() - val maxZoom = zoomRange.upper.toDouble() + val physicalDevices by lazy { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && characteristics.physicalCameraIds.isNotEmpty()) { + characteristics.physicalCameraIds + } else { + setOf(cameraId) + } + } + val minZoom by lazy { zoomRange.lower.toDouble() } + val maxZoom by lazy { zoomRange.upper.toDouble() } - val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! 
- val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) - val exposureRange = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE) ?: Range(0, 0) - val digitalStabilizationModes = + val cameraConfig by lazy { characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! } + val isoRange by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) } + val exposureRange by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE) ?: Range(0, 0) } + val digitalStabilizationModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0) - val opticalStabilizationModes = + } + val opticalStabilizationModes by lazy { characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0) - val supportsPhotoHdr = extensions.contains(3) // TODO: CameraExtensionCharacteristics.EXTENSION_HDR - val supportsVideoHdr = getHasVideoHdr() - val autoFocusSystem = getAutoFocusSystemMode() + } + val supportsPhotoHdr by lazy { extensions.contains(CameraExtensionCharacteristics.EXTENSION_HDR) } + val supportsVideoHdr by lazy { getHasVideoHdr() } + val autoFocusSystem by lazy { getAutoFocusSystemMode() } + val supportsYuvProcessing by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING) } + val supportsPrivateProcessing by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING) } + val supportsZsl by lazy { supportsYuvProcessing || supportsPrivateProcessing } + + val isBackwardsCompatible by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) } + val supportsSnapshotCapture by lazy { supportsSnapshotCapture() } + + val supportsFocusRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) ?: 0) > 
0 } + val supportsExposureRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0) > 0 } + val supportsWhiteBalanceRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) ?: 0) > 0 } + + val modes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_MODES)?.toList() ?: emptyList() } + val afModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES)?.toList() ?: emptyList() } + val aeModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES)?.toList() ?: emptyList() } + val awbModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES)?.toList() ?: emptyList() } + + val availableAberrationModes by lazy { + characteristics.get(CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES) + ?: intArrayOf() + } + val availableHotPixelModes by lazy { characteristics.get(CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES) ?: intArrayOf() } + val availableEdgeModes by lazy { characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES) ?: intArrayOf() } + val availableDistortionCorrectionModes by lazy { getAvailableDistortionCorrectionModesOrEmptyArray() } + val availableShadingModes by lazy { characteristics.get(CameraCharacteristics.SHADING_AVAILABLE_MODES) ?: intArrayOf() } + val availableToneMapModes by lazy { characteristics.get(CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES) ?: intArrayOf() } + val availableNoiseReductionModes by lazy { + characteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES) + ?: intArrayOf() + } + + // TODO: Also add 10-bit YUV here? val videoFormat = ImageFormat.YUV_420_888 + val photoFormat = ImageFormat.JPEG // get extensions (HDR, Night Mode, ..) 
private fun getSupportedExtensions(): List = @@ -89,6 +158,13 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String emptyList() } + private fun getAvailableDistortionCorrectionModesOrEmptyArray(): IntArray = + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + characteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES) ?: intArrayOf() + } else { + intArrayOf() + } + private fun getHasVideoHdr(): Boolean { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) { @@ -102,10 +178,19 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String private fun getMinFocusDistanceCm(): Double { val distance = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) if (distance == null || distance == 0f) return 0.0 + if (distance.isNaN() || distance.isInfinite()) return 0.0 // distance is in "diopters", meaning 1/meter. 
Convert to meters, then centi-meters return 1.0 / distance * 100.0 } + @Suppress("RedundantIf") + private fun supportsSnapshotCapture(): Boolean { + // As per CameraDevice.TEMPLATE_VIDEO_SNAPSHOT in documentation: + if (hardwareLevel == HardwareLevel.LEGACY) return false + if (supportsDepthCapture && !isBackwardsCompatible) return false + return true + } + private fun createStabilizationModes(): ReadableArray { val array = Arguments.createArray() digitalStabilizationModes.forEach { videoStabilizationMode -> @@ -146,6 +231,9 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String } private fun getFieldOfView(focalLength: Float): Double { + if ((sensorSize.width == 0f) || (sensorSize.height == 0f)) { + return 0.0 + } val sensorDiagonal = sqrt((sensorSize.width * sensorSize.width + sensorSize.height * sensorSize.height).toDouble()) val fovRadians = 2.0 * atan2(sensorDiagonal, (2.0 * focalLength)) return Math.toDegrees(fovRadians) @@ -156,18 +244,31 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String return getFieldOfView(smallestFocalLength) } - private fun getVideoSizes(): List = characteristics.getVideoSizes(cameraId, videoFormat) - private fun getPhotoSizes(): List = characteristics.getPhotoSizes(ImageFormat.JPEG) + fun getVideoSizes(format: Int): List = characteristics.getVideoSizes(cameraId, format) + fun getPhotoSizes(): List = characteristics.getPhotoSizes(photoFormat) + fun getPreviewSizes(): List { + val maximumPreviewSize = getMaximumPreviewSize() + return cameraConfig.getOutputSizes(SurfaceHolder::class.java) + .filter { it.bigger <= maximumPreviewSize.bigger && it.smaller <= maximumPreviewSize.smaller } + } private fun getFormats(): ReadableArray { val array = Arguments.createArray() - val videoSizes = getVideoSizes() + val videoSizes = getVideoSizes(videoFormat) val photoSizes = getPhotoSizes() videoSizes.forEach { videoSize -> val frameDuration = 
cameraConfig.getOutputMinFrameDuration(videoFormat, videoSize) - val maxFps = (1.0 / (frameDuration.toDouble() / 1_000_000_000)).toInt() + var maxFps = (1.0 / (frameDuration.toDouble() / 1_000_000_000)).toInt() + val maxEncoderFps = CamcorderProfileUtils.getMaximumFps(cameraId, videoSize) + if (maxEncoderFps != null && maxEncoderFps < maxFps) { + Log.i( + TAG, + "Camera could do $maxFps FPS at $videoSize, but Media Encoder can only do $maxEncoderFps FPS. Clamping to $maxEncoderFps FPS..." + ) + maxFps = maxEncoderFps + } photoSizes.forEach { photoSize -> val map = buildFormatMap(photoSize, videoSize, Range(1, maxFps)) @@ -175,8 +276,6 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String } } - // TODO: Add high-speed video ranges (high-fps / slow-motion) - return array } @@ -223,14 +322,14 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String map.putBoolean("isMultiCam", isMultiCam) map.putBoolean("supportsRawCapture", supportsRawCapture) map.putBoolean("supportsLowLightBoost", supportsLowLightBoost) - map.putBoolean("supportsFocus", true) // I believe every device here supports focussing + map.putBoolean("supportsFocus", supportsFocusRegions) map.putDouble("minZoom", minZoom) map.putDouble("maxZoom", maxZoom) map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android map.putDouble("minExposure", exposureRange.lower.toDouble()) map.putDouble("maxExposure", exposureRange.upper.toDouble()) map.putString("hardwareLevel", hardwareLevel.unionValue) - map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue) + map.putString("sensorOrientation", sensorOrientation.unionValue) map.putArray("formats", getFormats()) return map } diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt index 70dc1ed..834c190 100644 --- 
a/package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt @@ -62,6 +62,8 @@ class FlashUnavailableError : "flash-unavailable", "The Camera Device does not have a flash unit! Make sure you select a device where `device.hasFlash`/`device.hasTorch` is true." ) +class FocusNotSupportedError : + CameraError("device", "focus-not-supported", "The currently selected camera device does not support focusing!") class CameraNotReadyError : CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!") @@ -71,6 +73,8 @@ class CameraSessionCannotBeConfiguredError(cameraId: String) : CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera #$cameraId!") class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error") +class NoOutputsError : + CameraError("session", "no-outputs", "Cannot create a CameraCaptureSession without any outputs! (PREVIEW, PHOTO, VIDEO, ...)") class PropRequiresFormatToBeNonNullError(propName: String) : CameraError("format", "format-required", "The prop \"$propName\" requires a format to be set, but format was null!") @@ -100,6 +104,8 @@ class PhotoNotEnabledError : CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.") class CaptureAbortedError(wasImageCaptured: Boolean) : CameraError("capture", "aborted", "The image capture was aborted! 
Was Image captured: $wasImageCaptured") +class FocusCanceledError : CameraError("capture", "focus-canceled", "The focus operation was canceled.") +class CaptureTimedOutError : CameraError("capture", "timed-out", "The image capture was aborted because it timed out.") class UnknownCaptureError(wasImageCaptured: Boolean) : CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured") class RecorderError(name: String, extra: Int) : @@ -113,6 +119,16 @@ class RecordingInProgressError : "recording-in-progress", "There is already an active video recording in progress! Did you call startRecording() twice?" ) +class FrameInvalidError : + CameraError( + "capture", + "frame-invalid", + "Trying to access an already closed Frame! " + + "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n" + + "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n" + + "- If you want to do async processing, use `runAsync(...)` instead.\n" + + "- If you want to use runOnJS, increment it's ref-count: `frame.incrementRefCount()`" + ) class CodeTypeNotSupportedError(codeType: String) : CameraError( diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt index 3476249..501cab3 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt @@ -5,50 +5,33 @@ import android.content.Context import android.content.pm.PackageManager import android.graphics.ImageFormat import android.graphics.Point -import android.hardware.camera2.CameraCaptureSession import android.hardware.camera2.CameraCharacteristics -import android.hardware.camera2.CameraDevice import android.hardware.camera2.CameraManager -import android.hardware.camera2.CameraMetadata -import 
android.hardware.camera2.CaptureRequest import android.hardware.camera2.CaptureResult import android.hardware.camera2.TotalCaptureResult -import android.hardware.camera2.params.MeteringRectangle import android.media.Image import android.media.ImageReader -import android.os.Build import android.util.Log -import android.util.Range import android.util.Size import android.view.Surface import android.view.SurfaceHolder import androidx.core.content.ContextCompat import com.google.mlkit.vision.barcode.common.Barcode +import com.mrousavy.camera.core.capture.RepeatingCaptureRequest import com.mrousavy.camera.core.outputs.BarcodeScannerOutput import com.mrousavy.camera.core.outputs.PhotoOutput import com.mrousavy.camera.core.outputs.SurfaceOutput import com.mrousavy.camera.core.outputs.VideoPipelineOutput -import com.mrousavy.camera.extensions.capture import com.mrousavy.camera.extensions.closestToOrMax -import com.mrousavy.camera.extensions.createCaptureSession -import com.mrousavy.camera.extensions.createPhotoCaptureRequest -import com.mrousavy.camera.extensions.getPhotoSizes -import com.mrousavy.camera.extensions.getPreviewTargetSize -import com.mrousavy.camera.extensions.getVideoSizes -import com.mrousavy.camera.extensions.openCamera -import com.mrousavy.camera.extensions.setZoom import com.mrousavy.camera.frameprocessor.Frame -import com.mrousavy.camera.frameprocessor.FrameProcessor import com.mrousavy.camera.types.Flash +import com.mrousavy.camera.types.LensFacing import com.mrousavy.camera.types.Orientation import com.mrousavy.camera.types.QualityPrioritization import com.mrousavy.camera.types.RecordVideoOptions -import com.mrousavy.camera.types.Torch -import com.mrousavy.camera.types.VideoStabilizationMode import com.mrousavy.camera.utils.ImageFormatUtils import java.io.Closeable -import java.lang.IllegalStateException -import java.util.concurrent.CancellationException +import kotlin.coroutines.cancellation.CancellationException import 
kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking @@ -57,8 +40,8 @@ import kotlinx.coroutines.sync.withLock import java.io.File class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) : - CameraManager.AvailabilityCallback(), - Closeable { + Closeable, + PersistentCameraCaptureSession.Callback { companion object { private const val TAG = "CameraSession" } @@ -67,14 +50,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam private var configuration: CameraConfiguration? = null // Camera State - private var cameraDevice: CameraDevice? = null - set(value) { - field = value - cameraDeviceDetails = if (value != null) CameraDeviceDetails(cameraManager, value.id) else null - } - private var cameraDeviceDetails: CameraDeviceDetails? = null - private var captureSession: CameraCaptureSession? = null - private var previewRequest: CaptureRequest.Builder? = null + private val captureSession = PersistentCameraCaptureSession(cameraManager, this) private var photoOutput: PhotoOutput? = null private var videoOutput: VideoPipelineOutput? = null private var codeScannerOutput: BarcodeScannerOutput? = null @@ -102,11 +78,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam field = value updateVideoOutputs() } - var frameProcessor: FrameProcessor? 
= null - set(value) { - field = value - updateVideoOutputs() - } val orientation: Orientation get() { @@ -116,14 +87,9 @@ class CameraSession(private val context: Context, private val cameraManager: Cam return Orientation.fromRotationDegrees(sensorRotation) } - init { - cameraManager.registerAvailabilityCallback(this, CameraQueues.cameraQueue.handler) - } - override fun close() { Log.i(TAG, "Closing CameraSession...") isDestroyed = true - cameraManager.unregisterAvailabilityCallback(this) runBlocking { mutex.withLock { destroy() @@ -133,18 +99,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam Log.i(TAG, "CameraSession closed!") } - override fun onCameraAvailable(cameraId: String) { - super.onCameraAvailable(cameraId) - if (this.configuration?.cameraId == cameraId && cameraDevice == null && configuration?.isActive == true) { - Log.i(TAG, "Camera #$cameraId is now available again, trying to re-open it now...") - coroutineScope.launch { - configure { - // re-open CameraDevice if needed - } - } - } - } - suspend fun configure(lambda: (configuration: CameraConfiguration) -> Unit) { Log.i(TAG, "configure { ... }: Waiting for lock...") @@ -153,6 +107,12 @@ class CameraSession(private val context: Context, private val cameraManager: Cam val config = CameraConfiguration.copyOf(this.configuration) lambda(config) val diff = CameraConfiguration.difference(this.configuration, config) + this.configuration = config + + if (!diff.hasChanges) { + Log.i(TAG, "Nothing changed, aborting configure { ... }") + return@withLock + } if (isDestroyed) { Log.i(TAG, "CameraSession is already destroyed. Skipping configure { ... }") @@ -162,29 +122,11 @@ class CameraSession(private val context: Context, private val cameraManager: Cam Log.i(TAG, "configure { ... }: Updating CameraSession Configuration... 
$diff") try { - val needsRebuild = cameraDevice == null || captureSession == null - if (needsRebuild) { - Log.i(TAG, "Need to rebuild CameraDevice and CameraCaptureSession...") - } - - // Since cameraDevice and captureSession are OS resources, we have three possible paths here: - if (needsRebuild) { - if (config.isActive) { - // A: The Camera has been torn down by the OS and we want it to be active - rebuild everything - Log.i(TAG, "Need to rebuild CameraDevice and CameraCaptureSession...") - configureCameraDevice(config) - configureOutputs(config) - configureCaptureRequest(config) - } else { - // B: The Camera has been torn down by the OS but it's currently in the background - ignore this - Log.i(TAG, "CameraDevice and CameraCaptureSession is torn down but Camera is not active, skipping update...") - } - } else { - // C: The Camera has not been torn down and we just want to update some props - update incrementally + captureSession.withConfiguration { // Build up session or update any props if (diff.deviceChanged) { // 1. cameraId changed, open device - configureCameraDevice(config) + configureInput(config) } if (diff.outputsChanged) { // 2. outputs changed, build new session @@ -194,10 +136,18 @@ class CameraSession(private val context: Context, private val cameraManager: Cam // 3. zoom etc changed, update repeating request configureCaptureRequest(config) } + if (diff.isActiveChanged) { + // 4. Either start or stop the session + val isActive = config.isActive && config.preview.isEnabled + captureSession.setIsActive(isActive) + } } - Log.i(TAG, "Successfully updated CameraSession Configuration! isActive: ${config.isActive}") - this.configuration = config + Log.i( + TAG, + "configure { ... }: Completed CameraSession Configuration! 
(isActive: ${config.isActive}, isRunning: ${captureSession.isRunning})" + ) + isRunning = captureSession.isRunning // Notify about Camera initialization if (diff.deviceChanged) { @@ -212,8 +162,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam private fun destroy() { Log.i(TAG, "Destroying session..") - cameraDevice?.close() - cameraDevice = null + captureSession.close() photoOutput?.close() photoOutput = null @@ -269,66 +218,20 @@ class CameraSession(private val context: Context, private val cameraManager: Cam Log.i(TAG, "Preview Output destroyed!") } - /** - * Set up the `CameraDevice` (`cameraId`) - */ - private suspend fun configureCameraDevice(configuration: CameraConfiguration) { - if (!configuration.isActive) { - // If isActive=false, we don't care if the device is opened or closed. - // Android OS can close the CameraDevice if it needs it, otherwise we keep it warm. - Log.i(TAG, "isActive is false, skipping CameraDevice configuration.") - return - } - - if (cameraDevice != null) { - // Close existing device - Log.i(TAG, "Closing previous Camera #${cameraDevice?.id}...") - cameraDevice?.close() - cameraDevice = null - } - isRunning = false - - // Check Camera Permission - val cameraPermission = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) - if (cameraPermission != PackageManager.PERMISSION_GRANTED) throw CameraPermissionError() - - // Open new device + private fun configureInput(configuration: CameraConfiguration) { + Log.i(TAG, "Configuring inputs for CameraSession...") val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() - Log.i(TAG, "Configuring Camera #$cameraId...") - cameraDevice = cameraManager.openCamera(cameraId, { device, error -> - if (cameraDevice != device) { - // a previous device has been disconnected, but we already have a new one. 
- // this is just normal behavior - return@openCamera - } - - this.cameraDevice = null - isRunning = false - - if (error != null) { - Log.e(TAG, "Camera #${device.id} has been unexpectedly disconnected!", error) - callback.onError(error) - } else { - Log.i(TAG, "Camera #${device.id} has been gracefully disconnected!") - } - }, CameraQueues.cameraQueue) - - Log.i(TAG, "Successfully configured Camera #$cameraId!") + val status = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) + if (status != PackageManager.PERMISSION_GRANTED) throw CameraPermissionError() + isRunning = false + captureSession.setInput(cameraId) } /** * Set up the `CaptureSession` with all outputs (preview, photo, video, codeScanner) and their HDR/Format settings. */ private suspend fun configureOutputs(configuration: CameraConfiguration) { - if (!configuration.isActive) { - Log.i(TAG, "isActive is false, skipping CameraCaptureSession configuration.") - return - } - val cameraDevice = cameraDevice - if (cameraDevice == null) { - Log.i(TAG, "CameraSession hasn't configured a CameraDevice, skipping session configuration...") - return - } + val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() // Destroy previous outputs Log.i(TAG, "Destroying previous outputs...") @@ -340,20 +243,20 @@ class CameraSession(private val context: Context, private val cameraManager: Cam codeScannerOutput = null isRunning = false - val characteristics = cameraManager.getCameraCharacteristics(cameraDevice.id) + val deviceDetails = CameraDeviceDetails(cameraManager, cameraId) val format = configuration.format - Log.i(TAG, "Creating outputs for Camera #${cameraDevice.id}...") + Log.i(TAG, "Creating outputs for Camera #$cameraId...") - val isSelfie = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT + val isSelfie = deviceDetails.lensFacing == LensFacing.FRONT val outputs = mutableListOf() // Photo Output val photo = configuration.photo as? 
CameraConfiguration.Output.Enabled if (photo != null) { - val imageFormat = ImageFormat.JPEG - val sizes = characteristics.getPhotoSizes(imageFormat) + val imageFormat = deviceDetails.photoFormat + val sizes = deviceDetails.getPhotoSizes() val size = sizes.closestToOrMax(format?.photoSize) val maxImages = 10 @@ -373,7 +276,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam val video = configuration.video as? CameraConfiguration.Output.Enabled if (video != null) { val imageFormat = video.config.pixelFormat.toImageFormat() - val sizes = characteristics.getVideoSizes(cameraDevice.id, imageFormat) + val sizes = deviceDetails.getVideoSizes(imageFormat) val size = sizes.closestToOrMax(format?.videoSize) Log.i(TAG, "Adding ${size.width}x${size.height} Video Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") @@ -383,6 +286,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam video.config.pixelFormat, isSelfie, video.config.enableFrameProcessor, + video.config.enableGpuBuffers, callback ) val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr) @@ -395,7 +299,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam if (preview != null) { // Compute Preview Size based on chosen video size val videoSize = videoOutput?.size ?: format?.videoSize - val size = characteristics.getPreviewTargetSize(videoSize) + val sizes = deviceDetails.getPreviewSizes() + val size = sizes.closestToOrMax(videoSize) val enableHdr = video?.config?.enableHdr ?: false @@ -408,7 +313,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam ) outputs.add(output) // Size is usually landscape, so we flip it here - previewView?.size = Size(size.height, size.width) + previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation) } // CodeScanner Output @@ -421,7 +326,7 @@ class CameraSession(private val context: Context, private 
val cameraManager: Cam } val imageFormat = ImageFormat.YUV_420_888 - val sizes = characteristics.getVideoSizes(cameraDevice.id, imageFormat) + val sizes = deviceDetails.getVideoSizes(imageFormat) val size = sizes.closestToOrMax(Size(1280, 720)) Log.i(TAG, "Adding ${size.width}x${size.height} CodeScanner Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") @@ -432,175 +337,63 @@ class CameraSession(private val context: Context, private val cameraManager: Cam } // Create session - captureSession = cameraDevice.createCaptureSession(cameraManager, outputs, { session -> - if (this.captureSession != session) { - // a previous session has been closed, but we already have a new one. - // this is just normal behavior - return@createCaptureSession - } + captureSession.setOutputs(outputs) - // onClosed - this.captureSession = null - isRunning = false - - Log.i(TAG, "Camera Session $session has been closed.") - }, CameraQueues.cameraQueue) - - Log.i(TAG, "Successfully configured Session with ${outputs.size} outputs for Camera #${cameraDevice.id}!") + Log.i(TAG, "Successfully configured Session with ${outputs.size} outputs for Camera #$cameraId!") // Update Frame Processor and RecordingSession for newly changed output updateVideoOutputs() } - private fun createRepeatingRequest(device: CameraDevice, targets: List, config: CameraConfiguration): CaptureRequest { - val deviceDetails = cameraDeviceDetails ?: CameraDeviceDetails(cameraManager, device.id) - - val template = if (config.video.isEnabled) CameraDevice.TEMPLATE_RECORD else CameraDevice.TEMPLATE_PREVIEW - val captureRequest = device.createCaptureRequest(template) - - targets.forEach { t -> captureRequest.addTarget(t) } - - val format = config.format - - // Set FPS - val fps = config.fps - if (fps != null) { - if (format == null) throw PropRequiresFormatToBeNonNullError("fps") - if (format.maxFps < fps) throw InvalidFpsError(fps) - captureRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps)) 
- } - - // Set Video Stabilization - if (config.videoStabilizationMode != VideoStabilizationMode.OFF) { - if (format == null) throw PropRequiresFormatToBeNonNullError("videoStabilizationMode") - if (!format.videoStabilizationModes.contains( - config.videoStabilizationMode - ) - ) { - throw InvalidVideoStabilizationMode(config.videoStabilizationMode) - } - } - when (config.videoStabilizationMode) { - VideoStabilizationMode.OFF -> { - // do nothing - } - VideoStabilizationMode.STANDARD -> { - val mode = if (Build.VERSION.SDK_INT >= - Build.VERSION_CODES.TIRAMISU - ) { - CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION - } else { - CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON - } - captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode) - } - VideoStabilizationMode.CINEMATIC, VideoStabilizationMode.CINEMATIC_EXTENDED -> { - captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) - } - } - - // Set HDR - val video = config.video as? CameraConfiguration.Output.Enabled - val videoHdr = video?.config?.enableHdr - if (videoHdr == true) { - if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr") - if (!format.supportsVideoHdr) throw InvalidVideoHdrError() - captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) - } else if (config.enableLowLightBoost) { - if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError() - captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT) - } - - // Set Exposure Bias - val exposure = config.exposure?.toInt() - if (exposure != null) { - val clamped = deviceDetails.exposureRange.clamp(exposure) - captureRequest.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, clamped) - } - - // Set Zoom - // TODO: Cache camera characteristics? Check perf. 
- val cameraCharacteristics = cameraManager.getCameraCharacteristics(device.id) - captureRequest.setZoom(config.zoom, cameraCharacteristics) - - // Set Torch - if (config.torch == Torch.ON) { - if (!deviceDetails.hasFlash) throw FlashUnavailableError() - captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) - } - - // Start repeating request if the Camera is active - return captureRequest.build() - } - private fun configureCaptureRequest(config: CameraConfiguration) { - val captureSession = captureSession + val video = config.video as? CameraConfiguration.Output.Enabled + val enableVideo = video != null + val enableVideoHdr = video?.config?.enableHdr == true - if (!config.isActive) { - isRunning = false - try { - captureSession?.stopRepeating() - } catch (e: IllegalStateException) { - // ignore - captureSession is already closed. - } - return - } - if (captureSession == null) { - Log.i(TAG, "CameraSession hasn't configured the capture session, skipping CaptureRequest...") - return - } - - val preview = config.preview as? 
CameraConfiguration.Output.Enabled - val previewSurface = preview?.config?.surface - val targets = listOfNotNull(previewSurface, videoOutput?.surface, codeScannerOutput?.surface) - if (targets.isEmpty()) { - Log.i(TAG, "CameraSession has no repeating outputs (Preview, Video, CodeScanner), skipping CaptureRequest...") - return - } - - val request = createRepeatingRequest(captureSession.device, targets, config) - captureSession.setRepeatingRequest(request, null, null) - isRunning = true + captureSession.setRepeatingRequest( + RepeatingCaptureRequest( + enableVideo, + config.torch, + config.fps, + config.videoStabilizationMode, + enableVideoHdr, + config.enableLowLightBoost, + config.exposure, + config.zoom, + config.format + ) + ) } suspend fun takePhoto( qualityPrioritization: QualityPrioritization, - flashMode: Flash, + flash: Flash, enableShutterSound: Boolean, - enableRedEyeReduction: Boolean, enableAutoStabilization: Boolean, + enablePrecapture: Boolean, outputOrientation: Orientation ): CapturedPhoto { - val captureSession = captureSession ?: throw CameraNotReadyError() val photoOutput = photoOutput ?: throw PhotoNotEnabledError() - Log.i(TAG, "Photo capture 0/3 - preparing capture request (${photoOutput.size.width}x${photoOutput.size.height})...") - - val zoom = configuration?.zoom ?: 1f - - val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) - val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics) - val captureRequest = captureSession.device.createPhotoCaptureRequest( - cameraManager, - photoOutput.surface, - zoom, + Log.i(TAG, "Photo capture 1/3 - capturing ${photoOutput.size.width}x${photoOutput.size.height} image...") + val result = captureSession.capture( qualityPrioritization, - flashMode, - enableRedEyeReduction, + flash, enableAutoStabilization, photoOutput.enableHdr, - orientation + outputOrientation, + enableShutterSound, + enablePrecapture ) - Log.i(TAG, "Photo capture 1/3 - 
starting capture...") - val result = captureSession.capture(captureRequest, enableShutterSound) - val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! - Log.i(TAG, "Photo capture 2/3 complete - received metadata with timestamp $timestamp") + try { + val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! + Log.i(TAG, "Photo capture 2/3 - waiting for image with timestamp $timestamp now...") val image = photoOutputSynchronizer.await(timestamp) - val isMirrored = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT - - Log.i(TAG, "Photo capture 3/3 complete - received ${image.width} x ${image.height} image.") + Log.i(TAG, "Photo capture 3/3 - received ${image.width} x ${image.height} image, preparing result...") + val deviceDetails = captureSession.getActiveDeviceDetails() + val isMirrored = deviceDetails?.lensFacing == LensFacing.FRONT return CapturedPhoto(image, result, orientation, isMirrored, image.format) } catch (e: CancellationException) { throw CaptureAbortedError(false) @@ -628,13 +421,13 @@ class CameraSession(private val context: Context, private val cameraManager: Cam mutex.withLock { if (recording != null) throw RecordingInProgressError() val videoOutput = videoOutput ?: throw VideoNotEnabledError() - val cameraDevice = cameraDevice ?: throw CameraNotReadyError() + val cameraId = configuration?.cameraId ?: throw NoCameraDeviceError() val fps = configuration?.fps ?: 30 val recording = RecordingSession( context, - cameraDevice.id, + cameraId, videoOutput.size, enableAudio, fps, @@ -674,40 +467,16 @@ class CameraSession(private val context: Context, private val cameraManager: Cam } } - suspend fun focus(x: Int, y: Int): Unit = throw NotImplementedError("focus() is not yet implemented!") + override fun onError(error: Throwable) { + callback.onError(error) + } - private suspend fun focus(point: Point) { - mutex.withLock { - // TODO: Fix this method - val captureSession = captureSession ?: throw 
CameraNotReadyError() - val request = previewRequest ?: throw CameraNotReadyError() + suspend fun focus(x: Int, y: Int) { + val previewView = previewView ?: throw CameraNotReadyError() + val deviceDetails = captureSession.getActiveDeviceDetails() ?: throw CameraNotReadyError() - val weight = MeteringRectangle.METERING_WEIGHT_MAX - 1 - val focusAreaTouch = MeteringRectangle(point, Size(150, 150), weight) - - // Quickly pause preview - captureSession.stopRepeating() - - request.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL) - request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF) - captureSession.capture(request.build(), null, null) - - // Add AF trigger with focus region - val characteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) - val maxSupportedFocusRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0 - if (maxSupportedFocusRegions >= 1) { - request.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(focusAreaTouch)) - } - request.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO) - request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO) - request.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START) - - captureSession.capture(request.build(), false) - - // Resume preview - request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE) - captureSession.setRepeatingRequest(request.build(), null, null) - } + val cameraPoint = previewView.convertLayerPointToCameraCoordinates(Point(x, y), deviceDetails) + captureSession.focus(cameraPoint) } data class CapturedPhoto( diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt b/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt index 6a22f59..b5d147c 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt +++ 
b/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt @@ -39,7 +39,7 @@ class CodeScannerPipeline( var isBusy = false imageReader = ImageReader.newInstance(size.width, size.height, format, MAX_IMAGES) imageReader.setOnImageAvailableListener({ reader -> - val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener + val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener if (isBusy) { // We're currently executing on a previous Frame, so we skip this one. diff --git a/package/android/src/main/java/com/mrousavy/camera/core/PersistentCameraCaptureSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/PersistentCameraCaptureSession.kt new file mode 100644 index 0000000..3bb1c15 --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/core/PersistentCameraCaptureSession.kt @@ -0,0 +1,378 @@ +package com.mrousavy.camera.core + +import android.graphics.Point +import android.hardware.camera2.CameraAccessException +import android.hardware.camera2.CameraCaptureSession +import android.hardware.camera2.CameraDevice +import android.hardware.camera2.CameraManager +import android.hardware.camera2.CaptureRequest +import android.hardware.camera2.TotalCaptureResult +import android.util.Log +import com.mrousavy.camera.core.capture.PhotoCaptureRequest +import com.mrousavy.camera.core.capture.RepeatingCaptureRequest +import com.mrousavy.camera.core.outputs.SurfaceOutput +import com.mrousavy.camera.extensions.PrecaptureOptions +import com.mrousavy.camera.extensions.PrecaptureTrigger +import com.mrousavy.camera.extensions.capture +import com.mrousavy.camera.extensions.createCaptureSession +import com.mrousavy.camera.extensions.isValid +import com.mrousavy.camera.extensions.openCamera +import com.mrousavy.camera.extensions.precapture +import com.mrousavy.camera.extensions.tryAbortCaptures +import com.mrousavy.camera.extensions.tryStopRepeating +import com.mrousavy.camera.types.Flash +import 
com.mrousavy.camera.types.Orientation +import com.mrousavy.camera.types.QualityPrioritization +import java.io.Closeable +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.coroutineScope +import kotlinx.coroutines.delay +import kotlinx.coroutines.isActive +import kotlinx.coroutines.launch +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock + +/** + * A [CameraCaptureSession] wrapper that safely handles interruptions and remains open whenever available. + * + * This class aims to be similar to Apple's `AVCaptureSession`. + */ +class PersistentCameraCaptureSession(private val cameraManager: CameraManager, private val callback: Callback) : Closeable { + companion object { + private const val TAG = "PersistentCameraCaptureSession" + private const val FOCUS_RESET_TIMEOUT = 3000L + private const val PRECAPTURE_LOCK_TIMEOUT = 5000L + } + + // Inputs/Dependencies + private var cameraId: String? = null + private var outputs: List = emptyList() + private var repeatingRequest: RepeatingCaptureRequest? = null + private var isActive = false + + // State/Dependants + private var device: CameraDevice? = null // depends on [cameraId] + private var session: CameraCaptureSession? = null // depends on [device, surfaceOutputs] + private var cameraDeviceDetails: CameraDeviceDetails? = null // depends on [device] + + private val mutex = Mutex() + private var didDestroyFromOutside = false + private var focusJob: Job? = null + private val coroutineScope = CoroutineScope(CameraQueues.cameraQueue.coroutineDispatcher) + + val isRunning: Boolean + get() = isActive && session != null && device != null && !didDestroyFromOutside + + override fun close() { + focusJob?.cancel() + session?.tryAbortCaptures() + device?.close() + } + + private fun assertLocked(method: String) { + if (!mutex.isLocked) { + throw SessionIsNotLockedError("Failed to call $method, session is not locked! 
Call beginConfiguration() first.") + } + } + + suspend fun withConfiguration(block: suspend () -> Unit) { + // Cancel any ongoing focus jobs + focusJob?.cancel() + focusJob = null + + mutex.withLock { + block() + configure() + } + } + + fun setInput(cameraId: String) { + Log.d(TAG, "--> setInput($cameraId)") + assertLocked("setInput") + if (this.cameraId != cameraId || device?.id != cameraId) { + this.cameraId = cameraId + + // Abort any captures in the session so we get the onCaptureFailed handler for any outstanding photos + session?.tryAbortCaptures() + session = null + // Closing the device will also close the session above - even faster than manually closing it. + device?.close() + device = null + } + } + + fun setOutputs(outputs: List) { + Log.d(TAG, "--> setOutputs($outputs)") + assertLocked("setOutputs") + if (this.outputs != outputs) { + this.outputs = outputs + + if (outputs.isNotEmpty()) { + // Outputs have changed to something else, we don't wanna destroy the session directly + // so the outputs can be kept warm. The session that gets created next will take over the outputs. 
+ session?.tryAbortCaptures() + } else { + // Just stop it, we don't have any outputs + session?.close() + } + session = null + } + } + + fun setRepeatingRequest(request: RepeatingCaptureRequest) { + assertLocked("setRepeatingRequest") + Log.d(TAG, "--> setRepeatingRequest(...)") + if (this.repeatingRequest != request) { + this.repeatingRequest = request + } + } + + fun setIsActive(isActive: Boolean) { + assertLocked("setIsActive") + Log.d(TAG, "--> setIsActive($isActive)") + if (this.isActive != isActive) { + this.isActive = isActive + } + if (isActive && didDestroyFromOutside) { + didDestroyFromOutside = false + } + } + + suspend fun capture( + qualityPrioritization: QualityPrioritization, + flash: Flash, + enableAutoStabilization: Boolean, + enablePhotoHdr: Boolean, + orientation: Orientation, + enableShutterSound: Boolean, + enablePrecapture: Boolean + ): TotalCaptureResult { + // Cancel any ongoing focus jobs + focusJob?.cancel() + focusJob = null + + mutex.withLock { + Log.i(TAG, "Capturing photo...") + val session = session ?: throw CameraNotReadyError() + val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() + val photoRequest = PhotoCaptureRequest( + repeatingRequest, + qualityPrioritization, + enableAutoStabilization, + enablePhotoHdr, + orientation + ) + val device = session.device + val deviceDetails = getOrCreateCameraDeviceDetails(device) + + // Submit a single high-res capture to photo output as well as all preview outputs + val outputs = outputs + val repeatingOutputs = outputs.filter { it.isRepeating } + + val skipPrecapture = !enablePrecapture || qualityPrioritization == QualityPrioritization.SPEED + if (skipPrecapture && flash == Flash.OFF) { + // 0. We want to take a picture as fast as possible, so skip any precapture sequence and just capture one Frame. 
+ Log.i(TAG, "Using fast capture path without pre-capture sequence...") + val singleRequest = photoRequest.createCaptureRequest(device, deviceDetails, outputs) + return session.capture(singleRequest.build(), enableShutterSound) + } + + Log.i(TAG, "Locking AF/AE/AWB...") + + // 1. Run precapture sequence + var needsFlash: Boolean + try { + val precaptureRequest = repeatingRequest.createCaptureRequest(device, deviceDetails, repeatingOutputs) + val skipIfPassivelyFocused = flash == Flash.OFF + val options = PrecaptureOptions( + listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE, PrecaptureTrigger.AWB), + flash, + emptyList(), + skipIfPassivelyFocused, + PRECAPTURE_LOCK_TIMEOUT + ) + val result = session.precapture(precaptureRequest, deviceDetails, options) + needsFlash = result.needsFlash + } catch (e: CaptureTimedOutError) { + // the precapture just timed out after 5 seconds, take picture anyways without focus. + needsFlash = false + } catch (e: FocusCanceledError) { + throw CaptureAbortedError(false) + } + + try { + // 2. Once precapture AF/AE/AWB successfully locked, capture the actual photo + val singleRequest = photoRequest.createCaptureRequest(device, deviceDetails, outputs) + if (needsFlash) { + singleRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) + singleRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE) + } + return session.capture(singleRequest.build(), enableShutterSound) + } finally { + // 3. 
After taking a photo we set the repeating request back to idle to remove the AE/AF/AWB locks again + val idleRequest = repeatingRequest.createCaptureRequest(device, deviceDetails, repeatingOutputs) + session.setRepeatingRequest(idleRequest.build(), null, null) + } + } + } + + suspend fun focus(point: Point) { + // Cancel any previous focus jobs + focusJob?.cancel() + focusJob = null + + mutex.withLock { + Log.i(TAG, "Focusing to $point...") + val session = session ?: throw CameraNotReadyError() + val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() + val device = session.device + val deviceDetails = getOrCreateCameraDeviceDetails(device) + if (!deviceDetails.supportsFocusRegions) { + throw FocusNotSupportedError() + } + val outputs = outputs.filter { it.isRepeating } + + // 1. Run a precapture sequence for AF, AE and AWB. + focusJob = coroutineScope.launch { + val request = repeatingRequest.createCaptureRequest(device, deviceDetails, outputs) + val options = + PrecaptureOptions(listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE), Flash.OFF, listOf(point), false, FOCUS_RESET_TIMEOUT) + session.precapture(request, deviceDetails, options) + } + focusJob?.join() + + // 2. Reset AF/AE/AWB again after 3 seconds timeout + focusJob = coroutineScope.launch { + delay(FOCUS_RESET_TIMEOUT) + if (!this.isActive) { + // this job got canceled from the outside + return@launch + } + if (!isRunning || this@PersistentCameraCaptureSession.session != session) { + // the view/session has already been destroyed in the meantime + return@launch + } + Log.i(TAG, "Resetting focus to auto-focus...") + repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request -> + session.setRepeatingRequest(request.build(), null, null) + } + } + } + } + + fun getActiveDeviceDetails(): CameraDeviceDetails? 
{ + val device = device ?: return null + return getOrCreateCameraDeviceDetails(device) + } + + private suspend fun configure() { + if (didDestroyFromOutside && !isActive) { + Log.d(TAG, "CameraCaptureSession has been destroyed by Android, skipping configuration until isActive is set to `true` again.") + return + } + Log.d(TAG, "Configure() with isActive: $isActive, ID: $cameraId, device: $device, session: $session") + val cameraId = cameraId ?: throw NoCameraDeviceError() + val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() + val outputs = outputs + + try { + didDestroyFromOutside = false + + val device = getOrCreateDevice(cameraId) + if (didDestroyFromOutside) return + + if (outputs.isEmpty()) return + val session = getOrCreateSession(device, outputs) + if (didDestroyFromOutside) return + + if (isActive) { + Log.d(TAG, "Updating repeating request...") + val details = getOrCreateCameraDeviceDetails(device) + val repeatingOutputs = outputs.filter { it.isRepeating } + val builder = repeatingRequest.createCaptureRequest(device, details, repeatingOutputs) + session.setRepeatingRequest(builder.build(), null, null) + } else { + Log.d(TAG, "Stopping repeating request...") + session.tryStopRepeating() + } + Log.d(TAG, "Configure() done! isActive: $isActive, ID: $cameraId, device: $device, session: $session") + } catch (e: CameraAccessException) { + if (didDestroyFromOutside) { + // Camera device has been destroyed in the meantime, that's fine. + Log.d(TAG, "Configure() canceled, session has been destroyed in the meantime!") + } else { + // Camera should still be active, so not sure what went wrong. 
Rethrow + throw e + } + } + } + + private suspend fun getOrCreateDevice(cameraId: String): CameraDevice { + val currentDevice = device + if (currentDevice?.id == cameraId && currentDevice.isValid) { + return currentDevice + } + + this.session?.tryAbortCaptures() + this.device?.close() + this.device = null + this.session = null + + Log.i(TAG, "Creating new device...") + val newDevice = cameraManager.openCamera(cameraId, { device, error -> + Log.i(TAG, "Camera $device closed!") + if (this.device == device) { + this.didDestroyFromOutside = true + this.session?.tryAbortCaptures() + this.session = null + this.device = null + this.isActive = false + } + if (error != null) { + callback.onError(error) + } + }, CameraQueues.videoQueue) + this.device = newDevice + return newDevice + } + + private suspend fun getOrCreateSession(device: CameraDevice, outputs: List): CameraCaptureSession { + val currentSession = session + if (currentSession?.device == device) { + return currentSession + } + + if (outputs.isEmpty()) throw NoOutputsError() + + Log.i(TAG, "Creating new session...") + val newSession = device.createCaptureSession(cameraManager, outputs, { session -> + Log.i(TAG, "Session $session closed!") + if (this.session == session) { + this.didDestroyFromOutside = true + this.session?.tryAbortCaptures() + this.session = null + this.isActive = false + } + }, CameraQueues.videoQueue) + session = newSession + return newSession + } + + private fun getOrCreateCameraDeviceDetails(device: CameraDevice): CameraDeviceDetails { + val currentDetails = cameraDeviceDetails + if (currentDetails?.cameraId == device.id) { + return currentDetails + } + + val newDetails = CameraDeviceDetails(cameraManager, device.id) + cameraDeviceDetails = newDetails + return newDetails + } + + interface Callback { + fun onError(error: Throwable) + } + + class SessionIsNotLockedError(message: String) : Error(message) +} diff --git a/package/android/src/main/java/com/mrousavy/camera/core/PreviewView.kt 
b/package/android/src/main/java/com/mrousavy/camera/core/PreviewView.kt index cfda57a..0727a1f 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/PreviewView.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/PreviewView.kt @@ -2,51 +2,111 @@ package com.mrousavy.camera.core import android.annotation.SuppressLint import android.content.Context +import android.graphics.Point import android.util.Log import android.util.Size -import android.view.Gravity import android.view.SurfaceHolder import android.view.SurfaceView -import android.widget.FrameLayout import com.facebook.react.bridge.UiThreadUtil -import com.mrousavy.camera.extensions.getMaximumPreviewSize +import com.mrousavy.camera.extensions.resize +import com.mrousavy.camera.extensions.rotatedBy +import com.mrousavy.camera.types.Orientation import com.mrousavy.camera.types.ResizeMode import kotlin.math.roundToInt +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.withContext @SuppressLint("ViewConstructor") -class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceView(context) { - var size: Size = getMaximumPreviewSize() +class PreviewView(context: Context, callback: SurfaceHolder.Callback) : + SurfaceView(context), + SurfaceHolder.Callback { + var size: Size = CameraDeviceDetails.getMaximumPreviewSize() set(value) { - field = value - UiThreadUtil.runOnUiThread { - Log.i(TAG, "Setting PreviewView Surface Size to $width x $height...") - holder.setFixedSize(value.height, value.width) - requestLayout() - invalidate() + if (field != value) { + Log.i(TAG, "Surface Size changed: $field -> $value") + field = value + updateLayout() } } var resizeMode: ResizeMode = ResizeMode.COVER set(value) { - field = value - UiThreadUtil.runOnUiThread { - requestLayout() - invalidate() + if (field != value) { + Log.i(TAG, "Resize Mode changed: $field -> $value") + field = value + updateLayout() } } + private var inputOrientation: Orientation = 
Orientation.LANDSCAPE_LEFT + set(value) { + if (field != value) { + Log.i(TAG, "Input Orientation changed: $field -> $value") + field = value + updateLayout() + } + } + private val viewSize: Size + get() { + val displayMetrics = context.resources.displayMetrics + val dpX = width / displayMetrics.density + val dpY = height / displayMetrics.density + return Size(dpX.toInt(), dpY.toInt()) + } init { Log.i(TAG, "Creating PreviewView...") - layoutParams = FrameLayout.LayoutParams( - FrameLayout.LayoutParams.MATCH_PARENT, - FrameLayout.LayoutParams.MATCH_PARENT, - Gravity.CENTER - ) + holder.setKeepScreenOn(true) + holder.addCallback(this) holder.addCallback(callback) + holder.setFixedSize(size.width, size.height) + } + + override fun surfaceCreated(holder: SurfaceHolder) = Unit + override fun surfaceDestroyed(holder: SurfaceHolder) = Unit + override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { + size = Size(width, height) + } + + suspend fun setSurfaceSize(width: Int, height: Int, cameraSensorOrientation: Orientation) { + withContext(Dispatchers.Main) { + inputOrientation = cameraSensorOrientation + holder.resize(width, height) + } + } + + fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point { + val sensorOrientation = cameraDeviceDetails.sensorOrientation + val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height()) + val viewOrientation = Orientation.PORTRAIT + + val rotated = point.rotatedBy(viewSize, cameraSize, viewOrientation, sensorOrientation) + Log.i(TAG, "Converted layer point $point to camera point $rotated! ($sensorOrientation, $cameraSize -> $viewSize)") + return rotated + } + + private fun updateLayout() { + UiThreadUtil.runOnUiThread { + requestLayout() + invalidate() + } + } + + override fun requestLayout() { + super.requestLayout() + // Manually trigger measure & layout, as RN on Android skips those. 
+ // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427 + post { + measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY)) + layout(left, top, right, bottom) + } } private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size { val contentAspectRatio = contentSize.width.toDouble() / contentSize.height val containerAspectRatio = containerSize.width.toDouble() / containerSize.height + if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) { + // One of the aspect ratios is 0 or NaN, maybe the view hasn't been laid out yet. + return contentSize + } val widthOverHeight = when (resizeMode) { ResizeMode.COVER -> contentAspectRatio > containerAspectRatio @@ -69,9 +129,10 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceV super.onMeasure(widthMeasureSpec, heightMeasureSpec) val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)) - val fittedSize = getSize(size, viewSize, resizeMode) + val surfaceSize = size.rotatedBy(inputOrientation) + val fittedSize = getSize(surfaceSize, viewSize, resizeMode) - Log.i(TAG, "PreviewView is $viewSize, rendering $size content. Resizing to: $fittedSize ($resizeMode)") + Log.i(TAG, "PreviewView is $viewSize, rendering $surfaceSize content ($inputOrientation). 
Resizing to: $fittedSize ($resizeMode)") setMeasuredDimension(fittedSize.width, fittedSize.height) } diff --git a/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt b/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt index dcbd75d..395d396 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt @@ -33,6 +33,7 @@ class VideoPipeline( val format: PixelFormat = PixelFormat.NATIVE, private val isMirrored: Boolean = false, private val enableFrameProcessor: Boolean = false, + enableGpuBuffers: Boolean = false, private val callback: CameraSession.Callback ) : SurfaceTexture.OnFrameAvailableListener, Closeable { @@ -79,17 +80,25 @@ class VideoPipeline( val format = getImageReaderFormat() Log.i(TAG, "Using ImageReader round-trip (format: #$format)") - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { - Log.i(TAG, "Using API 29 for GPU ImageReader...") + // Create ImageReader + if (enableGpuBuffers && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { val usageFlags = getRecommendedHardwareBufferFlags() - Log.i(TAG, "Using ImageReader flags: $usageFlags") + Log.i(TAG, "Creating ImageReader with GPU-optimized usage flags: $usageFlags") imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags) + } else { + Log.i(TAG, "Creating ImageReader with default usage flags...") + imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES) + } + + // Create ImageWriter + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + Log.i(TAG, "Creating ImageWriter with format #$format...") imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format) } else { - Log.i(TAG, "Using legacy API for CPU ImageReader...") - imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES) + Log.i(TAG, "Creating ImageWriter with default format...") imageWriter = ImageWriter.newInstance(glSurface, 
MAX_IMAGES) } + imageReader!!.setOnImageAvailableListener({ reader -> // Log.i(TAG, "ImageReader::onImageAvailable!")s val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener @@ -107,7 +116,7 @@ class VideoPipeline( } } catch (e: Throwable) { Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e) - throw e + callback.onError(e) } finally { frame.decrementRefCount() } @@ -125,8 +134,11 @@ class VideoPipeline( isActive = false imageWriter?.close() imageReader?.close() + removeRecordingSessionOutputSurface() recordingSession = null + surfaceTexture.setOnFrameAvailableListener(null, null) surfaceTexture.release() + surface.release() } } @@ -173,7 +185,7 @@ class VideoPipeline( synchronized(this) { if (recordingSession != null) { // Configure OpenGL pipeline to stream Frames into the Recording Session's surface - Log.i(TAG, "Setting $width x $height RecordingSession Output...") + Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...") setRecordingSessionOutputSurface(recordingSession.surface) this.recordingSession = recordingSession } else { @@ -228,7 +240,11 @@ class VideoPipeline( @RequiresApi(Build.VERSION_CODES.Q) private fun supportsHardwareBufferFlags(flags: Long): Boolean { val hardwareBufferFormat = format.toHardwareBufferFormat() - return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags) + try { + return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags) + } catch (_: Throwable) { + return false + } } private external fun getInputTextureId(): Int diff --git a/package/android/src/main/java/com/mrousavy/camera/core/capture/CameraCaptureRequest.kt b/package/android/src/main/java/com/mrousavy/camera/core/capture/CameraCaptureRequest.kt new file mode 100644 index 0000000..eeb5276 --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/core/capture/CameraCaptureRequest.kt @@ -0,0 +1,88 @@ +package com.mrousavy.camera.core.capture + +import 
android.hardware.camera2.CameraDevice +import android.hardware.camera2.CaptureRequest +import com.mrousavy.camera.core.CameraDeviceDetails +import com.mrousavy.camera.core.FlashUnavailableError +import com.mrousavy.camera.core.InvalidVideoHdrError +import com.mrousavy.camera.core.LowLightBoostNotSupportedError +import com.mrousavy.camera.core.PropRequiresFormatToBeNonNullError +import com.mrousavy.camera.core.outputs.SurfaceOutput +import com.mrousavy.camera.extensions.setZoom +import com.mrousavy.camera.types.CameraDeviceFormat +import com.mrousavy.camera.types.Torch + +abstract class CameraCaptureRequest( + private val torch: Torch = Torch.OFF, + private val enableVideoHdr: Boolean = false, + val enableLowLightBoost: Boolean = false, + val exposureBias: Double? = null, + val zoom: Float = 1.0f, + val format: CameraDeviceFormat? = null +) { + enum class Template { + RECORD, + PHOTO, + PHOTO_ZSL, + PHOTO_SNAPSHOT, + PREVIEW; + + fun toRequestTemplate(): Int = + when (this) { + RECORD -> CameraDevice.TEMPLATE_RECORD + PHOTO -> CameraDevice.TEMPLATE_STILL_CAPTURE + PHOTO_ZSL -> CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG + PHOTO_SNAPSHOT -> CameraDevice.TEMPLATE_VIDEO_SNAPSHOT + PREVIEW -> CameraDevice.TEMPLATE_PREVIEW + } + } + + abstract fun createCaptureRequest( + device: CameraDevice, + deviceDetails: CameraDeviceDetails, + outputs: List + ): CaptureRequest.Builder + + protected open fun createCaptureRequest( + template: Template, + device: CameraDevice, + deviceDetails: CameraDeviceDetails, + outputs: List + ): CaptureRequest.Builder { + val builder = device.createCaptureRequest(template.toRequestTemplate()) + + // Add all repeating output surfaces + outputs.forEach { output -> + builder.addTarget(output.surface) + } + + // Set HDR + if (enableVideoHdr) { + if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr") + if (!format.supportsVideoHdr) throw InvalidVideoHdrError() + builder.set(CaptureRequest.CONTROL_SCENE_MODE, 
CaptureRequest.CONTROL_SCENE_MODE_HDR) + builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE) + } else if (enableLowLightBoost) { + if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError() + builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT) + builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE) + } + + // Set Exposure Bias + if (exposureBias != null) { + val clamped = deviceDetails.exposureRange.clamp(exposureBias.toInt()) + builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, clamped) + } + + // Set Zoom + builder.setZoom(zoom, deviceDetails) + + // Set Torch + if (torch == Torch.ON) { + if (!deviceDetails.hasFlash) throw FlashUnavailableError() + builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) + } + + return builder + } +} diff --git a/package/android/src/main/java/com/mrousavy/camera/core/capture/PhotoCaptureRequest.kt b/package/android/src/main/java/com/mrousavy/camera/core/capture/PhotoCaptureRequest.kt new file mode 100644 index 0000000..03ca3de --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/core/capture/PhotoCaptureRequest.kt @@ -0,0 +1,149 @@ +package com.mrousavy.camera.core.capture + +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraDevice +import android.hardware.camera2.CaptureRequest +import android.os.Build +import android.util.Log +import com.mrousavy.camera.core.CameraDeviceDetails +import com.mrousavy.camera.core.outputs.SurfaceOutput +import com.mrousavy.camera.types.HardwareLevel +import com.mrousavy.camera.types.Orientation +import com.mrousavy.camera.types.QualityPrioritization +import com.mrousavy.camera.types.Torch + +class PhotoCaptureRequest( + repeatingRequest: RepeatingCaptureRequest, + private val qualityPrioritization: QualityPrioritization, + private val enableAutoStabilization: Boolean, + enablePhotoHdr: Boolean, 
+ private val outputOrientation: Orientation +) : CameraCaptureRequest( + Torch.OFF, + enablePhotoHdr, + repeatingRequest.enableLowLightBoost, + repeatingRequest.exposureBias, + repeatingRequest.zoom, + repeatingRequest.format +) { + companion object { + private const val TAG = "PhotoCaptureRequest" + } + + override fun createCaptureRequest( + device: CameraDevice, + deviceDetails: CameraDeviceDetails, + outputs: List + ): CaptureRequest.Builder { + val template = when (qualityPrioritization) { + QualityPrioritization.QUALITY -> Template.PHOTO + QualityPrioritization.BALANCED -> { + if (deviceDetails.supportsZsl) { + Template.PHOTO_ZSL + } else { + Template.PHOTO + } + } + QualityPrioritization.SPEED -> { + if (deviceDetails.supportsSnapshotCapture) { + Template.PHOTO_SNAPSHOT + } else if (deviceDetails.supportsZsl) { + Template.PHOTO_ZSL + } else { + Template.PHOTO + } + } + } + Log.i(TAG, "Using CaptureRequest Template $template...") + return this.createCaptureRequest(template, device, deviceDetails, outputs) + } + + override fun createCaptureRequest( + template: Template, + device: CameraDevice, + deviceDetails: CameraDeviceDetails, + outputs: List + ): CaptureRequest.Builder { + val builder = super.createCaptureRequest(template, device, deviceDetails, outputs) + + // Set various speed vs quality optimization flags + when (qualityPrioritization) { + QualityPrioritization.SPEED -> { + if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.FULL)) { + builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_FAST) + if (deviceDetails.availableEdgeModes.contains(CaptureRequest.EDGE_MODE_FAST)) { + builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_FAST) + } + } + if (deviceDetails.availableAberrationModes.contains(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_FAST)) { + builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_FAST) + } + if 
(deviceDetails.availableHotPixelModes.contains(CaptureRequest.HOT_PIXEL_MODE_FAST)) { + builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_FAST) + } + if (deviceDetails.availableDistortionCorrectionModes.contains(CaptureRequest.DISTORTION_CORRECTION_MODE_FAST) && + Build.VERSION.SDK_INT >= Build.VERSION_CODES.P + ) { + builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_FAST) + } + if (deviceDetails.availableNoiseReductionModes.contains(CaptureRequest.NOISE_REDUCTION_MODE_FAST)) { + builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_FAST) + } + if (deviceDetails.availableShadingModes.contains(CaptureRequest.SHADING_MODE_FAST)) { + builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_FAST) + } + if (deviceDetails.availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_FAST)) { + builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST) + } + builder.set(CaptureRequest.JPEG_QUALITY, 85) + } + QualityPrioritization.BALANCED -> { + builder.set(CaptureRequest.JPEG_QUALITY, 92) + } + QualityPrioritization.QUALITY -> { + if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.FULL)) { + builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY) + if (deviceDetails.availableEdgeModes.contains(CaptureRequest.EDGE_MODE_HIGH_QUALITY)) { + builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY) + } + } + if (deviceDetails.availableAberrationModes.contains(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY)) { + builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) + } + if (deviceDetails.availableHotPixelModes.contains(CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY)) { + builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY) + } + if 
(deviceDetails.availableDistortionCorrectionModes.contains(CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) && + Build.VERSION.SDK_INT >= Build.VERSION_CODES.P + ) { + builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) + } + if (deviceDetails.availableNoiseReductionModes.contains(CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY)) { + builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY) + } + if (deviceDetails.availableShadingModes.contains(CaptureRequest.SHADING_MODE_HIGH_QUALITY)) { + builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_HIGH_QUALITY) + } + if (deviceDetails.availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_HIGH_QUALITY)) { + builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY) + } + builder.set(CaptureRequest.JPEG_QUALITY, 100) + } + } + + // Set JPEG Orientation + val targetOrientation = outputOrientation.toSensorRelativeOrientation(deviceDetails) + builder.set(CaptureRequest.JPEG_ORIENTATION, targetOrientation.toDegrees()) + + // Set stabilization for this Frame + if (enableAutoStabilization) { + if (deviceDetails.opticalStabilizationModes.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON)) { + builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) + } else if (deviceDetails.digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) { + builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) + } + } + + return builder + } +} diff --git a/package/android/src/main/java/com/mrousavy/camera/core/capture/RepeatingCaptureRequest.kt b/package/android/src/main/java/com/mrousavy/camera/core/capture/RepeatingCaptureRequest.kt new file mode 100644 index 0000000..a95ca29 --- /dev/null +++ 
b/package/android/src/main/java/com/mrousavy/camera/core/capture/RepeatingCaptureRequest.kt @@ -0,0 +1,113 @@ +package com.mrousavy.camera.core.capture + +import android.hardware.camera2.CameraCharacteristics +import android.hardware.camera2.CameraDevice +import android.hardware.camera2.CaptureRequest +import android.os.Build +import android.util.Range +import com.mrousavy.camera.core.CameraDeviceDetails +import com.mrousavy.camera.core.InvalidFpsError +import com.mrousavy.camera.core.InvalidVideoStabilizationMode +import com.mrousavy.camera.core.PropRequiresFormatToBeNonNullError +import com.mrousavy.camera.core.outputs.SurfaceOutput +import com.mrousavy.camera.types.CameraDeviceFormat +import com.mrousavy.camera.types.HardwareLevel +import com.mrousavy.camera.types.Torch +import com.mrousavy.camera.types.VideoStabilizationMode + +class RepeatingCaptureRequest( + private val enableVideoPipeline: Boolean, + torch: Torch = Torch.OFF, + private val fps: Int? = null, + private val videoStabilizationMode: VideoStabilizationMode = VideoStabilizationMode.OFF, + enableVideoHdr: Boolean = false, + enableLowLightBoost: Boolean = false, + exposureBias: Double? = null, + zoom: Float = 1.0f, + format: CameraDeviceFormat? 
= null +) : CameraCaptureRequest(torch, enableVideoHdr, enableLowLightBoost, exposureBias, zoom, format) { + override fun createCaptureRequest( + device: CameraDevice, + deviceDetails: CameraDeviceDetails, + outputs: List + ): CaptureRequest.Builder { + val template = if (enableVideoPipeline) Template.RECORD else Template.PREVIEW + return this.createCaptureRequest(template, device, deviceDetails, outputs) + } + + private fun getBestDigitalStabilizationMode(deviceDetails: CameraDeviceDetails): Int { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { + if (deviceDetails.digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION)) { + return CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION + } + } + return CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON + } + + override fun createCaptureRequest( + template: Template, + device: CameraDevice, + deviceDetails: CameraDeviceDetails, + outputs: List + ): CaptureRequest.Builder { + val builder = super.createCaptureRequest(template, device, deviceDetails, outputs) + + if (deviceDetails.modes.contains(CameraCharacteristics.CONTROL_MODE_AUTO)) { + builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO) + } + + // Set AF + if (enableVideoPipeline && deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { + builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) + } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) { + builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE) + } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_AUTO)) { + builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO) + } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_OFF)) { + 
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF) + builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 0f) + } + + // Set AE + if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_ON)) { + builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) + } else if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_OFF)) { + builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF) + } + + // Set AWB + if (deviceDetails.awbModes.contains(CameraCharacteristics.CONTROL_AWB_MODE_AUTO)) { + builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO) + } + + // Set FPS + if (fps != null) { + if (format == null) throw PropRequiresFormatToBeNonNullError("fps") + if (format.maxFps < fps) throw InvalidFpsError(fps) + builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps)) + } + + // Set Video Stabilization + if (videoStabilizationMode != VideoStabilizationMode.OFF) { + if (format == null) throw PropRequiresFormatToBeNonNullError("videoStabilizationMode") + if (!format.videoStabilizationModes.contains(videoStabilizationMode)) { + throw InvalidVideoStabilizationMode(videoStabilizationMode) + } + when (videoStabilizationMode) { + VideoStabilizationMode.STANDARD -> { + builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, getBestDigitalStabilizationMode(deviceDetails)) + } + VideoStabilizationMode.CINEMATIC, VideoStabilizationMode.CINEMATIC_EXTENDED -> { + if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)) { + builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) + } else { + builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, getBestDigitalStabilizationMode(deviceDetails)) + } + } + else -> throw InvalidVideoStabilizationMode(videoStabilizationMode) + } + } + + return builder + } +} diff --git 
a/package/android/src/main/java/com/mrousavy/camera/core/outputs/SurfaceOutput.kt b/package/android/src/main/java/com/mrousavy/camera/core/outputs/SurfaceOutput.kt index 5ca1b15..6948a60 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/outputs/SurfaceOutput.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/outputs/SurfaceOutput.kt @@ -10,13 +10,7 @@ import android.view.Surface import androidx.annotation.RequiresApi import java.io.Closeable -open class SurfaceOutput( - val surface: Surface, - val size: Size, - val outputType: OutputType, - val enableHdr: Boolean = false, - private val closeSurfaceOnEnd: Boolean = false -) : Closeable { +open class SurfaceOutput(val surface: Surface, val size: Size, val outputType: OutputType, val enableHdr: Boolean = false) : Closeable { companion object { const val TAG = "SurfaceOutput" @@ -52,12 +46,18 @@ open class SurfaceOutput( return result } + val isRepeating: Boolean + get() { + return when (outputType) { + OutputType.VIDEO, OutputType.PREVIEW, OutputType.VIDEO_AND_PREVIEW -> true + OutputType.PHOTO -> false + } + } + override fun toString(): String = "$outputType (${size.width} x ${size.height})" override fun close() { - if (closeSurfaceOnEnd) { - surface.release() - } + // close() does nothing by default } enum class OutputType { diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt index 4a6a323..4ff3e3f 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt @@ -5,47 +5,70 @@ import android.hardware.camera2.CaptureFailure import android.hardware.camera2.CaptureRequest import android.hardware.camera2.TotalCaptureResult import android.media.MediaActionSound -import 
com.mrousavy.camera.core.CameraQueues +import android.util.Log import com.mrousavy.camera.core.CaptureAbortedError +import com.mrousavy.camera.core.CaptureTimedOutError import com.mrousavy.camera.core.UnknownCaptureError import kotlin.coroutines.resume import kotlin.coroutines.resumeWithException -import kotlin.coroutines.suspendCoroutine +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch +import kotlinx.coroutines.suspendCancellableCoroutine + +private const val TAG = "CameraCaptureSession" suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult = - suspendCoroutine { continuation -> + suspendCancellableCoroutine { continuation -> val shutterSound = if (enableShutterSound) MediaActionSound() else null shutterSound?.load(MediaActionSound.SHUTTER_CLICK) + CoroutineScope(Dispatchers.Default).launch { + delay(5000) // after 5s, cancel capture + if (continuation.isActive) { + Log.e(TAG, "Capture timed out after 5 seconds!") + continuation.resumeWithException(CaptureTimedOutError()) + tryAbortCaptures() + } + } + this.capture( captureRequest, object : CameraCaptureSession.CaptureCallback() { override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) { super.onCaptureCompleted(session, request, result) - continuation.resume(result) - shutterSound?.release() + if (request == captureRequest) { + continuation.resume(result) + shutterSound?.release() + } } override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) { super.onCaptureStarted(session, request, timestamp, frameNumber) - if (enableShutterSound) { - shutterSound?.play(MediaActionSound.SHUTTER_CLICK) + if (request == captureRequest) { + if (enableShutterSound) { + shutterSound?.play(MediaActionSound.SHUTTER_CLICK) + } } } override fun 
onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) { super.onCaptureFailed(session, request, failure) - val wasImageCaptured = failure.wasImageCaptured() - val error = when (failure.reason) { - CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured) - CaptureFailure.REASON_FLUSHED -> CaptureAbortedError(wasImageCaptured) - else -> UnknownCaptureError(wasImageCaptured) + + if (request == captureRequest) { + val wasImageCaptured = failure.wasImageCaptured() + val error = when (failure.reason) { + CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured) + CaptureFailure.REASON_FLUSHED -> CaptureAbortedError(wasImageCaptured) + else -> UnknownCaptureError(wasImageCaptured) + } + continuation.resumeWithException(error) } - continuation.resumeWithException(error) } }, - CameraQueues.cameraQueue.handler + null ) } diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+precapture.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+precapture.kt new file mode 100644 index 0000000..e52ad2c --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+precapture.kt @@ -0,0 +1,151 @@ +package com.mrousavy.camera.extensions + +import android.graphics.Point +import android.hardware.camera2.CameraCaptureSession +import android.hardware.camera2.CaptureRequest +import android.hardware.camera2.CaptureResult +import android.hardware.camera2.params.MeteringRectangle +import android.util.Log +import android.util.Size +import com.mrousavy.camera.core.CameraDeviceDetails +import com.mrousavy.camera.core.FocusCanceledError +import com.mrousavy.camera.types.Flash +import com.mrousavy.camera.types.HardwareLevel +import kotlin.coroutines.coroutineContext +import kotlinx.coroutines.isActive + +data class PrecaptureOptions( + val modes: List, + val flash: Flash = Flash.OFF, + val pointsOfInterest: List, + val 
skipIfPassivelyFocused: Boolean, + val timeoutMs: Long +) + +data class PrecaptureResult(val needsFlash: Boolean) + +private const val TAG = "Precapture" +private val DEFAULT_METERING_SIZE = Size(100, 100) + +/** + * Run a precapture sequence to trigger an AF, AE or AWB scan and lock to the optimal values. + * After this function completes, you can capture high quality photos as AF/AE/AWB are in focused state. + * + * To reset to auto-focus again, create a new `RepeatingRequest` with a fresh set of CONTROL_MODEs set. + */ +suspend fun CameraCaptureSession.precapture( + request: CaptureRequest.Builder, + deviceDetails: CameraDeviceDetails, + options: PrecaptureOptions +): PrecaptureResult { + Log.i(TAG, "Running precapture sequence... ($options)") + request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO) + + var enableFlash = options.flash == Flash.ON + var afState = FocusState.Inactive + var aeState = ExposureState.Inactive + var awbState = WhiteBalanceState.Inactive + val precaptureModes = options.modes.toMutableList() + + // 1. 
Cancel any ongoing precapture sequences + request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL) + request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL) + if (options.flash == Flash.AUTO || options.skipIfPassivelyFocused) { + // We want to read the current AE/AF/AWB values to determine if we need flash or can skip AF/AE/AWB precapture + val result = this.capture(request.build(), false) + + afState = FocusState.fromAFState(result.get(CaptureResult.CONTROL_AF_STATE) ?: CaptureResult.CONTROL_AF_STATE_INACTIVE) + aeState = ExposureState.fromAEState(result.get(CaptureResult.CONTROL_AE_STATE) ?: CaptureResult.CONTROL_AE_STATE_INACTIVE) + awbState = WhiteBalanceState.fromAWBState(result.get(CaptureResult.CONTROL_AWB_STATE) ?: CaptureResult.CONTROL_AWB_STATE_INACTIVE) + + Log.i(TAG, "Precapture current states: AF: $afState, AE: $aeState, AWB: $awbState") + enableFlash = aeState == ExposureState.FlashRequired && options.flash == Flash.AUTO + } else { + // we either want Flash ON or OFF, so we don't care about lighting conditions - do a fast capture. + this.capture(request.build(), null, null) + } + + if (!coroutineContext.isActive) throw FocusCanceledError() + + val meteringWeight = MeteringRectangle.METERING_WEIGHT_MAX - 1 + val meteringRectangles = options.pointsOfInterest.map { point -> + MeteringRectangle(point, DEFAULT_METERING_SIZE, meteringWeight) + }.toTypedArray() + + if (options.skipIfPassivelyFocused) { + // If user allows us to skip precapture for values that are already focused, remove them from the precapture modes. 
+ if (afState.isPassivelyFocused) { + Log.i(TAG, "AF is already focused, skipping...") + precaptureModes.remove(PrecaptureTrigger.AF) + } + if (aeState.isPassivelyFocused) { + Log.i(TAG, "AE is already focused, skipping...") + precaptureModes.remove(PrecaptureTrigger.AE) + } + if (awbState.isPassivelyFocused) { + Log.i(TAG, "AWB is already focused, skipping...") + precaptureModes.remove(PrecaptureTrigger.AWB) + } + } + + // 2. Submit a precapture start sequence + if (enableFlash && deviceDetails.hasFlash) { + request.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) + } + if (precaptureModes.contains(PrecaptureTrigger.AF)) { + // AF Precapture + if (deviceDetails.afModes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO)) { + request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO) + request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START) + if (meteringRectangles.isNotEmpty() && deviceDetails.supportsFocusRegions) { + request.set(CaptureRequest.CONTROL_AF_REGIONS, meteringRectangles) + } + } else { + // AF is not supported on this device. + precaptureModes.remove(PrecaptureTrigger.AF) + } + } + if (precaptureModes.contains(PrecaptureTrigger.AE)) { + // AE Precapture + if (deviceDetails.aeModes.contains(CaptureRequest.CONTROL_AE_MODE_ON) && deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)) { + request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) + request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START) + if (meteringRectangles.isNotEmpty() && + deviceDetails.supportsExposureRegions && + deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED) + ) { + request.set(CaptureRequest.CONTROL_AE_REGIONS, meteringRectangles) + } + } else { + // AE is not supported on this device. 
+ precaptureModes.remove(PrecaptureTrigger.AE) + } + } + if (precaptureModes.contains(PrecaptureTrigger.AWB)) { + // AWB Precapture + if (deviceDetails.awbModes.contains(CaptureRequest.CONTROL_AWB_MODE_AUTO)) { + request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO) + if (meteringRectangles.isNotEmpty() && deviceDetails.supportsWhiteBalanceRegions) { + request.set(CaptureRequest.CONTROL_AWB_REGIONS, meteringRectangles) + } + } else { + // AWB is not supported on this device. + precaptureModes.remove(PrecaptureTrigger.AWB) + } + } + this.capture(request.build(), null, null) + + if (!coroutineContext.isActive) throw FocusCanceledError() + + // 3. Start a repeating request without the trigger and wait until AF/AE/AWB locks + request.set(CaptureRequest.CONTROL_AF_TRIGGER, null) + request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, null) + val result = this.setRepeatingRequestAndWaitForPrecapture(request.build(), options.timeoutMs, *precaptureModes.toTypedArray()) + + if (!coroutineContext.isActive) throw FocusCanceledError() + + Log.i(TAG, "AF/AE/AWB successfully locked!") + + val needsFlash = result.exposureState == ExposureState.FlashRequired + return PrecaptureResult(needsFlash) +} diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+setRepeatingRequestAndWaitForPrecapture.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+setRepeatingRequestAndWaitForPrecapture.kt new file mode 100644 index 0000000..48ca860 --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+setRepeatingRequestAndWaitForPrecapture.kt @@ -0,0 +1,193 @@ +package com.mrousavy.camera.extensions + +import android.hardware.camera2.CameraCaptureSession +import android.hardware.camera2.CaptureFailure +import android.hardware.camera2.CaptureRequest +import android.hardware.camera2.CaptureResult +import android.hardware.camera2.TotalCaptureResult 
+import android.util.Log +import com.mrousavy.camera.core.CaptureAbortedError +import com.mrousavy.camera.core.CaptureTimedOutError +import kotlin.coroutines.resume +import kotlin.coroutines.resumeWithException +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch +import kotlinx.coroutines.suspendCancellableCoroutine + +private const val TAG = "CameraCaptureSession" + +enum class PrecaptureTrigger { + AE, + AF, + AWB +} + +interface AutoState { + val isCompleted: Boolean + val isPassivelyFocused: Boolean +} + +enum class FocusState : AutoState { + Unknown, + Inactive, + Scanning, + Focused, + Unfocused, + PassiveScanning, + PassiveFocused, + PassiveUnfocused; + + override val isCompleted: Boolean + get() = this == Focused || this == Unfocused + override val isPassivelyFocused: Boolean + get() = this == PassiveFocused + + companion object { + fun fromAFState(afState: Int): FocusState = + when (afState) { + CaptureResult.CONTROL_AF_STATE_INACTIVE -> Inactive + CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN -> Scanning + CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED -> Focused + CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED -> Unfocused + CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN -> PassiveScanning + CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED -> PassiveFocused + CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED -> PassiveUnfocused + else -> Unknown + } + } +} +enum class ExposureState : AutoState { + Unknown, + Locked, + Inactive, + Precapture, + Searching, + Converged, + FlashRequired; + + override val isCompleted: Boolean + get() = this == Converged || this == FlashRequired + override val isPassivelyFocused: Boolean + get() = this == Converged + + companion object { + fun fromAEState(aeState: Int): ExposureState = + when (aeState) { + CaptureResult.CONTROL_AE_STATE_INACTIVE -> Inactive + CaptureResult.CONTROL_AE_STATE_SEARCHING -> Searching + 
CaptureResult.CONTROL_AE_STATE_PRECAPTURE -> Precapture + CaptureResult.CONTROL_AE_STATE_CONVERGED -> Converged + CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED -> FlashRequired + CaptureResult.CONTROL_AE_STATE_LOCKED -> Locked + else -> Unknown + } + } +} + +enum class WhiteBalanceState : AutoState { + Unknown, + Inactive, + Locked, + Searching, + Converged; + + override val isCompleted: Boolean + get() = this == Converged + override val isPassivelyFocused: Boolean + get() = this == Converged + + companion object { + fun fromAWBState(awbState: Int): WhiteBalanceState = + when (awbState) { + CaptureResult.CONTROL_AWB_STATE_INACTIVE -> Inactive + CaptureResult.CONTROL_AWB_STATE_SEARCHING -> Searching + CaptureResult.CONTROL_AWB_STATE_CONVERGED -> Converged + CaptureResult.CONTROL_AWB_STATE_LOCKED -> Locked + else -> Unknown + } + } +} + +data class ResultState(val focusState: FocusState, val exposureState: ExposureState, val whiteBalanceState: WhiteBalanceState) + +/** + * Set a new repeating request for the [CameraCaptureSession] that contains a precapture trigger, and wait until the given precaptures have locked. 
+ */ +suspend fun CameraCaptureSession.setRepeatingRequestAndWaitForPrecapture( + request: CaptureRequest, + timeoutMs: Long, + vararg precaptureTriggers: PrecaptureTrigger +): ResultState = + suspendCancellableCoroutine { continuation -> + // Map of all completed precaptures + val completed = precaptureTriggers.associateWith { false }.toMutableMap() + + CoroutineScope(Dispatchers.Default).launch { + delay(timeoutMs) // after timeout, cancel capture + if (continuation.isActive) { + Log.e(TAG, "Precapture timed out after ${timeoutMs / 1000} seconds!") + continuation.resumeWithException(CaptureTimedOutError()) + try { + setRepeatingRequest(request, null, null) + } catch (e: Throwable) { + // session might have already been closed + Log.e(TAG, "Error resetting session repeating request..", e) + } + } + } + + this.setRepeatingRequest( + request, + object : CameraCaptureSession.CaptureCallback() { + override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) { + super.onCaptureCompleted(session, request, result) + + if (continuation.isActive) { + val afState = FocusState.fromAFState(result.get(CaptureResult.CONTROL_AF_STATE) ?: CaptureResult.CONTROL_AF_STATE_INACTIVE) + val aeState = ExposureState.fromAEState( + result.get(CaptureResult.CONTROL_AE_STATE) ?: CaptureResult.CONTROL_AE_STATE_INACTIVE + ) + val awbState = WhiteBalanceState.fromAWBState( + result.get(CaptureResult.CONTROL_AWB_STATE) ?: CaptureResult.CONTROL_AWB_STATE_INACTIVE + ) + Log.i(TAG, "Precapture state: AF: $afState, AE: $aeState, AWB: $awbState") + + // AF Precapture + if (precaptureTriggers.contains(PrecaptureTrigger.AF)) { + completed[PrecaptureTrigger.AF] = afState.isCompleted + } + // AE Precapture + if (precaptureTriggers.contains(PrecaptureTrigger.AE)) { + completed[PrecaptureTrigger.AE] = aeState.isCompleted + } + // AWB Precapture + if (precaptureTriggers.contains(PrecaptureTrigger.AWB)) { + completed[PrecaptureTrigger.AWB] = 
awbState.isCompleted + } + + if (completed.values.all { it == true }) { + // All precaptures did complete! + continuation.resume(ResultState(afState, aeState, awbState)) + session.setRepeatingRequest(request, null, null) + } + } + } + override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) { + super.onCaptureFailed(session, request, failure) + + if (continuation.isActive) { + // Capture failed or session closed. + continuation.resumeWithException(CaptureAbortedError(failure.wasImageCaptured())) + try { + session.setRepeatingRequest(request, null, null) + } catch (e: Throwable) { + Log.e(TAG, "Failed to continue repeating request!", e) + } + } + } + }, + null + ) + } diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+tryAbortCaptures.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+tryAbortCaptures.kt new file mode 100644 index 0000000..24c64ce --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+tryAbortCaptures.kt @@ -0,0 +1,9 @@ +package com.mrousavy.camera.extensions + +import android.hardware.camera2.CameraCaptureSession + +fun CameraCaptureSession.tryAbortCaptures() { + try { + abortCaptures() + } catch (_: Throwable) {} +} diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+tryStopRepeating.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+tryStopRepeating.kt new file mode 100644 index 0000000..0810978 --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+tryStopRepeating.kt @@ -0,0 +1,9 @@ +package com.mrousavy.camera.extensions + +import android.hardware.camera2.CameraCaptureSession + +fun CameraCaptureSession.tryStopRepeating() { + try { + stopRepeating() + } catch (_: Throwable) {} +} diff --git 
a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getOutputSizes.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getOutputSizes.kt index 168dabe..883e4c6 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getOutputSizes.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getOutputSizes.kt @@ -1,39 +1,13 @@ package com.mrousavy.camera.extensions import android.hardware.camera2.CameraCharacteristics -import android.media.CamcorderProfile -import android.os.Build import android.util.Size - -private fun getMaximumVideoSize(cameraId: String): Size? { - try { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH) - if (profiles != null) { - val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height } - if (largestProfile != null) { - return Size(largestProfile.width, largestProfile.height) - } - } - } - - val cameraIdInt = cameraId.toIntOrNull() - if (cameraIdInt != null) { - val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH) - return Size(profile.videoFrameWidth, profile.videoFrameHeight) - } - - return null - } catch (e: Throwable) { - // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. - return null - } -} +import com.mrousavy.camera.utils.CamcorderProfileUtils fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List { val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! 
val sizes = config.getOutputSizes(format) ?: emptyArray() - val maxVideoSize = getMaximumVideoSize(cameraId) + val maxVideoSize = CamcorderProfileUtils.getMaximumVideoSize(cameraId) if (maxVideoSize != null) { return sizes.filter { it.bigger <= maxVideoSize.bigger } } diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getPreviewSize.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getPreviewSize.kt deleted file mode 100644 index e758c45..0000000 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCharacteristics+getPreviewSize.kt +++ /dev/null @@ -1,29 +0,0 @@ -package com.mrousavy.camera.extensions - -import android.content.res.Resources -import android.hardware.camera2.CameraCharacteristics -import android.util.Size -import android.view.SurfaceHolder - -fun getMaximumPreviewSize(): Size { - // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap - // According to the Android Developer documentation, PREVIEW streams can have a resolution - // of up to the phone's display's resolution, with a maximum of 1920x1080. - val display1080p = Size(1080, 1920) - val displaySize = Size( - Resources.getSystem().displayMetrics.widthPixels, - Resources.getSystem().displayMetrics.heightPixels - ) - val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller - - return if (isHighResScreen) display1080p else displaySize -} - -fun CameraCharacteristics.getPreviewTargetSize(targetSize: Size?): Size { - val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! 
- val maximumPreviewSize = getMaximumPreviewSize() - val outputSizes = config.getOutputSizes(SurfaceHolder::class.java) - .filter { it.bigger <= maximumPreviewSize.bigger && it.smaller <= maximumPreviewSize.smaller } - - return outputSizes.closestToOrMax(targetSize) -} diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt deleted file mode 100644 index 0c425a8..0000000 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt +++ /dev/null @@ -1,104 +0,0 @@ -package com.mrousavy.camera.extensions - -import android.hardware.camera2.CameraCharacteristics -import android.hardware.camera2.CameraDevice -import android.hardware.camera2.CameraManager -import android.hardware.camera2.CaptureRequest -import android.view.Surface -import com.mrousavy.camera.types.Flash -import com.mrousavy.camera.types.Orientation -import com.mrousavy.camera.types.QualityPrioritization - -private fun supportsSnapshotCapture(cameraCharacteristics: CameraCharacteristics): Boolean { - // As per CameraDevice.TEMPLATE_VIDEO_SNAPSHOT in documentation: - val hardwareLevel = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!! - if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) return false - - val capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!! 
- val hasDepth = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) - val isBackwardsCompatible = !capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) - if (hasDepth && !isBackwardsCompatible) return false - - return true -} - -fun CameraDevice.createPhotoCaptureRequest( - cameraManager: CameraManager, - surface: Surface, - zoom: Float, - qualityPrioritization: QualityPrioritization, - flashMode: Flash, - enableRedEyeReduction: Boolean, - enableAutoStabilization: Boolean, - enableHdr: Boolean, - orientation: Orientation -): CaptureRequest { - val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id) - - val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) { - CameraDevice.TEMPLATE_VIDEO_SNAPSHOT - } else { - CameraDevice.TEMPLATE_STILL_CAPTURE - } - val captureRequest = this.createCaptureRequest(template) - captureRequest.addTarget(surface) - - // TODO: Maybe we can even expose that prop directly? - val jpegQuality = when (qualityPrioritization) { - QualityPrioritization.SPEED -> 85 - QualityPrioritization.BALANCED -> 92 - QualityPrioritization.QUALITY -> 100 - } - captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte()) - - captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees()) - - // TODO: Use the same options as from the preview request. This is duplicate code! 
- - when (flashMode) { - // Set the Flash Mode - Flash.OFF -> { - captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) - captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF) - } - Flash.ON -> { - captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) - captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) - } - Flash.AUTO -> { - if (enableRedEyeReduction) { - captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) - } else { - captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH) - } - } - } - - if (enableAutoStabilization) { - // Enable optical or digital image stabilization - val digitalStabilization = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) - val hasDigitalStabilization = digitalStabilization?.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON) ?: false - - val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) - val hasOpticalStabilization = opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false - if (hasOpticalStabilization) { - captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF) - captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) - } else if (hasDigitalStabilization) { - captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) - } else { - // no stabilization is supported. ignore it - } - } - - // TODO: Check if that zoom value is even supported. 
- captureRequest.setZoom(zoom, cameraCharacteristics) - - // Set HDR - // TODO: Check if that value is even supported - if (enableHdr) { - captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) - } - - return captureRequest.build() -} diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+isValid.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+isValid.kt new file mode 100644 index 0000000..4a991f4 --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+isValid.kt @@ -0,0 +1,13 @@ +package com.mrousavy.camera.extensions + +import android.hardware.camera2.CameraDevice + +val CameraDevice.isValid: Boolean + get() { + try { + this.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW) + return true + } catch (e: Throwable) { + return false + } + } diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CaptureRequest+setZoom.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CaptureRequest+setZoom.kt index 657886e..d097cbc 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CaptureRequest+setZoom.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CaptureRequest+setZoom.kt @@ -1,20 +1,18 @@ package com.mrousavy.camera.extensions -import android.hardware.camera2.CameraCharacteristics import android.hardware.camera2.CaptureRequest import android.os.Build -import android.util.Range +import com.mrousavy.camera.core.CameraDeviceDetails +import com.mrousavy.camera.types.HardwareLevel -fun CaptureRequest.Builder.setZoom(zoom: Float, cameraCharacteristics: CameraCharacteristics) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { - val zoomRange = cameraCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) ?: Range(1f, 1f) - val zoomClamped = zoomRange.clamp(zoom) +fun CaptureRequest.Builder.setZoom(zoom: Float, deviceDetails: CameraDeviceDetails) { + 
val zoomRange = deviceDetails.zoomRange + val zoomClamped = zoomRange.clamp(zoom) + + if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { this.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomClamped) } else { - val maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) - val zoomRange = Range(1f, maxZoom ?: 1f) - val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! - val zoomClamped = zoomRange.clamp(zoom) + val size = deviceDetails.activeSize this.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoomClamped)) } } diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/Point+rotatedBy.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/Point+rotatedBy.kt new file mode 100644 index 0000000..7e84e3f --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/Point+rotatedBy.kt @@ -0,0 +1,25 @@ +package com.mrousavy.camera.extensions + +import android.graphics.Point +import android.graphics.PointF +import android.util.Log +import android.util.Size +import com.mrousavy.camera.types.Orientation + +fun Point.rotatedBy(fromSize: Size, toSize: Size, fromOrientation: Orientation, toOrientation: Orientation): Point { + val differenceDegrees = (fromOrientation.toDegrees() + toOrientation.toDegrees()) % 360 + val difference = Orientation.fromRotationDegrees(differenceDegrees) + val normalizedPoint = PointF(this.x / fromSize.width.toFloat(), this.y / fromSize.height.toFloat()) + + val rotatedNormalizedPoint = when (difference) { + Orientation.PORTRAIT -> normalizedPoint + Orientation.PORTRAIT_UPSIDE_DOWN -> PointF(1 - normalizedPoint.x, 1 - normalizedPoint.y) + Orientation.LANDSCAPE_LEFT -> PointF(normalizedPoint.y, 1 - normalizedPoint.x) + Orientation.LANDSCAPE_RIGHT -> PointF(1 - normalizedPoint.y, normalizedPoint.x) + } + + val rotatedX = rotatedNormalizedPoint.x * toSize.width + val 
rotatedY = rotatedNormalizedPoint.y * toSize.height + Log.i("ROTATE", "$this -> $normalizedPoint -> $difference -> $rotatedX, $rotatedY") + return Point(rotatedX.toInt(), rotatedY.toInt()) +} diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/RecordingSession+getRecommendedBitRate.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/RecordingSession+getRecommendedBitRate.kt index ed6cee2..d616a6e 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/RecordingSession+getRecommendedBitRate.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/RecordingSession+getRecommendedBitRate.kt @@ -4,9 +4,9 @@ import android.media.CamcorderProfile import android.media.MediaRecorder.VideoEncoder import android.os.Build import android.util.Log -import android.util.Size import com.mrousavy.camera.core.RecordingSession import com.mrousavy.camera.types.VideoCodec +import com.mrousavy.camera.utils.CamcorderProfileUtils import kotlin.math.abs data class RecommendedProfile( @@ -23,7 +23,7 @@ fun RecordingSession.getRecommendedBitRate(fps: Int, codec: VideoCodec, hdr: Boo val targetResolution = size val encoder = codec.toVideoEncoder() val bitDepth = if (hdr) 10 else 8 - val quality = findClosestCamcorderProfileQuality(cameraId, targetResolution) + val quality = CamcorderProfileUtils.findClosestCamcorderProfileQuality(cameraId, targetResolution, true) Log.i("CamcorderProfile", "Closest matching CamcorderProfile: $quality") var recommendedProfile: RecommendedProfile? 
= null @@ -75,39 +75,3 @@ fun RecordingSession.getRecommendedBitRate(fps: Int, codec: VideoCodec, hdr: Boo } return bitRate.toInt() } - -private fun getResolutionForCamcorderProfileQuality(camcorderProfile: Int): Int = - when (camcorderProfile) { - CamcorderProfile.QUALITY_QCIF -> 176 * 144 - CamcorderProfile.QUALITY_QVGA -> 320 * 240 - CamcorderProfile.QUALITY_CIF -> 352 * 288 - CamcorderProfile.QUALITY_VGA -> 640 * 480 - CamcorderProfile.QUALITY_480P -> 720 * 480 - CamcorderProfile.QUALITY_720P -> 1280 * 720 - CamcorderProfile.QUALITY_1080P -> 1920 * 1080 - CamcorderProfile.QUALITY_2K -> 2048 * 1080 - CamcorderProfile.QUALITY_QHD -> 2560 * 1440 - CamcorderProfile.QUALITY_2160P -> 3840 * 2160 - CamcorderProfile.QUALITY_4KDCI -> 4096 * 2160 - CamcorderProfile.QUALITY_8KUHD -> 7680 * 4320 - else -> throw Error("Invalid CamcorderProfile \"$camcorderProfile\"!") - } - -private fun findClosestCamcorderProfileQuality(cameraId: String, resolution: Size): Int { - // Iterate through all available CamcorderProfiles and find the one that matches the closest - val targetResolution = resolution.width * resolution.height - val cameraIdInt = cameraId.toIntOrNull() - - val profiles = (CamcorderProfile.QUALITY_QCIF..CamcorderProfile.QUALITY_8KUHD).filter { profile -> - if (cameraIdInt != null) { - return@filter CamcorderProfile.hasProfile(cameraIdInt, profile) - } else { - return@filter CamcorderProfile.hasProfile(profile) - } - } - val closestProfile = profiles.minBy { profile -> - val currentResolution = getResolutionForCamcorderProfileQuality(profile) - return@minBy abs(currentResolution - targetResolution) - } - return closestProfile -} diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/Size+Extensions.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/Size+Extensions.kt index b9664c5..b0699e9 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/Size+Extensions.kt +++ 
b/package/android/src/main/java/com/mrousavy/camera/extensions/Size+Extensions.kt @@ -2,7 +2,7 @@ package com.mrousavy.camera.extensions import android.util.Size import android.util.SizeF -import android.view.Surface +import com.mrousavy.camera.types.Orientation import kotlin.math.abs import kotlin.math.max import kotlin.math.min @@ -14,13 +14,10 @@ fun List.closestToOrMax(size: Size?): Size = this.maxBy { it.width * it.height } } -fun Size.rotated(surfaceRotation: Int): Size = - when (surfaceRotation) { - Surface.ROTATION_0 -> Size(width, height) - Surface.ROTATION_90 -> Size(height, width) - Surface.ROTATION_180 -> Size(width, height) - Surface.ROTATION_270 -> Size(height, width) - else -> Size(width, height) +fun Size.rotatedBy(orientation: Orientation): Size = + when (orientation) { + Orientation.PORTRAIT, Orientation.PORTRAIT_UPSIDE_DOWN -> this + Orientation.LANDSCAPE_LEFT, Orientation.LANDSCAPE_RIGHT -> Size(height, width) } val Size.bigger: Int diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/SurfaceHolder+resize.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/SurfaceHolder+resize.kt new file mode 100644 index 0000000..561f6fc --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/SurfaceHolder+resize.kt @@ -0,0 +1,41 @@ +package com.mrousavy.camera.extensions + +import android.util.Log +import android.view.SurfaceHolder +import androidx.annotation.UiThread +import kotlin.coroutines.resume +import kotlinx.coroutines.suspendCancellableCoroutine + +private const val TAG = "SurfaceHolder" + +@UiThread +suspend fun SurfaceHolder.resize(targetWidth: Int, targetHeight: Int) { + return suspendCancellableCoroutine { continuation -> + val currentSize = this.surfaceFrame + if (currentSize.width() == targetWidth && currentSize.height() == targetHeight) { + // Already in target size + continuation.resume(Unit) + return@suspendCancellableCoroutine + } + + Log.i(TAG, "Resizing SurfaceHolder to 
$targetWidth x $targetHeight...") + + val callback = object : SurfaceHolder.Callback { + override fun surfaceCreated(holder: SurfaceHolder) = Unit + override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { + if (width == targetWidth && height == targetHeight) { + holder.removeCallback(this) + Log.i(TAG, "Resized SurfaceHolder to $width x $height!") + continuation.resume(Unit) + } + } + override fun surfaceDestroyed(holder: SurfaceHolder) { + holder.removeCallback(this) + Log.e(TAG, "Failed to resize SurfaceHolder to $targetWidth x $targetHeight!") + continuation.cancel(Error("Tried to resize SurfaceView, but Surface has been destroyed!")) + } + } + this.addCallback(callback) + this.setFixedSize(targetWidth, targetHeight) + } +} diff --git a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java index 763b9cc..c8f6ecf 100644 --- a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java +++ b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java @@ -4,6 +4,7 @@ import android.hardware.HardwareBuffer; import android.media.Image; import android.os.Build; import com.facebook.proguard.annotations.DoNotStrip; +import com.mrousavy.camera.core.FrameInvalidError; import com.mrousavy.camera.core.HardwareBuffersNotAvailableError; import com.mrousavy.camera.types.PixelFormat; import com.mrousavy.camera.types.Orientation; @@ -23,42 +24,17 @@ public class Frame { this.isMirrored = isMirrored; } - public Image getImage() { - synchronized (this) { - Image img = image; - if (!getIsImageValid(img)) { - throw new RuntimeException("Frame is already closed! 
" + - "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n" + - "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n" + - "- If you want to do async processing, use `runAsync(...)` instead.\n" + - "- If you want to use runOnJS, increment it's ref-count: `frame.incrementRefCount()`"); - } - return img; + private void assertIsValid() throws FrameInvalidError { + if (!getIsImageValid(image)) { + throw new FrameInvalidError(); } } - @SuppressWarnings("unused") - @DoNotStrip - public int getWidth() { - return getImage().getWidth(); - } - - @SuppressWarnings("unused") - @DoNotStrip - public int getHeight() { - return getImage().getHeight(); - } - - @SuppressWarnings("unused") - @DoNotStrip - public boolean getIsValid() { - return getIsImageValid(getImage()); - } - - private boolean getIsImageValid(Image image) { + private synchronized boolean getIsImageValid(Image image) { + if (refCount <= 0) return false; try { // will throw an exception if the image is already closed - synchronized (this) { image.getFormat(); } + image.getFormat(); // no exception thrown, image must still be valid. 
return true; } catch (IllegalStateException e) { @@ -67,78 +43,104 @@ public class Frame { } } + public synchronized Image getImage() { + return image; + } + @SuppressWarnings("unused") @DoNotStrip - public boolean getIsMirrored() { + public synchronized int getWidth() throws FrameInvalidError { + assertIsValid(); + return image.getWidth(); + } + + @SuppressWarnings("unused") + @DoNotStrip + public synchronized int getHeight() throws FrameInvalidError { + assertIsValid(); + return image.getHeight(); + } + + @SuppressWarnings("unused") + @DoNotStrip + public synchronized boolean getIsValid() throws FrameInvalidError { + assertIsValid(); + return getIsImageValid(image); + } + + @SuppressWarnings("unused") + @DoNotStrip + public synchronized boolean getIsMirrored() throws FrameInvalidError { + assertIsValid(); return isMirrored; } @SuppressWarnings("unused") @DoNotStrip - public long getTimestamp() { + public synchronized long getTimestamp() throws FrameInvalidError { + assertIsValid(); return timestamp; } @SuppressWarnings("unused") @DoNotStrip - public Orientation getOrientation() { + public synchronized Orientation getOrientation() throws FrameInvalidError { + assertIsValid(); return orientation; } @SuppressWarnings("unused") @DoNotStrip - public PixelFormat getPixelFormat() { - return PixelFormat.Companion.fromImageFormat(getImage().getFormat()); + public synchronized PixelFormat getPixelFormat() throws FrameInvalidError { + assertIsValid(); + return PixelFormat.Companion.fromImageFormat(image.getFormat()); } @SuppressWarnings("unused") @DoNotStrip - public int getPlanesCount() { - return getImage().getPlanes().length; + public synchronized int getPlanesCount() throws FrameInvalidError { + assertIsValid(); + return image.getPlanes().length; } @SuppressWarnings("unused") @DoNotStrip - public int getBytesPerRow() { - return getImage().getPlanes()[0].getRowStride(); + public synchronized int getBytesPerRow() throws FrameInvalidError { + assertIsValid(); + return 
image.getPlanes()[0].getRowStride(); } @SuppressWarnings("unused") @DoNotStrip - public Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError { + private Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError, FrameInvalidError { return getHardwareBuffer(); } - public HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError { + public synchronized HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError, FrameInvalidError { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) { throw new HardwareBuffersNotAvailableError(); } - return getImage().getHardwareBuffer(); + assertIsValid(); + return image.getHardwareBuffer(); } @SuppressWarnings("unused") @DoNotStrip - public void incrementRefCount() { - synchronized (this) { - refCount++; - } + public synchronized void incrementRefCount() { + refCount++; } @SuppressWarnings("unused") @DoNotStrip - public void decrementRefCount() { - synchronized (this) { - refCount--; - if (refCount <= 0) { - // If no reference is held on this Image, close it. - close(); - } + public synchronized void decrementRefCount() { + refCount--; + if (refCount <= 0) { + // If no reference is held on this Image, close it. 
+ close(); } } - private void close() { - synchronized (this) { - image.close(); - } + private synchronized void close() { + image.close(); } } diff --git a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/FrameProcessor.java b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/FrameProcessor.java index 09e1b29..80c3a8d 100644 --- a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/FrameProcessor.java +++ b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/FrameProcessor.java @@ -21,6 +21,8 @@ public final class FrameProcessor { @Keep private final HybridData mHybridData; + @DoNotStrip + @Keep public FrameProcessor(HybridData hybridData) { mHybridData = hybridData; } diff --git a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt index db41e55..81bd1af 100644 --- a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt +++ b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt @@ -14,7 +14,7 @@ import com.mrousavy.camera.core.ViewNotFoundError import java.lang.ref.WeakReference @Suppress("KotlinJniMissingFunction") // we use fbjni. 
-class VisionCameraProxy(context: ReactApplicationContext) { +class VisionCameraProxy(private val reactContext: ReactApplicationContext) { companion object { const val TAG = "VisionCameraProxy" } @@ -24,6 +24,8 @@ class VisionCameraProxy(context: ReactApplicationContext) { private var mHybridData: HybridData private var mContext: WeakReference private var mScheduler: VisionCameraScheduler + val context: ReactApplicationContext + get() = reactContext init { val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl diff --git a/package/android/src/main/java/com/mrousavy/camera/types/CodeScannerOptions.kt b/package/android/src/main/java/com/mrousavy/camera/types/CodeScannerOptions.kt index ee47657..618f4aa 100644 --- a/package/android/src/main/java/com/mrousavy/camera/types/CodeScannerOptions.kt +++ b/package/android/src/main/java/com/mrousavy/camera/types/CodeScannerOptions.kt @@ -3,20 +3,12 @@ package com.mrousavy.camera.types import com.facebook.react.bridge.ReadableMap import com.mrousavy.camera.core.InvalidTypeScriptUnionError -class CodeScannerOptions(map: ReadableMap) { - val codeTypes: List - - init { - val codeTypes = map.getArray("codeTypes")?.toArrayList() ?: throw InvalidTypeScriptUnionError("codeScanner", map.toString()) - this.codeTypes = codeTypes.map { - return@map CodeType.fromUnionValue(it as String) +data class CodeScannerOptions(val codeTypes: List) { + companion object { + fun fromJSValue(value: ReadableMap): CodeScannerOptions { + val jsCodeTypes = value.getArray("codeTypes") ?: throw InvalidTypeScriptUnionError("codeScanner", value.toString()) + val codeTypes = jsCodeTypes.toArrayList().map { CodeType.fromUnionValue(it as String) } + return CodeScannerOptions(codeTypes) } } - - override fun equals(other: Any?): Boolean { - if (other !is CodeScannerOptions) return false - return codeTypes.size == other.codeTypes.size && codeTypes.containsAll(other.codeTypes) - } - - override fun hashCode(): Int = 
codeTypes.hashCode() } diff --git a/package/android/src/main/java/com/mrousavy/camera/types/CodeType.kt b/package/android/src/main/java/com/mrousavy/camera/types/CodeType.kt index 11d137d..c279087 100644 --- a/package/android/src/main/java/com/mrousavy/camera/types/CodeType.kt +++ b/package/android/src/main/java/com/mrousavy/camera/types/CodeType.kt @@ -13,6 +13,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { EAN_8("ean-8"), ITF("itf"), UPC_E("upc-e"), + UPC_A("upc-a"), QR("qr"), PDF_417("pdf-417"), AZTEC("aztec"), @@ -29,6 +30,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { EAN_8 -> Barcode.FORMAT_EAN_8 ITF -> Barcode.FORMAT_ITF UPC_E -> Barcode.FORMAT_UPC_E + UPC_A -> Barcode.FORMAT_UPC_A QR -> Barcode.FORMAT_QR_CODE PDF_417 -> Barcode.FORMAT_PDF417 AZTEC -> Barcode.FORMAT_AZTEC @@ -47,6 +49,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { Barcode.FORMAT_EAN_8 -> EAN_8 Barcode.FORMAT_ITF -> ITF Barcode.FORMAT_UPC_E -> UPC_E + Barcode.FORMAT_UPC_A -> UPC_A Barcode.FORMAT_QR_CODE -> QR Barcode.FORMAT_PDF417 -> PDF_417 Barcode.FORMAT_AZTEC -> AZTEC @@ -64,6 +67,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { "ean-8" -> EAN_8 "itf" -> ITF "upc-e" -> UPC_E + "upc-a" -> UPC_A "qr" -> QR "pdf-417" -> PDF_417 "aztec" -> AZTEC diff --git a/package/android/src/main/java/com/mrousavy/camera/types/Events.kt b/package/android/src/main/java/com/mrousavy/camera/types/Events.kt new file mode 100644 index 0000000..e34c9f5 --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/types/Events.kt @@ -0,0 +1,36 @@ +package com.mrousavy.camera.types + +import com.facebook.react.bridge.Arguments +import com.facebook.react.bridge.WritableMap +import com.facebook.react.uimanager.events.Event + +class CameraInitializedEvent(surfaceId: Int, viewId: Int) : Event(surfaceId, viewId) { + override fun getEventName() = "cameraInitialized" + override fun getEventData(): 
WritableMap = Arguments.createMap() +} + +class CameraStartedEvent(surfaceId: Int, viewId: Int) : Event(surfaceId, viewId) { + override fun getEventName() = "cameraStarted" + override fun getEventData(): WritableMap = Arguments.createMap() +} + +class CameraStoppedEvent(surfaceId: Int, viewId: Int) : Event(surfaceId, viewId) { + override fun getEventName() = "cameraStopped" + override fun getEventData(): WritableMap = Arguments.createMap() +} + +class CameraErrorEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : Event(surfaceId, viewId) { + override fun getEventName() = "cameraError" + override fun getEventData() = data +} + +class CameraViewReadyEvent(surfaceId: Int, viewId: Int) : Event(surfaceId, viewId) { + override fun getEventName() = "cameraViewReady" + override fun getEventData(): WritableMap = Arguments.createMap() +} + +class CameraCodeScannedEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : + Event(surfaceId, viewId) { + override fun getEventName() = "cameraCodeScanned" + override fun getEventData() = data +} diff --git a/package/android/src/main/java/com/mrousavy/camera/types/HardwareLevel.kt b/package/android/src/main/java/com/mrousavy/camera/types/HardwareLevel.kt index 2df2049..ca34ea6 100644 --- a/package/android/src/main/java/com/mrousavy/camera/types/HardwareLevel.kt +++ b/package/android/src/main/java/com/mrousavy/camera/types/HardwareLevel.kt @@ -9,6 +9,19 @@ enum class HardwareLevel(override val unionValue: String) : JSUnionValue { FULL("full"), LEVEL_3("full"); + private val rank: Int + get() { + return when (this) { + LEGACY -> 0 + LIMITED -> 1 + EXTERNAL -> 1 + FULL -> 2 + LEVEL_3 -> 3 + } + } + + fun isAtLeast(level: HardwareLevel): Boolean = this.rank >= level.rank + companion object { fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel = when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { diff --git 
a/package/android/src/main/java/com/mrousavy/camera/types/Orientation.kt b/package/android/src/main/java/com/mrousavy/camera/types/Orientation.kt index 5d5051b..940986a 100644 --- a/package/android/src/main/java/com/mrousavy/camera/types/Orientation.kt +++ b/package/android/src/main/java/com/mrousavy/camera/types/Orientation.kt @@ -1,6 +1,6 @@ package com.mrousavy.camera.types -import android.hardware.camera2.CameraCharacteristics +import com.mrousavy.camera.core.CameraDeviceDetails enum class Orientation(override val unionValue: String) : JSUnionValue { PORTRAIT("portrait"), @@ -19,23 +19,22 @@ enum class Orientation(override val unionValue: String) : JSUnionValue { fun toDegrees(): Int = when (this) { PORTRAIT -> 0 - LANDSCAPE_RIGHT -> 90 + LANDSCAPE_LEFT -> 90 PORTRAIT_UPSIDE_DOWN -> 180 - LANDSCAPE_LEFT -> 270 + LANDSCAPE_RIGHT -> 270 } - fun toSensorRelativeOrientation(cameraCharacteristics: CameraCharacteristics): Orientation { - val sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! 
- + fun toSensorRelativeOrientation(deviceDetails: CameraDeviceDetails): Orientation { // Convert target orientation to rotation degrees (0, 90, 180, 270) var rotationDegrees = this.toDegrees() // Reverse device orientation for front-facing cameras - val facingFront = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT - if (facingFront) rotationDegrees = -rotationDegrees + if (deviceDetails.lensFacing == LensFacing.FRONT) { + rotationDegrees = -rotationDegrees + } // Rotate sensor rotation by target rotation - val newRotationDegrees = (sensorOrientation + rotationDegrees + 360) % 360 + val newRotationDegrees = (deviceDetails.sensorOrientation.toDegrees() + rotationDegrees + 360) % 360 return fromRotationDegrees(newRotationDegrees) } @@ -52,9 +51,9 @@ enum class Orientation(override val unionValue: String) : JSUnionValue { fun fromRotationDegrees(rotationDegrees: Int): Orientation = when (rotationDegrees) { - in 45..135 -> LANDSCAPE_RIGHT + in 45..135 -> LANDSCAPE_LEFT in 135..225 -> PORTRAIT_UPSIDE_DOWN - in 225..315 -> LANDSCAPE_LEFT + in 225..315 -> LANDSCAPE_RIGHT else -> PORTRAIT } } diff --git a/package/android/src/main/java/com/mrousavy/camera/types/ResizeMode.kt b/package/android/src/main/java/com/mrousavy/camera/types/ResizeMode.kt index b3fa76d..8d03d17 100644 --- a/package/android/src/main/java/com/mrousavy/camera/types/ResizeMode.kt +++ b/package/android/src/main/java/com/mrousavy/camera/types/ResizeMode.kt @@ -1,5 +1,7 @@ package com.mrousavy.camera.types +import com.mrousavy.camera.core.InvalidTypeScriptUnionError + enum class ResizeMode(override val unionValue: String) : JSUnionValue { COVER("cover"), CONTAIN("contain"); @@ -9,7 +11,7 @@ enum class ResizeMode(override val unionValue: String) : JSUnionValue { when (unionValue) { "cover" -> COVER "contain" -> CONTAIN - else -> COVER + else -> throw InvalidTypeScriptUnionError("resizeMode", unionValue) } } } diff --git 
a/package/android/src/main/java/com/mrousavy/camera/types/VideoStabilizationMode.kt b/package/android/src/main/java/com/mrousavy/camera/types/VideoStabilizationMode.kt index 7d1ff25..87de458 100644 --- a/package/android/src/main/java/com/mrousavy/camera/types/VideoStabilizationMode.kt +++ b/package/android/src/main/java/com/mrousavy/camera/types/VideoStabilizationMode.kt @@ -13,21 +13,6 @@ enum class VideoStabilizationMode(override val unionValue: String) : JSUnionValu CINEMATIC("cinematic"), CINEMATIC_EXTENDED("cinematic-extended"); - fun toDigitalStabilizationMode(): Int = - when (this) { - OFF -> CONTROL_VIDEO_STABILIZATION_MODE_OFF - STANDARD -> CONTROL_VIDEO_STABILIZATION_MODE_ON - CINEMATIC -> 2 // TODO: CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION - else -> CONTROL_VIDEO_STABILIZATION_MODE_OFF - } - - fun toOpticalStabilizationMode(): Int = - when (this) { - OFF -> LENS_OPTICAL_STABILIZATION_MODE_OFF - CINEMATIC_EXTENDED -> LENS_OPTICAL_STABILIZATION_MODE_ON - else -> LENS_OPTICAL_STABILIZATION_MODE_OFF - } - companion object : JSUnionValue.Companion { override fun fromUnionValue(unionValue: String?): VideoStabilizationMode = when (unionValue) { diff --git a/package/android/src/main/java/com/mrousavy/camera/utils/CamcorderProfileUtils.kt b/package/android/src/main/java/com/mrousavy/camera/utils/CamcorderProfileUtils.kt new file mode 100644 index 0000000..f1dc64e --- /dev/null +++ b/package/android/src/main/java/com/mrousavy/camera/utils/CamcorderProfileUtils.kt @@ -0,0 +1,101 @@ +package com.mrousavy.camera.utils + +import android.media.CamcorderProfile +import android.os.Build +import android.util.Size +import kotlin.math.abs + +class CamcorderProfileUtils { + companion object { + private fun getResolutionForCamcorderProfileQuality(camcorderProfile: Int): Int = + when (camcorderProfile) { + CamcorderProfile.QUALITY_QCIF -> 176 * 144 + CamcorderProfile.QUALITY_QVGA -> 320 * 240 + CamcorderProfile.QUALITY_CIF -> 352 * 288 + 
CamcorderProfile.QUALITY_VGA -> 640 * 480 + CamcorderProfile.QUALITY_480P -> 720 * 480 + CamcorderProfile.QUALITY_720P -> 1280 * 720 + CamcorderProfile.QUALITY_1080P -> 1920 * 1080 + CamcorderProfile.QUALITY_2K -> 2048 * 1080 + CamcorderProfile.QUALITY_QHD -> 2560 * 1440 + CamcorderProfile.QUALITY_2160P -> 3840 * 2160 + CamcorderProfile.QUALITY_4KDCI -> 4096 * 2160 + CamcorderProfile.QUALITY_8KUHD -> 7680 * 4320 + else -> throw Error("Invalid CamcorderProfile \"$camcorderProfile\"!") + } + + fun findClosestCamcorderProfileQuality(cameraId: String, resolution: Size, allowLargerSize: Boolean): Int { + // Iterate through all available CamcorderProfiles and find the one that matches the closest + val targetResolution = resolution.width * resolution.height + val cameraIdInt = cameraId.toIntOrNull() + + var profiles = (CamcorderProfile.QUALITY_QCIF..CamcorderProfile.QUALITY_8KUHD).filter { profile -> + if (cameraIdInt != null) { + return@filter CamcorderProfile.hasProfile(cameraIdInt, profile) + } else { + return@filter CamcorderProfile.hasProfile(profile) + } + } + if (!allowLargerSize) { + profiles = profiles.filter { profile -> + val currentResolution = getResolutionForCamcorderProfileQuality(profile) + return@filter currentResolution <= targetResolution + } + } + val closestProfile = profiles.minBy { profile -> + val currentResolution = getResolutionForCamcorderProfileQuality(profile) + return@minBy abs(currentResolution - targetResolution) + } + return closestProfile + } + + fun getMaximumVideoSize(cameraId: String): Size? 
{ + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH) + if (profiles != null) { + val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height } + if (largestProfile != null) { + return Size(largestProfile.width, largestProfile.height) + } + } + } + + val cameraIdInt = cameraId.toIntOrNull() + if (cameraIdInt != null) { + val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH) + return Size(profile.videoFrameWidth, profile.videoFrameHeight) + } + + return null + } catch (e: Throwable) { + // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. + return null + } + } + + fun getMaximumFps(cameraId: String, size: Size): Int? { + try { + val quality = findClosestCamcorderProfileQuality(cameraId, size, false) + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + val profiles = CamcorderProfile.getAll(cameraId, quality) + if (profiles != null) { + return profiles.videoProfiles.maxOf { profile -> profile.frameRate } + } + } + + val cameraIdInt = cameraId.toIntOrNull() + if (cameraIdInt != null) { + val profile = CamcorderProfile.get(cameraIdInt, quality) + return profile.videoFrameRate + } + + return null + } catch (e: Throwable) { + // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. 
+ return null + } + } + } +} diff --git a/package/example/README.md b/package/example/README.md index 1f87c4c..413a28f 100644 --- a/package/example/README.md +++ b/package/example/README.md @@ -30,7 +30,7 @@ To try the playground out for yourself, run the following commands: ```sh git clone https://github.com/mrousavy/react-native-vision-camera -cd react-native-vision-camera +cd react-native-vision-camera/package yarn bootstrap ``` diff --git a/package/example/ios/Podfile.lock b/package/example/ios/Podfile.lock index c6e827a..34be125 100644 --- a/package/example/ios/Podfile.lock +++ b/package/example/ios/Podfile.lock @@ -484,7 +484,7 @@ PODS: - libwebp (~> 1.0) - SDWebImage/Core (~> 5.10) - SocketRocket (0.6.1) - - VisionCamera (3.8.2): + - VisionCamera (3.9.0-beta.6): - React - React-callinvoker - React-Core @@ -724,9 +724,9 @@ SPEC CHECKSUMS: SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 - VisionCamera: edbcd00e27a438b2228f67823e2b8d15a189065f + VisionCamera: 33c90675adf75528199f840f81dfbe74a2fe6c3f Yoga: 4c3aa327e4a6a23eeacd71f61c81df1bcdf677d5 PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb -COCOAPODS: 1.11.3 +COCOAPODS: 1.14.3 diff --git a/package/ios/Core/CameraError.swift b/package/ios/Core/CameraError.swift index feb2e9b..eaef5b1 100644 --- a/package/ios/Core/CameraError.swift +++ b/package/ios/Core/CameraError.swift @@ -93,7 +93,7 @@ enum DeviceError: String { case .lowLightBoostNotSupported: return "The currently selected camera device does not support low-light boost! Select a device where `device.supportsLowLightBoost` is true." case .focusNotSupported: - return "The currently selected camera device does not support focussing!" + return "The currently selected camera device does not support focusing!" case .microphoneUnavailable: return "The microphone was unavailable." 
case .notAvailableOnSimulator: diff --git a/package/ios/Core/CameraSession.swift b/package/ios/Core/CameraSession.swift index 92caaac..7952a0e 100644 --- a/package/ios/Core/CameraSession.swift +++ b/package/ios/Core/CameraSession.swift @@ -109,6 +109,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC try lambda(config) } catch { self.onConfigureError(error) + return } let difference = CameraConfiguration.Difference(between: self.configuration, and: config) @@ -117,51 +118,64 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC do { // If needed, configure the AVCaptureSession (inputs, outputs) if difference.isSessionConfigurationDirty { - try self.withSessionLock { - // 1. Update input device - if difference.inputChanged { - try self.configureDevice(configuration: config) - } - // 2. Update outputs - if difference.outputsChanged { - try self.configureOutputs(configuration: config) - } - // 3. Update Video Stabilization - if difference.videoStabilizationChanged { - self.configureVideoStabilization(configuration: config) - } - // 4. Update output orientation - if difference.orientationChanged { - self.configureOrientation(configuration: config) - } + self.captureSession.beginConfiguration() + + // 1. Update input device + if difference.inputChanged { + try self.configureDevice(configuration: config) } + // 2. Update outputs + if difference.outputsChanged { + try self.configureOutputs(configuration: config) + } + // 3. Update Video Stabilization + if difference.videoStabilizationChanged { + self.configureVideoStabilization(configuration: config) + } + // 4. Update output orientation + if difference.orientationChanged { + self.configureOrientation(configuration: config) + } + } + + guard let device = self.videoDeviceInput?.device else { + throw CameraError.device(.noDevice) } // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) 
if difference.isDeviceConfigurationDirty { - try self.withDeviceLock { device in - // 4. Configure format - if difference.formatChanged { - try self.configureFormat(configuration: config, device: device) - } - // 5. After step 2. and 4., we also need to configure the PixelFormat. - // This needs to be done AFTER we updated the `format`, as this controls the supported PixelFormats. - if difference.outputsChanged || difference.formatChanged { - try self.configurePixelFormat(configuration: config) - } - // 6. Configure side-props (fps, lowLightBoost) - if difference.sidePropsChanged { - try self.configureSideProps(configuration: config, device: device) - } - // 7. Configure zoom - if difference.zoomChanged { - self.configureZoom(configuration: config, device: device) - } - // 8. Configure exposure bias - if difference.exposureChanged { - self.configureExposure(configuration: config, device: device) - } + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() } + + // 4. Configure format + if difference.formatChanged { + try self.configureFormat(configuration: config, device: device) + } + // 5. After step 2. and 4., we also need to configure the PixelFormat. + // This needs to be done AFTER we updated the `format`, as this controls the supported PixelFormats. + if difference.outputsChanged || difference.formatChanged { + try self.configurePixelFormat(configuration: config) + } + // 6. Configure side-props (fps, lowLightBoost) + if difference.sidePropsChanged { + try self.configureSideProps(configuration: config, device: device) + } + // 7. Configure zoom + if difference.zoomChanged { + self.configureZoom(configuration: config, device: device) + } + // 8. 
Configure exposure bias + if difference.exposureChanged { + self.configureExposure(configuration: config, device: device) + } + } + + if difference.isSessionConfigurationDirty { + // We commit the session config updates AFTER the device config, + // that way we can also batch those changes into one update instead of doing two updates. + self.captureSession.commitConfiguration() } // 9. Start or stop the session if needed @@ -169,9 +183,11 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC // 10. Enable or disable the Torch if needed (requires session to be running) if difference.torchChanged { - try self.withDeviceLock { device in - try self.configureTorch(configuration: config, device: device) + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() } + try self.configureTorch(configuration: config, device: device) } // Notify about Camera initialization @@ -206,41 +222,6 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC } } - /** - Runs the given [lambda] under an AVCaptureSession configuration lock (`beginConfiguration()`) - */ - private func withSessionLock(_ lambda: () throws -> Void) throws { - // Lock Capture Session for configuration - ReactLogger.log(level: .info, message: "Beginning CameraSession configuration...") - captureSession.beginConfiguration() - defer { - // Unlock Capture Session again and submit configuration to Hardware - self.captureSession.commitConfiguration() - ReactLogger.log(level: .info, message: "Committed CameraSession configuration!") - } - - // Call lambda - try lambda() - } - - /** - Runs the given [lambda] under an AVCaptureDevice configuration lock (`lockForConfiguration()`) - */ - private func withDeviceLock(_ lambda: (_ device: AVCaptureDevice) throws -> Void) throws { - guard let device = videoDeviceInput?.device else { - throw CameraError.session(.cameraNotReady) - } - ReactLogger.log(level: .info, message: "Beginning CaptureDevice 
configuration...") - try device.lockForConfiguration() - defer { - device.unlockForConfiguration() - ReactLogger.log(level: .info, message: "Committed CaptureDevice configuration!") - } - - // Call lambda with Device - try lambda(device) - } - /** Starts or stops the CaptureSession if needed (`isActive`) */ diff --git a/package/ios/Parsers/AVMetadataObject.ObjectType+descriptor.swift b/package/ios/Parsers/AVMetadataObject.ObjectType+descriptor.swift index e74838b..0e22bb7 100644 --- a/package/ios/Parsers/AVMetadataObject.ObjectType+descriptor.swift +++ b/package/ios/Parsers/AVMetadataObject.ObjectType+descriptor.swift @@ -40,6 +40,9 @@ extension AVMetadataObject.ObjectType { case "upc-e": self = .upce return + case "upc-a": + self = .ean13 + return case "qr": self = .qr return diff --git a/package/package.json b/package/package.json index f5eb461..e750ac2 100644 --- a/package/package.json +++ b/package/package.json @@ -1,6 +1,6 @@ { "name": "react-native-vision-camera", - "version": "3.8.2", + "version": "3.9.2", "description": "A powerful, high-performance React Native Camera library.", "main": "lib/commonjs/index", "module": "lib/module/index", @@ -49,26 +49,33 @@ "postpack": "rm ./README.md" }, "keywords": [ - "react-native", - "ios", - "android", - "camera", - "vision", - "native", - "module", "react", + "native", + "camera", + "react-native", + "react-native-camera", + "vision", + "javascript", + "typescript", + "android", + "ios", + "library", + "instagram", + "snapchat", "ai", - "ar", - "qr", - "qr-code", - "barcode", "scanner", + "qrcode", + "barcode", + "qr-code", + "jsi", + "worklet", + "module", "frame", "processing", "realtime" ], "repository": "https://github.com/mrousavy/react-native-vision-camera", - "author": "Marc Rousavy (https://github.com/mrousavy)", + "author": "Marc Rousavy (https://github.com/mrousavy)", "license": "MIT", "bugs": { "url": "https://github.com/mrousavy/react-native-vision-camera/issues" @@ -159,5 +166,6 @@ } ] ] - } + }, + 
"packageManager": "yarn@1.22.19+sha1.4ba7fc5c6e704fce2066ecbfb0b0d8976fe62447" } diff --git a/package/scripts/clang-format.sh b/package/scripts/clang-format.sh index 9c98c17..1d6cea4 100755 --- a/package/scripts/clang-format.sh +++ b/package/scripts/clang-format.sh @@ -5,5 +5,6 @@ if which clang-format >/dev/null; then clang-format -style=file:./cpp/.clang-format -i "$file" done else - echo "warning: clang-format not installed, install with 'brew install clang-format' (or manually from https://clang.llvm.org/docs/ClangFormat.html)" + echo "error: clang-format not installed, install with 'brew install clang-format' (or manually from https://clang.llvm.org/docs/ClangFormat.html)" + exit 1 fi diff --git a/package/scripts/ktlint.sh b/package/scripts/ktlint.sh index 3236451..7a8a263 100755 --- a/package/scripts/ktlint.sh +++ b/package/scripts/ktlint.sh @@ -3,5 +3,6 @@ if which ktlint >/dev/null; then cd android && ktlint --color --relative --editorconfig=./.editorconfig -F ./**/*.kt* else - echo "warning: KTLint not installed, install with 'brew install ktlint' (or manually from https://github.com/pinterest/ktlint)" + echo "error: KTLint not installed, install with 'brew install ktlint' (or manually from https://github.com/pinterest/ktlint)" + exit 1 fi diff --git a/package/scripts/swiftformat.sh b/package/scripts/swiftformat.sh index a7b628f..a06751d 100755 --- a/package/scripts/swiftformat.sh +++ b/package/scripts/swiftformat.sh @@ -3,5 +3,6 @@ if which swiftformat >/dev/null; then cd ios && swiftformat --quiet . 
else - echo "warning: SwiftFormat not installed, install with 'brew install swiftformat' (or manually from https://github.com/nicklockwood/SwiftFormat)" + echo "error: SwiftFormat not installed, install with 'brew install swiftformat' (or manually from https://github.com/nicklockwood/SwiftFormat)" + exit 1 fi diff --git a/package/scripts/swiftlint.sh b/package/scripts/swiftlint.sh index e355098..42efed7 100755 --- a/package/scripts/swiftlint.sh +++ b/package/scripts/swiftlint.sh @@ -3,5 +3,6 @@ if which swiftlint >/dev/null; then cd ios && swiftlint --quiet --fix && swiftlint --quiet else - echo "warning: SwiftLint not installed, install with 'brew install swiftlint' (or manually from https://github.com/realm/SwiftLint)" + echo "error: SwiftLint not installed, install with 'brew install swiftlint' (or manually from https://github.com/realm/SwiftLint)" + exit 1 fi diff --git a/package/src/Camera.tsx b/package/src/Camera.tsx index 441b414..1abaaa9 100644 --- a/package/src/Camera.tsx +++ b/package/src/Camera.tsx @@ -473,7 +473,8 @@ export class Camera extends React.PureComponent { // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition if (device == null) { - throw new Error( + throw new CameraRuntimeError( + 'device/no-device', 'Camera: `device` is null! Select a valid Camera device. 
See: https://mrousavy.com/react-native-vision-camera/docs/guides/devices', ) } diff --git a/package/src/CameraError.ts b/package/src/CameraError.ts index 94b205a..cca7c75 100644 --- a/package/src/CameraError.ts +++ b/package/src/CameraError.ts @@ -25,6 +25,7 @@ export type SessionError = | 'session/camera-cannot-be-opened' | 'session/camera-has-been-disconnected' | 'session/audio-in-use-by-other-app' + | 'session/no-outputs' | 'session/audio-session-failed-to-activate' export type CodeScannerError = | 'code-scanner/not-compatible-with-outputs' @@ -40,7 +41,10 @@ export type CaptureError = | 'capture/recorder-error' | 'capture/video-not-enabled' | 'capture/photo-not-enabled' + | 'capture/frame-invalid' | 'capture/aborted' + | 'capture/focus-canceled' + | 'capture/timed-out' | 'capture/unknown' export type SystemError = | 'system/camera-module-not-found' diff --git a/package/src/CameraProps.ts b/package/src/CameraProps.ts index 83ed1f6..eea1fdd 100644 --- a/package/src/CameraProps.ts +++ b/package/src/CameraProps.ts @@ -183,10 +183,29 @@ export interface CameraProps extends ViewProps { * * @platform iOS * @default - * - true // if video={true} and frameProcessor={undefined} + * - true // if frameProcessor={undefined} * - false // otherwise */ enableBufferCompression?: boolean + /** + * Enables or disables GPU-sampled buffers for the video stream. This only takes effect when using a {@linkcode frameProcessor}. + * + * When recording a Video ({@linkcode video}) while a Frame Processor is running ({@linkcode frameProcessor}), + * the {@linkcode Frame | Frames} will need to be forwarded to the Media Encoder. + * + * - When `enableGpuBuffers` is `false`, the Video Pipeline will use CPU buffers causing an additional copy + * from the Frame Processor to the Media Encoder, which potentially results in increased latency. 
+ * - When `enableGpuBuffers` is `true`, the Video Pipeline will use shared GPU buffers which greatly increases + * it's efficiency as an additional buffer copy is avoided. + * (See [`USAGE_GPU_SAMPLED_IMAGE`](https://developer.android.com/reference/android/hardware/HardwareBuffer#USAGE_GPU_SAMPLED_IMAGE)) + * + * In general, it is recommended to set this to `true` if possible, as this can increase performance and efficiency of the Video Pipeline. + * + * @experimental This is an experimental feature flag, use at your own risk. Some devices (especially Samsungs) may crash when trying to use GPU buffers. + * @platform Android (API 29+) + * @default false + */ + enableGpuBuffers?: boolean /** * Enables or disables low-light boost on this camera device. * @@ -227,6 +246,7 @@ export interface CameraProps extends ViewProps { * * Dual Device fusion for greater detail ([`isDualCameraDualPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings/2873917-isdualcameradualphotodeliveryena)) * * Sets the maximum quality prioritization to `.quality` ([`maxPhotoQualityPrioritization`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3182995-maxphotoqualityprioritization)) * + * @platform iOS * @default false */ enableHighQualityPhotos?: boolean diff --git a/package/src/CodeScanner.ts b/package/src/CodeScanner.ts index 1ad965d..8511d5a 100644 --- a/package/src/CodeScanner.ts +++ b/package/src/CodeScanner.ts @@ -12,6 +12,7 @@ export type CodeType = | 'ean-8' | 'itf' | 'upc-e' + | 'upc-a' | 'qr' | 'pdf-417' | 'aztec' diff --git a/package/src/PhotoFile.ts b/package/src/PhotoFile.ts index 4c7e132..10d6f5b 100644 --- a/package/src/PhotoFile.ts +++ b/package/src/PhotoFile.ts @@ -44,6 +44,14 @@ export interface TakePhotoOptions { * @default true */ enableShutterSound?: boolean + /** + * Whether to run the pre-capture sequence to properly lock AF, AE and AWB values. 
+ * Enabling this results in greater photo quality, but might not work on some devices. + * + * @platform Android + * @default false + */ + enablePrecapture?: boolean } /** diff --git a/package/src/devices/Templates.ts b/package/src/devices/Templates.ts index 3a1df24..274c419 100644 --- a/package/src/devices/Templates.ts +++ b/package/src/devices/Templates.ts @@ -1,9 +1,17 @@ import { Dimensions } from 'react-native' import { FormatFilter } from './getCameraFormat' -type TTemplates = { - [key: string]: FormatFilter[] -} +type PredefinedTemplates = + | 'Video' + | 'Video60Fps' + | 'VideoSlowMotion' + | 'VideoStabilized' + | 'Photo' + | 'PhotoPortrait' + | 'FrameProcessingYUV' + | 'FrameProcessingRGB' + | 'Snapchat' + | 'Instagram' const SnapchatResolution = { width: 1920, height: 1080 } const InstagramResolution = { width: 3840, height: 2160 } @@ -16,7 +24,7 @@ const ScreenAspectRatio = Dimensions.get('window').height / Dimensions.get('wind * const format = useCameraFormat(device, Templates.Snapchat) * ``` */ -export const Templates: TTemplates = { +export const Templates: Record = { /** * Highest resolution video recordings (e.g. 4k) */ diff --git a/tea.yaml b/tea.yaml new file mode 100644 index 0000000..570c683 --- /dev/null +++ b/tea.yaml @@ -0,0 +1,6 @@ +# https://tea.xyz/what-is-this-file +--- +version: 1.0.0 +codeOwners: + - '0xcF3c286e7cDED19D87f61E85B3370283C885bA88' +quorum: 1