Compare commits
	
		
			78 Commits
		
	
	
		
			eyenov/chu
			...
			031aa9d43a
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 031aa9d43a | ||
| fcf5fe70f3 | |||
|  | 3a20c44a31 | ||
| 0329e7976d | |||
| 7c162fecb1 | |||
| b28a152471 | |||
| 65021b895a | |||
|  | 83168044a6 | ||
|  | 60925fc816 | ||
|  | d731fe491d | ||
|  | ebe04b246c | ||
|  | 97941a919f | ||
|  | 8e1f03907b | ||
|  | 3f1a7c9e32 | ||
|  | e8dd1e0b2e | ||
|  | bca9472ab8 | ||
|  | 79ebae56f3 | ||
|  | 86637ac112 | ||
|  | 8225ac1aff | ||
|  | 83b852acd0 | ||
|  | 5ab7b291db | ||
|  | eb57b3877c | ||
|  | 369cb4a043 | ||
|  | fabf019f66 | ||
|  | 4c159aff61 | ||
|  | 55992bb954 | ||
|  | 7ac6f4d008 | ||
|  | 9af6e61dc8 | ||
|  | 2c52fb01e4 | ||
|  | f6b7163660 | ||
|  | 2c976d8ccd | ||
|  | 147aff8683 | ||
|  | bda43d3984 | ||
|  | b105de0194 | ||
|  | 57c6431353 | ||
|  | aad7d2b8b3 | ||
|  | e6c12e2ed2 | ||
|  | 31754eb74c | ||
|  | 129e21f14e | ||
|  | 4168d8f752 | ||
|  | bcd12649e2 | ||
|  | c5646ca1e2 | ||
|  | 83c0184796 | ||
|  | 5df5ca9adf | ||
|  | 21042048ae | ||
|  | a7701c8c9c | ||
|  | 37398cc909 | ||
|  | 61b2f7dd4a | ||
|  | 14daaaaf9d | ||
|  | 1a0bd8f7c2 | ||
|  | 478688529b | ||
|  | 3699ccde94 | ||
|  | ad33dd91b1 | ||
|  | 0130085376 | ||
|  | ec7ce36d25 | ||
|  | b7e24c444e | ||
|  | a2a2e94865 | ||
|  | 1011c3f039 | ||
|  | 24f43efa35 | ||
|  | 0ed3aed48a | ||
|  | b3a88278de | ||
|  | 919e6c9fe8 | ||
|  | fb1d82ad9a | ||
|  | fce6616964 | ||
|  | a4e241a431 | ||
|  | 3192f5e939 | ||
|  | b20d0fc5f7 | ||
|  | 395ee7af89 | ||
|  | 3b892c209e | ||
|  | dbb7f80dc9 | ||
|  | 5acc64e031 | ||
|  | cd5fdd4924 | ||
|  | d8c95c901f | ||
|  | 97168c647c | ||
|  | 14554fa31a | ||
|  | e95264f782 | ||
|  | 8c66d36d8f | ||
|  | 1a8e518180 | 
							
								
								
									
										8
									
								
								.github/workflows/build-android.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.github/workflows/build-android.yml
									
									
									
									
										vendored
									
									
								
							| @@ -30,11 +30,11 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |  | ||||||
|       - name: Setup JDK 11 |       - name: Setup JDK 17 | ||||||
|         uses: actions/setup-java@v4 |         uses: actions/setup-java@v4 | ||||||
|         with: |         with: | ||||||
|           distribution: 'zulu' |           distribution: 'zulu' | ||||||
|           java-version: 11 |           java-version: 17 | ||||||
|           java-package: jdk |           java-package: jdk | ||||||
|  |  | ||||||
|       - name: Get yarn cache directory path |       - name: Get yarn cache directory path | ||||||
| @@ -74,11 +74,11 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |  | ||||||
|       - name: Setup JDK 11 |       - name: Setup JDK 17 | ||||||
|         uses: actions/setup-java@v4 |         uses: actions/setup-java@v4 | ||||||
|         with: |         with: | ||||||
|           distribution: 'zulu' |           distribution: 'zulu' | ||||||
|           java-version: 11 |           java-version: 17 | ||||||
|           java-package: jdk |           java-package: jdk | ||||||
|  |  | ||||||
|       - name: Get yarn cache directory path |       - name: Get yarn cache directory path | ||||||
|   | |||||||
							
								
								
									
										4
									
								
								.github/workflows/build-ios.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/build-ios.yml
									
									
									
									
										vendored
									
									
								
							| @@ -54,7 +54,7 @@ jobs: | |||||||
|           working-directory: package/example/ios |           working-directory: package/example/ios | ||||||
|  |  | ||||||
|       - name: Restore Pods cache |       - name: Restore Pods cache | ||||||
|         uses: actions/cache@v3 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: package/example/ios/Pods |           path: package/example/ios/Pods | ||||||
|           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} |           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} | ||||||
| @@ -113,7 +113,7 @@ jobs: | |||||||
|           working-directory: package/example/ios |           working-directory: package/example/ios | ||||||
|  |  | ||||||
|       - name: Restore Pods cache |       - name: Restore Pods cache | ||||||
|         uses: actions/cache@v3 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: package/example/ios/Pods |           path: package/example/ios/Pods | ||||||
|           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} |           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} | ||||||
|   | |||||||
							
								
								
									
										33
									
								
								.github/workflows/compress-images.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										33
									
								
								.github/workflows/compress-images.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,33 @@ | |||||||
|  | name: Compress Images (docs) | ||||||
|  | on: | ||||||
|  |   pull_request: | ||||||
|  |     # Run Image Actions when JPG, JPEG, PNG or WebP files are added or changed. | ||||||
|  |     # See https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions#onpushpull_requestpaths for reference. | ||||||
|  |     paths: | ||||||
|  |       - ".github/workflows/compress-images.yml" | ||||||
|  |       - "**.jpg" | ||||||
|  |       - "**.jpeg" | ||||||
|  |       - "**.png" | ||||||
|  |       - "**.webp" | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   compress-images: | ||||||
|  |     # Only run on Pull Requests within the same repository, and not from forks. | ||||||
|  |     if: github.event.pull_request.head.repo.full_name == github.repository | ||||||
|  |     name: 🗂 Compress images | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - name: Checkout Repo | ||||||
|  |         uses: actions/checkout@v4 | ||||||
|  |  | ||||||
|  |       - name: Compress Images | ||||||
|  |         uses: calibreapp/image-actions@main | ||||||
|  |         with: | ||||||
|  |           # The `GITHUB_TOKEN` is automatically generated by GitHub and scoped only to the repository that is currently running the action. By default, the action can’t update Pull Requests initiated from forked repositories. | ||||||
|  |           # See https://docs.github.com/en/actions/reference/authentication-in-a-workflow and https://help.github.com/en/articles/virtual-environments-for-github-actions#token-permissions | ||||||
|  |           githubToken: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |           ignorePaths: "e2e/**" | ||||||
|  |           jpegQuality: "80" | ||||||
|  |           jpegProgressive: false | ||||||
|  |           pngQuality: "80" | ||||||
|  |           webpQuality: "80" | ||||||
| @@ -47,6 +47,15 @@ cd ios && pod install | |||||||
|  |  | ||||||
| To see VisionCamera in action, check out [ShadowLens](https://mrousavy.com/projects/shadowlens)! | To see VisionCamera in action, check out [ShadowLens](https://mrousavy.com/projects/shadowlens)! | ||||||
|  |  | ||||||
|  | <div> | ||||||
|  |   <a href="https://apps.apple.com/app/shadowlens/id6471849004"> | ||||||
|  |     <img height="40" src="docs/static/img/appstore.svg" /> | ||||||
|  |   </a> | ||||||
|  |   <a href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens"> | ||||||
|  |     <img height="40" src="docs/static/img/googleplay.svg" /> | ||||||
|  |   </a> | ||||||
|  | </div> | ||||||
|  |  | ||||||
| ### Example | ### Example | ||||||
|  |  | ||||||
| ```tsx | ```tsx | ||||||
|   | |||||||
| @@ -130,4 +130,23 @@ The Code Scanner will call your [`onCodeScanned`](/docs/api/interfaces/CodeScann | |||||||
|  |  | ||||||
| <br /> | <br /> | ||||||
|  |  | ||||||
|  | ## UPC-A vs EAN-13 codes | ||||||
|  |  | ||||||
|  | UPC-A is a special case to handle if you need to cater for it. Android's SDK officially supports UPC-A but iOS does not, instead they handle the code as EAN-13. Since EAN-13 is a superset of UPC-A, with an extra 0 digit at the front. | ||||||
|  |  | ||||||
|  | This means, the `upc-a` types are reported under the `ean-13` umbrella type on iOS: | ||||||
|  |  | ||||||
|  | ```jsx | ||||||
|  | const codeScanner = useCodeScanner({ | ||||||
|  |   codeTypes: ['upc-a'], // <-- ✅ We configure for 'upc-a' types | ||||||
|  |   onCodeScanned: (codes) => { | ||||||
|  |     for (const code of codes) { | ||||||
|  |       console.log(code.type); // <-- ❌ On iOS, we receive 'ean-13' | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | }) | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will need to keep this in mind and do the conversion from EAN-13 to UPC-A yourself. This can be done by removing the front `0` digit from the code to get a UPC-A code. | ||||||
|  |  | ||||||
| #### 🚀 Next section: [Frame Processors](frame-processors) | #### 🚀 Next section: [Frame Processors](frame-processors) | ||||||
|   | |||||||
| @@ -34,12 +34,13 @@ To understand a bit more about camera formats, you first need to understand a fe | |||||||
|   * 4k Photos, 1080p Videos, 240 FPS (ultra high FPS/slow motion) |   * 4k Photos, 1080p Videos, 240 FPS (ultra high FPS/slow motion) | ||||||
|   * 720p Photos, 720p Videos, 30 FPS (smaller buffers/e.g. faster face detection) |   * 720p Photos, 720p Videos, 30 FPS (smaller buffers/e.g. faster face detection) | ||||||
| * Each app has different requirements, so the format filtering is up to you. | * Each app has different requirements, so the format filtering is up to you. | ||||||
|  | * The `videoResolution` and `videoAspectRatio` options also affect the preview, as preview is also running in the video stream. | ||||||
|  |  | ||||||
| To get all available formats, simply use the `CameraDevice`'s [`formats` property](/docs/api/interfaces/CameraDevice#formats).  These are a [CameraFormat's](/docs/api/interfaces/CameraDeviceFormat) props: | To get all available formats, simply use the `CameraDevice`'s [`formats` property](/docs/api/interfaces/CameraDevice#formats).  These are a [CameraFormat's](/docs/api/interfaces/CameraDeviceFormat) props: | ||||||
|  |  | ||||||
| - [`photoHeight`](/docs/api/interfaces/CameraDeviceFormat#photoheight)/[`photoWidth`](/docs/api/interfaces/CameraDeviceFormat#photoWidth): The resolution that will be used for taking photos. Choose a format with your desired resolution. | - [`photoHeight`](/docs/api/interfaces/CameraDeviceFormat#photoheight)/[`photoWidth`](/docs/api/interfaces/CameraDeviceFormat#photoWidth): The resolution that will be used for taking photos. Choose a format with your desired resolution. | ||||||
| - [`videoHeight`](/docs/api/interfaces/CameraDeviceFormat#videoheight)/[`videoWidth`](/docs/api/interfaces/CameraDeviceFormat#videoWidth): The resolution that will be used for recording videos. Choose a format with your desired resolution. | - [`videoHeight`](/docs/api/interfaces/CameraDeviceFormat#videoheight)/[`videoWidth`](/docs/api/interfaces/CameraDeviceFormat#videoWidth): The resolution that will be used for recording videos and streaming into frame processors. This also affects the preview's aspect ratio. Choose a format with your desired resolution. | ||||||
| - [`minFps`](/docs/api/interfaces/CameraDeviceFormat#minfps)/[`maxFps`](/docs/api/interfaces/CameraDeviceFormat#maxfps): A range of possible values for the `fps` property. For example, if your format has `minFps: 1` and `maxFps: 60`, you can either use `fps={30}`, `fps={60}` or any other value in between for recording videos. | - [`minFps`](/docs/api/interfaces/CameraDeviceFormat#minfps)/[`maxFps`](/docs/api/interfaces/CameraDeviceFormat#maxfps): A range of possible values for the `fps` property. For example, if your format has `minFps: 1` and `maxFps: 60`, you can either use `fps={30}`, `fps={60}` or any other value in between for recording videos and streaming into frame processors. | ||||||
| - [`videoStabilizationModes`](/docs/api/interfaces/CameraDeviceFormat#videostabilizationmodes): All supported Video Stabilization Modes, digital and optical. If this specific format contains your desired [`VideoStabilizationMode`](/docs/api/#videostabilizationmode), you can pass it to your `<Camera>` via the [`videoStabilizationMode` property](/docs/api/interfaces/CameraProps#videoStabilizationMode). | - [`videoStabilizationModes`](/docs/api/interfaces/CameraDeviceFormat#videostabilizationmodes): All supported Video Stabilization Modes, digital and optical. If this specific format contains your desired [`VideoStabilizationMode`](/docs/api/#videostabilizationmode), you can pass it to your `<Camera>` via the [`videoStabilizationMode` property](/docs/api/interfaces/CameraProps#videoStabilizationMode). | ||||||
| - [`pixelFormats`](/docs/api/interfaces/CameraDeviceFormat#pixelformats): All supported Pixel Formats. If this specific format contains your desired [`PixelFormat`](/docs/api/#PixelFormat), you can pass it to your `<Camera>` via the [`pixelFormat` property](/docs/api/interfaces/CameraProps#pixelFormat). | - [`pixelFormats`](/docs/api/interfaces/CameraDeviceFormat#pixelformats): All supported Pixel Formats. If this specific format contains your desired [`PixelFormat`](/docs/api/#PixelFormat), you can pass it to your `<Camera>` via the [`pixelFormat` property](/docs/api/interfaces/CameraProps#pixelFormat). | ||||||
| - [`supportsVideoHdr`](/docs/api/interfaces/CameraDeviceFormat#supportsvideohdr): Whether this specific format supports true 10-bit HDR for video capture. If this is `true`, you can enable `videoHdr` on your `<Camera>`. | - [`supportsVideoHdr`](/docs/api/interfaces/CameraDeviceFormat#supportsvideohdr): Whether this specific format supports true 10-bit HDR for video capture. If this is `true`, you can enable `videoHdr` on your `<Camera>`. | ||||||
| @@ -61,6 +62,7 @@ You can either find a matching format manually by looping through your `CameraDe | |||||||
| ```ts | ```ts | ||||||
| const device = ... | const device = ... | ||||||
| const format = useCameraFormat(device, [ | const format = useCameraFormat(device, [ | ||||||
|  |   { videoAspectRatio: 16 / 9 }, | ||||||
|   { videoResolution: { width: 3048, height: 2160 } }, |   { videoResolution: { width: 3048, height: 2160 } }, | ||||||
|   { fps: 60 } |   { fps: 60 } | ||||||
| ]) | ]) | ||||||
| @@ -72,6 +74,7 @@ const format = useCameraFormat(device, [ | |||||||
| ```ts | ```ts | ||||||
| const device = ... | const device = ... | ||||||
| const format = getCameraFormat(device, [ | const format = getCameraFormat(device, [ | ||||||
|  |   { videoAspectRatio: 16 / 9 }, | ||||||
|   { videoResolution: { width: 3048, height: 2160 } }, |   { videoResolution: { width: 3048, height: 2160 } }, | ||||||
|   { fps: 60 } |   { fps: 60 } | ||||||
| ]) | ]) | ||||||
|   | |||||||
| @@ -89,7 +89,8 @@ Additionally, you can also directly access the Frame's pixel data using [`toArra | |||||||
| const frameProcessor = useFrameProcessor((frame) => { | const frameProcessor = useFrameProcessor((frame) => { | ||||||
|   'worklet' |   'worklet' | ||||||
|   if (frame.pixelFormat === 'rgb') { |   if (frame.pixelFormat === 'rgb') { | ||||||
|     const data = frame.toArrayBuffer() |     const buffer = frame.toArrayBuffer() | ||||||
|  |     const data = new Uint8Array(buffer) | ||||||
|     console.log(`Pixel at 0,0: RGB(${data[0]}, ${data[1]}, ${data[2]})`) |     console.log(`Pixel at 0,0: RGB(${data[0]}, ${data[1]}, ${data[2]})`) | ||||||
|   } |   } | ||||||
| }, []) | }, []) | ||||||
|   | |||||||
| @@ -33,6 +33,7 @@ cd ios && pod install | |||||||
| * [xulihang/**vision-camera-dynamsoft-barcode-reader**](https://github.com/xulihang/vision-camera-dynamsoft-barcode-reader): A plugin to read barcodes using Dynamsoft Barcode Reader. | * [xulihang/**vision-camera-dynamsoft-barcode-reader**](https://github.com/xulihang/vision-camera-dynamsoft-barcode-reader): A plugin to read barcodes using Dynamsoft Barcode Reader. | ||||||
| * [xulihang/**vision-camera-dynamsoft-label-recognizer**](https://github.com/xulihang/vision-camera-dynamsoft-label-recognizer): A plugin to recognize text on labels, MRZ passports, etc. using Dynamsoft Label Recognizer. | * [xulihang/**vision-camera-dynamsoft-label-recognizer**](https://github.com/xulihang/vision-camera-dynamsoft-label-recognizer): A plugin to recognize text on labels, MRZ passports, etc. using Dynamsoft Label Recognizer. | ||||||
| * [tony-xlh/**vision-camera-dynamsoft-document-normalizer**](https://github.com/tony-xlh/vision-camera-dynamsoft-document-normalizer): A plugin to scan documents using Dynamsoft Document Normalizer with features like document border detection and perspective transformation. | * [tony-xlh/**vision-camera-dynamsoft-document-normalizer**](https://github.com/tony-xlh/vision-camera-dynamsoft-document-normalizer): A plugin to scan documents using Dynamsoft Document Normalizer with features like document border detection and perspective transformation. | ||||||
|  | * [tony-xlh/**vision-camera-cropper**](https://github.com/tony-xlh/vision-camera-cropper): A plugin to crop frames and save frames to files or as base64. | ||||||
| * [aarongrider/**vision-camera-ocr**](https://github.com/aarongrider/vision-camera-ocr): A plugin to detect text in real time using MLKit Text Detector (OCR). | * [aarongrider/**vision-camera-ocr**](https://github.com/aarongrider/vision-camera-ocr): A plugin to detect text in real time using MLKit Text Detector (OCR). | ||||||
| * [yasintorun/**vision-camera-base64**](https://github.com/yasintorun/vision-camera-base64): A plugin to convert a Frame to a base64 string. | * [yasintorun/**vision-camera-base64**](https://github.com/yasintorun/vision-camera-base64): A plugin to convert a Frame to a base64 string. | ||||||
|  |  | ||||||
| @@ -40,7 +41,7 @@ cd ios && pod install | |||||||
|  |  | ||||||
|  |  | ||||||
| <!-- Add your Frame Processor Plugin here! --> | <!-- Add your Frame Processor Plugin here! --> | ||||||
|  | * [nonam4/**react-native-vision-camera-face-detector**](https://github.com/nonam4/react-native-vision-camera-face-detector): A V3 frame processor plugin to detect faces using MLKit Vision Face Detector. | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -50,7 +51,7 @@ cd ios && pod install | |||||||
| <br /> | <br /> | ||||||
| <p align="center"> | <p align="center"> | ||||||
| <b> | <b> | ||||||
| <a href="https://github.com/mrousavy/react-native-vision-camera/edit/main/docs/docs/guides/FRAME_PROCESSOR_PLUGIN_LIST.mdx">Click here</a> to add your Frame Processor Plugin to this list! | <a href="https://github.com/mrousavy/react-native-vision-camera/edit/main/docs/docs/guides/FRAME_PROCESSOR_PLUGINS.mdx">Click here</a> to add your Frame Processor Plugin to this list! | ||||||
| </b> | </b> | ||||||
| </p> | </p> | ||||||
| <br /> | <br /> | ||||||
|   | |||||||
| @@ -70,6 +70,13 @@ Enable Buffer Compression ([`enableBufferCompression`](/docs/api/interfaces/Came | |||||||
|  |  | ||||||
| Note: When not using a `frameProcessor`, buffer compression is automatically enabled. | Note: When not using a `frameProcessor`, buffer compression is automatically enabled. | ||||||
|  |  | ||||||
|  | ### GPU buffers | ||||||
|  |  | ||||||
|  | Enable GPU Buffer flags ([`enableGpuBuffers`](/docs/api/interfaces/CameraProps#enablegpubuffers)) to optimize the Video Pipeline for zero-copy buffer forwarding. | ||||||
|  | If this is enabled, the Video Pipeline can avoid an additional CPU -> GPU copy, resulting in better performance and more efficiency. | ||||||
|  |  | ||||||
|  | Note: This only takes effect when using a `frameProcessor`. | ||||||
|  |  | ||||||
| ### Video Stabilization | ### Video Stabilization | ||||||
|  |  | ||||||
| Video Stabilization requires additional overhead to start the algorithm, so disabling [`videoStabilizationMode`](/docs/api/interfaces/CameraProps#videostabilizationmode) can significantly speed up the Camera initialization time. | Video Stabilization requires additional overhead to start the algorithm, so disabling [`videoStabilizationMode`](/docs/api/interfaces/CameraProps#videostabilizationmode) can significantly speed up the Camera initialization time. | ||||||
|   | |||||||
| @@ -21,10 +21,10 @@ import useBaseUrl from '@docusaurus/useBaseUrl' | |||||||
| **Download now:** | **Download now:** | ||||||
|  |  | ||||||
| <div style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}> | <div style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}> | ||||||
|   <a href="https://apps.apple.com/at/app/shadowlens/id6471849004"> |   <a href="https://apps.apple.com/app/shadowlens/id6471849004"> | ||||||
|     <img height={40} src={useBaseUrl("img/appstore.svg")} /> |     <img height={40} src={useBaseUrl("img/appstore.svg")} /> | ||||||
|   </a> |   </a> | ||||||
|   <a href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens"> |   <a href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens"> | ||||||
|     <img height={58} src={useBaseUrl("img/playstore.png")} /> |     <img height={40} src={useBaseUrl("img/googleplay.svg")} /> | ||||||
|   </a> |   </a> | ||||||
| </div> | </div> | ||||||
|   | |||||||
| @@ -112,7 +112,7 @@ If you're experiencing build issues or runtime issues in VisionCamera, make sure | |||||||
| 2. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information. | 2. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information. | ||||||
| 3. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs. There is useful information about the Frame Processor Runtime that will tell you if something goes wrong. | 3. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs. There is useful information about the Frame Processor Runtime that will tell you if something goes wrong. | ||||||
| 4. If your Frame Processor is not running, make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI. | 4. If your Frame Processor is not running, make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI. | ||||||
| 5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr` or `format` on the `<Camera>` component except for the required ones: | 5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr`, `enableGpuBuffers` or `format` on the `<Camera>` component except for the required ones: | ||||||
|    ```tsx |    ```tsx | ||||||
|    <Camera device={device} isActive={true} style={{ width: 500, height: 500 }} /> |    <Camera device={device} isActive={true} style={{ width: 500, height: 500 }} /> | ||||||
|    ``` |    ``` | ||||||
|   | |||||||
| @@ -25,7 +25,7 @@ module.exports = { | |||||||
|     }, |     }, | ||||||
|     announcementBar: { |     announcementBar: { | ||||||
|       id: 'shadowlens', |       id: 'shadowlens', | ||||||
|       content: '<b>ShadowLens is out!!! 🥳🥳</b> Download the iOS app to see VisionCamera in action: <a target="_blank" rel="noopener noreferrer" href="https://apps.apple.com/at/app/shadowlens/id6471849004?l=en-GB">apps.apple.com/shadowlens</a>', |       content: '<b>ShadowLens is out!!! 🥳🥳</b> See VisionCamera in action: <a target="_blank" rel="noopener noreferrer" href="https://apps.apple.com/app/shadowlens/id6471849004">Download ShadowLens for iOS</a> or <a target="_blank" rel="noopener noreferrer" href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens">Download ShadowLens for Android</a>', | ||||||
|       backgroundColor: '#e39600', |       backgroundColor: '#e39600', | ||||||
|       textColor: '#ffffff', |       textColor: '#ffffff', | ||||||
|       isCloseable: false, |       isCloseable: false, | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								docs/static/img/googleplay.svg
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										2
									
								
								docs/static/img/googleplay.svg
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							| After Width: | Height: | Size: 7.0 KiB | 
							
								
								
									
										
											BIN
										
									
								
								docs/static/img/playstore.png
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/static/img/playstore.png
									
									
									
									
										vendored
									
									
								
							
										
											Binary file not shown.
										
									
								
							| Before Width: | Height: | Size: 4.8 KiB | 
| @@ -46,6 +46,10 @@ def safeExtGet(prop, fallback) { | |||||||
|   rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback |   rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback | ||||||
| } | } | ||||||
|  |  | ||||||
|  | def safeExtGetBool(prop, fallback) { | ||||||
|  |   Boolean.parseBoolean("${safeExtGet(prop, fallback)}") | ||||||
|  | } | ||||||
|  |  | ||||||
| def reactNativeArchitectures() { | def reactNativeArchitectures() { | ||||||
|   def value = project.getProperties().get("reactNativeArchitectures") |   def value = project.getProperties().get("reactNativeArchitectures") | ||||||
|   return value ? value.split(",") : ["armeabi-v7a", "x86", "x86_64", "arm64-v8a"] |   return value ? value.split(",") : ["armeabi-v7a", "x86", "x86_64", "arm64-v8a"] | ||||||
| @@ -68,10 +72,10 @@ static def findNodeModules(baseDir) { | |||||||
|  |  | ||||||
| def nodeModules = findNodeModules(projectDir) | def nodeModules = findNodeModules(projectDir) | ||||||
|  |  | ||||||
| def hasWorklets = !safeExtGet("VisionCamera_disableFrameProcessors", false) && findProject(":react-native-worklets-core") != null | def hasWorklets = !safeExtGetBool('VisionCamera_disableFrameProcessors', false) && findProject(":react-native-worklets-core") != null | ||||||
| logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? "enabled" : "disabled"}!") | logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? "enabled" : "disabled"}!") | ||||||
|  |  | ||||||
| def enableCodeScanner = safeExtGet("VisionCamera_enableCodeScanner", false) | def enableCodeScanner = safeExtGetBool('VisionCamera_enableCodeScanner', false) | ||||||
|  |  | ||||||
| repositories { | repositories { | ||||||
|   google() |   google() | ||||||
| @@ -129,8 +133,8 @@ android { | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   compileOptions { |   compileOptions { | ||||||
|     sourceCompatibility JavaVersion.VERSION_1_8 |     sourceCompatibility JavaVersion.VERSION_17 | ||||||
|     targetCompatibility JavaVersion.VERSION_1_8 |     targetCompatibility JavaVersion.VERSION_17 | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   externalNativeBuild { |   externalNativeBuild { | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| <manifest xmlns:android="http://schemas.android.com/apk/res/android" | <manifest xmlns:android="http://schemas.android.com/apk/res/android"> | ||||||
|           package="com.mrousavy.camera"> |  | ||||||
|  |  | ||||||
| </manifest> | </manifest> | ||||||
|   | |||||||
| @@ -14,7 +14,7 @@ MutableJByteBuffer::MutableJByteBuffer(jni::alias_ref<jni::JByteBuffer> byteBuff | |||||||
|   _byteBuffer = jni::make_global(byteBuffer); |   _byteBuffer = jni::make_global(byteBuffer); | ||||||
| } | } | ||||||
|  |  | ||||||
| MutableJByteBuffer::~MutableJByteBuffer() noexcept { | MutableJByteBuffer::~MutableJByteBuffer() { | ||||||
|   // Hermes GC might destroy HostObjects on an arbitrary Thread which might not be |   // Hermes GC might destroy HostObjects on an arbitrary Thread which might not be | ||||||
|   // connected to the JNI environment. To make sure fbjni can properly destroy |   // connected to the JNI environment. To make sure fbjni can properly destroy | ||||||
|   // the Java method, we connect to a JNI environment first. |   // the Java method, we connect to a JNI environment first. | ||||||
|   | |||||||
| @@ -29,10 +29,11 @@ OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWi | |||||||
| } | } | ||||||
|  |  | ||||||
| OpenGLRenderer::~OpenGLRenderer() { | OpenGLRenderer::~OpenGLRenderer() { | ||||||
|  |   __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGLRenderer..."); | ||||||
|  |   destroy(); | ||||||
|   if (_outputSurface != nullptr) { |   if (_outputSurface != nullptr) { | ||||||
|     ANativeWindow_release(_outputSurface); |     ANativeWindow_release(_outputSurface); | ||||||
|   } |   } | ||||||
|   destroy(); |  | ||||||
| } | } | ||||||
|  |  | ||||||
| void OpenGLRenderer::destroy() { | void OpenGLRenderer::destroy() { | ||||||
|   | |||||||
| @@ -34,7 +34,7 @@ JSharedArray::JSharedArray(const jni::alias_ref<jhybridobject>& javaThis, const | |||||||
| #else | #else | ||||||
|   jsi::Runtime& runtime = *proxy->cthis()->getJSRuntime(); |   jsi::Runtime& runtime = *proxy->cthis()->getJSRuntime(); | ||||||
| #endif | #endif | ||||||
|   __android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping Java ByteBuffer with size %i...", byteBuffer->getDirectSize()); |   __android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping Java ByteBuffer with size %zu...", byteBuffer->getDirectSize()); | ||||||
|   _byteBuffer = jni::make_global(byteBuffer); |   _byteBuffer = jni::make_global(byteBuffer); | ||||||
|   _size = _byteBuffer->getDirectSize(); |   _size = _byteBuffer->getDirectSize(); | ||||||
|  |  | ||||||
|   | |||||||
| @@ -15,6 +15,7 @@ TSelf JVisionCameraScheduler::initHybrid(jni::alias_ref<jhybridobject> jThis) { | |||||||
| } | } | ||||||
|  |  | ||||||
| void JVisionCameraScheduler::dispatchAsync(const std::function<void()>& job) { | void JVisionCameraScheduler::dispatchAsync(const std::function<void()>& job) { | ||||||
|  |   std::unique_lock<std::mutex> lock(_mutex); | ||||||
|   // 1. add job to queue |   // 1. add job to queue | ||||||
|   _jobs.push(job); |   _jobs.push(job); | ||||||
|   scheduleTrigger(); |   scheduleTrigger(); | ||||||
|   | |||||||
| @@ -5,6 +5,8 @@ import com.facebook.react.bridge.Arguments | |||||||
| import com.facebook.react.bridge.ReactContext | import com.facebook.react.bridge.ReactContext | ||||||
| import com.facebook.react.bridge.WritableMap | import com.facebook.react.bridge.WritableMap | ||||||
| import com.facebook.react.uimanager.events.RCTEventEmitter | import com.facebook.react.uimanager.events.RCTEventEmitter | ||||||
|  | import com.facebook.react.uimanager.UIManagerHelper | ||||||
|  | import com.facebook.react.uimanager.events.Event | ||||||
| import com.google.mlkit.vision.barcode.common.Barcode | import com.google.mlkit.vision.barcode.common.Barcode | ||||||
| import com.mrousavy.camera.core.CameraError | import com.mrousavy.camera.core.CameraError | ||||||
| import com.mrousavy.camera.core.CodeScannerFrame | import com.mrousavy.camera.core.CodeScannerFrame | ||||||
| @@ -12,26 +14,30 @@ import com.mrousavy.camera.core.UnknownCameraError | |||||||
| import com.mrousavy.camera.core.code | import com.mrousavy.camera.core.code | ||||||
| import com.mrousavy.camera.types.CodeType | import com.mrousavy.camera.types.CodeType | ||||||
| import java.io.File | import java.io.File | ||||||
|  | import com.mrousavy.camera.types.* | ||||||
|  |  | ||||||
| fun CameraView.invokeOnInitialized() { | fun CameraView.invokeOnInitialized() { | ||||||
|   Log.i(CameraView.TAG, "invokeOnInitialized()") |   Log.i(CameraView.TAG, "invokeOnInitialized()") | ||||||
|  |  | ||||||
|   val reactContext = context as ReactContext |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraInitialized", null) |   val event = CameraInitializedEvent(surfaceId, id) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnStarted() { | fun CameraView.invokeOnStarted() { | ||||||
|   Log.i(CameraView.TAG, "invokeOnStarted()") |   Log.i(CameraView.TAG, "invokeOnStarted()") | ||||||
|  |  | ||||||
|   val reactContext = context as ReactContext |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStarted", null) |   val event = CameraStartedEvent(surfaceId, id) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnStopped() { | fun CameraView.invokeOnStopped() { | ||||||
|   Log.i(CameraView.TAG, "invokeOnStopped()") |   Log.i(CameraView.TAG, "invokeOnStopped()") | ||||||
|  |  | ||||||
|   val reactContext = context as ReactContext |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStopped", null) |   val event = CameraStoppedEvent(surfaceId, id) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnChunkReady(filepath: File, index: Int) { | fun CameraView.invokeOnChunkReady(filepath: File, index: Int) { | ||||||
| @@ -47,24 +53,27 @@ fun CameraView.invokeOnError(error: Throwable) { | |||||||
|   Log.e(CameraView.TAG, "invokeOnError(...):") |   Log.e(CameraView.TAG, "invokeOnError(...):") | ||||||
|   error.printStackTrace() |   error.printStackTrace() | ||||||
|  |  | ||||||
|   val cameraError = when (error) { |   val cameraError = | ||||||
|  |     when (error) { | ||||||
|       is CameraError -> error |       is CameraError -> error | ||||||
|       else -> UnknownCameraError(error) |       else -> UnknownCameraError(error) | ||||||
|     } |     } | ||||||
|   val event = Arguments.createMap() |   val data = Arguments.createMap() | ||||||
|   event.putString("code", cameraError.code) |   data.putString("code", cameraError.code) | ||||||
|   event.putString("message", cameraError.message) |   data.putString("message", cameraError.message) | ||||||
|   cameraError.cause?.let { cause -> |   cameraError.cause?.let { cause -> | ||||||
|     event.putMap("cause", errorToMap(cause)) |     data.putMap("cause", errorToMap(cause)) | ||||||
|   } |   } | ||||||
|   val reactContext = context as ReactContext |  | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", event) |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|  |   val event = CameraErrorEvent(surfaceId, id, data) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnViewReady() { | fun CameraView.invokeOnViewReady() { | ||||||
|   val event = Arguments.createMap() |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   val reactContext = context as ReactContext |   val event = CameraViewReadyEvent(surfaceId, id) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraViewReady", event) |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnCodeScanned(barcodes: List<Barcode>, scannerFrame: CodeScannerFrame) { | fun CameraView.invokeOnCodeScanned(barcodes: List<Barcode>, scannerFrame: CodeScannerFrame) { | ||||||
| @@ -97,14 +106,23 @@ fun CameraView.invokeOnCodeScanned(barcodes: List<Barcode>, scannerFrame: CodeSc | |||||||
|     codes.pushMap(code) |     codes.pushMap(code) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   val event = Arguments.createMap() |   val data = Arguments.createMap() | ||||||
|   event.putArray("codes", codes) |   data.putArray("codes", codes) | ||||||
|   val codeScannerFrame = Arguments.createMap() |   val codeScannerFrame = Arguments.createMap() | ||||||
|   codeScannerFrame.putInt("width", scannerFrame.width) |   codeScannerFrame.putInt("width", scannerFrame.width) | ||||||
|   codeScannerFrame.putInt("height", scannerFrame.height) |   codeScannerFrame.putInt("height", scannerFrame.height) | ||||||
|   event.putMap("frame", codeScannerFrame) |   data.putMap("frame", codeScannerFrame) | ||||||
|  |  | ||||||
|  |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|  |   val event = CameraCodeScannedEvent(surfaceId, id, data) | ||||||
|  |   this.sendEvent(event) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | private fun CameraView.sendEvent(event: Event<*>) { | ||||||
|   val reactContext = context as ReactContext |   val reactContext = context as ReactContext | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraCodeScanned", event) |   val dispatcher = | ||||||
|  |     UIManagerHelper.getEventDispatcherForReactTag(reactContext, id) | ||||||
|  |   dispatcher?.dispatchEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| private fun errorToMap(error: Throwable): WritableMap { | private fun errorToMap(error: Throwable): WritableMap { | ||||||
|   | |||||||
| @@ -30,9 +30,12 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap { | |||||||
|  |  | ||||||
|   val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced" |   val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced" | ||||||
|   val flash = options["flash"] as? String ?: "off" |   val flash = options["flash"] as? String ?: "off" | ||||||
|   val enableAutoRedEyeReduction = options["enableAutoRedEyeReduction"] == true |  | ||||||
|   val enableAutoStabilization = options["enableAutoStabilization"] == true |   val enableAutoStabilization = options["enableAutoStabilization"] == true | ||||||
|   val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true |   val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true | ||||||
|  |   val enablePrecapture = options["enablePrecapture"] as? Boolean ?: false | ||||||
|  |  | ||||||
|  |   // TODO: Implement Red Eye Reduction | ||||||
|  |   options["enableAutoRedEyeReduction"] | ||||||
|  |  | ||||||
|   val flashMode = Flash.fromUnionValue(flash) |   val flashMode = Flash.fromUnionValue(flash) | ||||||
|   val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization) |   val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization) | ||||||
| @@ -41,8 +44,8 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap { | |||||||
|     qualityPrioritizationMode, |     qualityPrioritizationMode, | ||||||
|     flashMode, |     flashMode, | ||||||
|     enableShutterSound, |     enableShutterSound, | ||||||
|     enableAutoRedEyeReduction, |  | ||||||
|     enableAutoStabilization, |     enableAutoStabilization, | ||||||
|  |     enablePrecapture, | ||||||
|     orientation |     orientation | ||||||
|   ) |   ) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -4,9 +4,9 @@ import android.annotation.SuppressLint | |||||||
| import android.content.Context | import android.content.Context | ||||||
| import android.hardware.camera2.CameraManager | import android.hardware.camera2.CameraManager | ||||||
| import android.util.Log | import android.util.Log | ||||||
|  | import android.view.Gravity | ||||||
| import android.view.ScaleGestureDetector | import android.view.ScaleGestureDetector | ||||||
| import android.widget.FrameLayout | import android.widget.FrameLayout | ||||||
| import com.facebook.react.bridge.ReadableMap |  | ||||||
| import com.google.mlkit.vision.barcode.common.Barcode | import com.google.mlkit.vision.barcode.common.Barcode | ||||||
| import com.mrousavy.camera.core.CameraConfiguration | import com.mrousavy.camera.core.CameraConfiguration | ||||||
| import com.mrousavy.camera.core.CameraQueues | import com.mrousavy.camera.core.CameraQueues | ||||||
| @@ -48,23 +48,23 @@ class CameraView(context: Context) : | |||||||
|   // props that require reconfiguring |   // props that require reconfiguring | ||||||
|   var cameraId: String? = null |   var cameraId: String? = null | ||||||
|   var enableDepthData = false |   var enableDepthData = false | ||||||
|   var enableHighQualityPhotos: Boolean? = null |  | ||||||
|   var enablePortraitEffectsMatteDelivery = false |   var enablePortraitEffectsMatteDelivery = false | ||||||
|  |  | ||||||
|   // use-cases |   // use-cases | ||||||
|   var photo: Boolean? = null |   var photo = false | ||||||
|   var video: Boolean? = null |   var video = false | ||||||
|   var audio: Boolean? = null |   var audio = false | ||||||
|   var enableFrameProcessor = false |   var enableFrameProcessor = false | ||||||
|   var pixelFormat: PixelFormat = PixelFormat.NATIVE |   var pixelFormat: PixelFormat = PixelFormat.NATIVE | ||||||
|  |  | ||||||
|   // props that require format reconfiguring |   // props that require format reconfiguring | ||||||
|   var format: ReadableMap? = null |   var format: CameraDeviceFormat? = null | ||||||
|   var fps: Int? = null |   var fps: Int? = null | ||||||
|   var videoStabilizationMode: VideoStabilizationMode? = null |   var videoStabilizationMode: VideoStabilizationMode? = null | ||||||
|   var videoHdr = false |   var videoHdr = false | ||||||
|   var photoHdr = false |   var photoHdr = false | ||||||
|   var lowLightBoost: Boolean? = null // nullable bool |   var lowLightBoost = false | ||||||
|  |   var enableGpuBuffers = false | ||||||
|  |  | ||||||
|   // other props |   // other props | ||||||
|   var isActive = false |   var isActive = false | ||||||
| @@ -72,7 +72,11 @@ class CameraView(context: Context) : | |||||||
|   var zoom: Float = 1f // in "factor" |   var zoom: Float = 1f // in "factor" | ||||||
|   var exposure: Double = 1.0 |   var exposure: Double = 1.0 | ||||||
|   var orientation: Orientation = Orientation.PORTRAIT |   var orientation: Orientation = Orientation.PORTRAIT | ||||||
|   var enableZoomGesture: Boolean = false |       set(value) { | ||||||
|  |         field = value | ||||||
|  |         previewView.orientation = value | ||||||
|  |       } | ||||||
|  |   var enableZoomGesture = false | ||||||
|     set(value) { |     set(value) { | ||||||
|       field = value |       field = value | ||||||
|       updateZoomGesture() |       updateZoomGesture() | ||||||
| @@ -82,7 +86,7 @@ class CameraView(context: Context) : | |||||||
|       previewView.resizeMode = value |       previewView.resizeMode = value | ||||||
|       field = value |       field = value | ||||||
|     } |     } | ||||||
|   var enableFpsGraph: Boolean = false |   var enableFpsGraph = false | ||||||
|     set(value) { |     set(value) { | ||||||
|       field = value |       field = value | ||||||
|       updateFpsGraph() |       updateFpsGraph() | ||||||
| @@ -110,21 +114,26 @@ class CameraView(context: Context) : | |||||||
|     clipToOutline = true |     clipToOutline = true | ||||||
|     cameraSession = CameraSession(context, cameraManager, this) |     cameraSession = CameraSession(context, cameraManager, this) | ||||||
|     previewView = cameraSession.createPreviewView(context) |     previewView = cameraSession.createPreviewView(context) | ||||||
|  |     previewView.layoutParams = LayoutParams( | ||||||
|  |       LayoutParams.MATCH_PARENT, | ||||||
|  |       LayoutParams.MATCH_PARENT, | ||||||
|  |       Gravity.CENTER | ||||||
|  |     ) | ||||||
|     addView(previewView) |     addView(previewView) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun onAttachedToWindow() { |   override fun onAttachedToWindow() { | ||||||
|  |     super.onAttachedToWindow() | ||||||
|     if (!isMounted) { |     if (!isMounted) { | ||||||
|       isMounted = true |       isMounted = true | ||||||
|       invokeOnViewReady() |       invokeOnViewReady() | ||||||
|     } |     } | ||||||
|     update() |     update() | ||||||
|     super.onAttachedToWindow() |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun onDetachedFromWindow() { |   override fun onDetachedFromWindow() { | ||||||
|     update() |  | ||||||
|     super.onDetachedFromWindow() |     super.onDetachedFromWindow() | ||||||
|  |     update() | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   fun destroy() { |   fun destroy() { | ||||||
| @@ -149,19 +158,20 @@ class CameraView(context: Context) : | |||||||
|         config.cameraId = cameraId |         config.cameraId = cameraId | ||||||
|  |  | ||||||
|         // Photo |         // Photo | ||||||
|         if (photo == true) { |         if (photo) { | ||||||
|           config.photo = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Photo(photoHdr)) |           config.photo = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Photo(photoHdr)) | ||||||
|         } else { |         } else { | ||||||
|           config.photo = CameraConfiguration.Output.Disabled.create() |           config.photo = CameraConfiguration.Output.Disabled.create() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // Video/Frame Processor |         // Video/Frame Processor | ||||||
|         if (video == true || enableFrameProcessor) { |         if (video || enableFrameProcessor) { | ||||||
|           config.video = CameraConfiguration.Output.Enabled.create( |           config.video = CameraConfiguration.Output.Enabled.create( | ||||||
|             CameraConfiguration.Video( |             CameraConfiguration.Video( | ||||||
|               videoHdr, |               videoHdr, | ||||||
|               pixelFormat, |               pixelFormat, | ||||||
|               enableFrameProcessor |               enableFrameProcessor, | ||||||
|  |               enableGpuBuffers | ||||||
|             ) |             ) | ||||||
|           ) |           ) | ||||||
|         } else { |         } else { | ||||||
| @@ -169,7 +179,7 @@ class CameraView(context: Context) : | |||||||
|         } |         } | ||||||
|  |  | ||||||
|         // Audio |         // Audio | ||||||
|         if (audio == true) { |         if (audio) { | ||||||
|           config.audio = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Audio(Unit)) |           config.audio = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Audio(Unit)) | ||||||
|         } else { |         } else { | ||||||
|           config.audio = CameraConfiguration.Output.Disabled.create() |           config.audio = CameraConfiguration.Output.Disabled.create() | ||||||
| @@ -189,12 +199,7 @@ class CameraView(context: Context) : | |||||||
|         config.orientation = orientation |         config.orientation = orientation | ||||||
|  |  | ||||||
|         // Format |         // Format | ||||||
|         val format = format |         config.format = format | ||||||
|         if (format != null) { |  | ||||||
|           config.format = CameraDeviceFormat.fromJSValue(format) |  | ||||||
|         } else { |  | ||||||
|           config.format = null |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         // Side-Props |         // Side-Props | ||||||
|         config.fps = fps |         config.fps = fps | ||||||
|   | |||||||
| @@ -5,10 +5,12 @@ import com.facebook.react.common.MapBuilder | |||||||
| import com.facebook.react.uimanager.ThemedReactContext | import com.facebook.react.uimanager.ThemedReactContext | ||||||
| import com.facebook.react.uimanager.ViewGroupManager | import com.facebook.react.uimanager.ViewGroupManager | ||||||
| import com.facebook.react.uimanager.annotations.ReactProp | import com.facebook.react.uimanager.annotations.ReactProp | ||||||
|  | import com.mrousavy.camera.types.CameraDeviceFormat | ||||||
| import com.mrousavy.camera.types.CodeScannerOptions | import com.mrousavy.camera.types.CodeScannerOptions | ||||||
| import com.mrousavy.camera.types.Orientation | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.PixelFormat | import com.mrousavy.camera.types.PixelFormat | ||||||
| import com.mrousavy.camera.types.ResizeMode | import com.mrousavy.camera.types.ResizeMode | ||||||
|  | import android.util.Log | ||||||
| import com.mrousavy.camera.types.Torch | import com.mrousavy.camera.types.Torch | ||||||
| import com.mrousavy.camera.types.VideoStabilizationMode | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
|  |  | ||||||
| @@ -45,17 +47,17 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "photo") |   @ReactProp(name = "photo") | ||||||
|   fun setPhoto(view: CameraView, photo: Boolean?) { |   fun setPhoto(view: CameraView, photo: Boolean) { | ||||||
|     view.photo = photo |     view.photo = photo | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "video") |   @ReactProp(name = "video") | ||||||
|   fun setVideo(view: CameraView, video: Boolean?) { |   fun setVideo(view: CameraView, video: Boolean) { | ||||||
|     view.video = video |     view.video = video | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "audio") |   @ReactProp(name = "audio") | ||||||
|   fun setAudio(view: CameraView, audio: Boolean?) { |   fun setAudio(view: CameraView, audio: Boolean) { | ||||||
|     view.audio = audio |     view.audio = audio | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -66,8 +68,12 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|  |  | ||||||
|   @ReactProp(name = "pixelFormat") |   @ReactProp(name = "pixelFormat") | ||||||
|   fun setPixelFormat(view: CameraView, pixelFormat: String?) { |   fun setPixelFormat(view: CameraView, pixelFormat: String?) { | ||||||
|  |     if (pixelFormat != null) { | ||||||
|       val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) |       val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) | ||||||
|       view.pixelFormat = newPixelFormat |       view.pixelFormat = newPixelFormat | ||||||
|  |     } else { | ||||||
|  |       view.pixelFormat = PixelFormat.NATIVE | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "enableDepthData") |   @ReactProp(name = "enableDepthData") | ||||||
| @@ -85,15 +91,19 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|     view.enableFpsGraph = enableFpsGraph |     view.enableFpsGraph = enableFpsGraph | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "videoStabilizationMode") |   @ReactProp(name = "enableGpuBuffers") | ||||||
|   fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { |   fun setEnableGpuBuffers(view: CameraView, enableGpuBuffers: Boolean) { | ||||||
|     val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) |     view.enableGpuBuffers = enableGpuBuffers | ||||||
|     view.videoStabilizationMode = newMode |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "enableHighQualityPhotos") |   @ReactProp(name = "videoStabilizationMode") | ||||||
|   fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) { |   fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { | ||||||
|     view.enableHighQualityPhotos = enableHighQualityPhotos |     if (videoStabilizationMode != null) { | ||||||
|  |       val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) | ||||||
|  |       view.videoStabilizationMode = newMode | ||||||
|  |     } else { | ||||||
|  |       view.videoStabilizationMode = null | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "enablePortraitEffectsMatteDelivery") |   @ReactProp(name = "enablePortraitEffectsMatteDelivery") | ||||||
| @@ -103,13 +113,22 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|  |  | ||||||
|   @ReactProp(name = "format") |   @ReactProp(name = "format") | ||||||
|   fun setFormat(view: CameraView, format: ReadableMap?) { |   fun setFormat(view: CameraView, format: ReadableMap?) { | ||||||
|     view.format = format |     if (format != null) { | ||||||
|  |       val newFormat = CameraDeviceFormat.fromJSValue(format) | ||||||
|  |       view.format = newFormat | ||||||
|  |     } else { | ||||||
|  |       view.format = null | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "resizeMode") |   @ReactProp(name = "resizeMode") | ||||||
|   fun setResizeMode(view: CameraView, resizeMode: String) { |   fun setResizeMode(view: CameraView, resizeMode: String?) { | ||||||
|  |     if (resizeMode != null) { | ||||||
|       val newMode = ResizeMode.fromUnionValue(resizeMode) |       val newMode = ResizeMode.fromUnionValue(resizeMode) | ||||||
|       view.resizeMode = newMode |       view.resizeMode = newMode | ||||||
|  |     } else { | ||||||
|  |       view.resizeMode = ResizeMode.COVER | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   // TODO: Change when TurboModules release. |   // TODO: Change when TurboModules release. | ||||||
| @@ -120,30 +139,34 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|     view.fps = if (fps > 0) fps else null |     view.fps = if (fps > 0) fps else null | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "photoHdr", defaultBoolean = false) |   @ReactProp(name = "photoHdr") | ||||||
|   fun setPhotoHdr(view: CameraView, photoHdr: Boolean) { |   fun setPhotoHdr(view: CameraView, photoHdr: Boolean) { | ||||||
|     view.photoHdr = photoHdr |     view.photoHdr = photoHdr | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "videoHdr", defaultBoolean = false) |   @ReactProp(name = "videoHdr") | ||||||
|   fun setVideoHdr(view: CameraView, videoHdr: Boolean) { |   fun setVideoHdr(view: CameraView, videoHdr: Boolean) { | ||||||
|     view.videoHdr = videoHdr |     view.videoHdr = videoHdr | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "lowLightBoost") |   @ReactProp(name = "lowLightBoost") | ||||||
|   fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) { |   fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean) { | ||||||
|     view.lowLightBoost = lowLightBoost |     view.lowLightBoost = lowLightBoost | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "isActive", defaultBoolean = false) |   @ReactProp(name = "isActive") | ||||||
|   fun setIsActive(view: CameraView, isActive: Boolean) { |   fun setIsActive(view: CameraView, isActive: Boolean) { | ||||||
|     view.isActive = isActive |     view.isActive = isActive | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "torch") |   @ReactProp(name = "torch") | ||||||
|   fun setTorch(view: CameraView, torch: String) { |   fun setTorch(view: CameraView, torch: String?) { | ||||||
|  |     if (torch != null) { | ||||||
|       val newMode = Torch.fromUnionValue(torch) |       val newMode = Torch.fromUnionValue(torch) | ||||||
|       view.torch = newMode |       view.torch = newMode | ||||||
|  |     } else { | ||||||
|  |       view.torch = Torch.OFF | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "zoom") |   @ReactProp(name = "zoom") | ||||||
| @@ -158,14 +181,23 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|  |  | ||||||
|   @ReactProp(name = "orientation") |   @ReactProp(name = "orientation") | ||||||
|   fun setOrientation(view: CameraView, orientation: String?) { |   fun setOrientation(view: CameraView, orientation: String?) { | ||||||
|  |     if (orientation != null) { | ||||||
|       val newMode = Orientation.fromUnionValue(orientation) |       val newMode = Orientation.fromUnionValue(orientation) | ||||||
|  |       Log.i(TAG, "Orientation set to: $newMode") | ||||||
|       view.orientation = newMode |       view.orientation = newMode | ||||||
|  |     } else { | ||||||
|  |       view.orientation = Orientation.PORTRAIT | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "codeScannerOptions") |   @ReactProp(name = "codeScannerOptions") | ||||||
|   fun setCodeScanner(view: CameraView, codeScannerOptions: ReadableMap) { |   fun setCodeScanner(view: CameraView, codeScannerOptions: ReadableMap?) { | ||||||
|     val newCodeScannerOptions = CodeScannerOptions(codeScannerOptions) |     if (codeScannerOptions != null) { | ||||||
|  |       val newCodeScannerOptions = CodeScannerOptions.fromJSValue(codeScannerOptions) | ||||||
|       view.codeScannerOptions = newCodeScannerOptions |       view.codeScannerOptions = newCodeScannerOptions | ||||||
|  |     } else { | ||||||
|  |       view.codeScannerOptions = null | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   companion object { |   companion object { | ||||||
|   | |||||||
| @@ -44,7 +44,7 @@ data class CameraConfiguration( | |||||||
|   // Output<T> types, those need to be comparable |   // Output<T> types, those need to be comparable | ||||||
|   data class CodeScanner(val codeTypes: List<CodeType>) |   data class CodeScanner(val codeTypes: List<CodeType>) | ||||||
|   data class Photo(val enableHdr: Boolean) |   data class Photo(val enableHdr: Boolean) | ||||||
|   data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean) |   data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean, val enableGpuBuffers: Boolean) | ||||||
|   data class Audio(val nothing: Unit) |   data class Audio(val nothing: Unit) | ||||||
|   data class Preview(val surface: Surface) |   data class Preview(val surface: Surface) | ||||||
|  |  | ||||||
| @@ -67,7 +67,7 @@ data class CameraConfiguration( | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   data class Difference( |   data class Difference( | ||||||
|     // Input Camera (cameraId, isActive) |     // Input Camera (cameraId) | ||||||
|     val deviceChanged: Boolean, |     val deviceChanged: Boolean, | ||||||
|     // Outputs & Session (Photo, Video, CodeScanner, HDR, Format) |     // Outputs & Session (Photo, Video, CodeScanner, HDR, Format) | ||||||
|     val outputsChanged: Boolean, |     val outputsChanged: Boolean, | ||||||
| @@ -75,14 +75,17 @@ data class CameraConfiguration( | |||||||
|     val sidePropsChanged: Boolean, |     val sidePropsChanged: Boolean, | ||||||
|     // (isActive) changed |     // (isActive) changed | ||||||
|     val isActiveChanged: Boolean |     val isActiveChanged: Boolean | ||||||
|   ) |   ) { | ||||||
|  |     val hasChanges: Boolean | ||||||
|  |       get() = deviceChanged || outputsChanged || sidePropsChanged || isActiveChanged | ||||||
|  |   } | ||||||
|  |  | ||||||
|   companion object { |   companion object { | ||||||
|     fun copyOf(other: CameraConfiguration?): CameraConfiguration = other?.copy() ?: CameraConfiguration() |     fun copyOf(other: CameraConfiguration?): CameraConfiguration = other?.copy() ?: CameraConfiguration() | ||||||
|  |  | ||||||
|     fun difference(left: CameraConfiguration?, right: CameraConfiguration): Difference { |     fun difference(left: CameraConfiguration?, right: CameraConfiguration): Difference { | ||||||
|       // input device |       // input device | ||||||
|       val deviceChanged = left?.cameraId != right.cameraId || left?.isActive != right.isActive |       val deviceChanged = left?.cameraId != right.cameraId | ||||||
|  |  | ||||||
|       // outputs |       // outputs | ||||||
|       val outputsChanged = deviceChanged || |       val outputsChanged = deviceChanged || | ||||||
| @@ -101,7 +104,7 @@ data class CameraConfiguration( | |||||||
|         left.videoStabilizationMode != right.videoStabilizationMode || |         left.videoStabilizationMode != right.videoStabilizationMode || | ||||||
|         left.exposure != right.exposure |         left.exposure != right.exposure | ||||||
|  |  | ||||||
|       val isActiveChanged = left?.isActive != right.isActive |       val isActiveChanged = sidePropsChanged || left?.isActive != right.isActive | ||||||
|  |  | ||||||
|       return Difference( |       return Difference( | ||||||
|         deviceChanged, |         deviceChanged, | ||||||
|   | |||||||
| @@ -1,17 +1,24 @@ | |||||||
| package com.mrousavy.camera.core | package com.mrousavy.camera.core | ||||||
|  |  | ||||||
|  | import android.content.res.Resources | ||||||
| import android.graphics.ImageFormat | import android.graphics.ImageFormat | ||||||
| import android.hardware.camera2.CameraCharacteristics | import android.hardware.camera2.CameraCharacteristics | ||||||
|  | import android.hardware.camera2.CameraExtensionCharacteristics | ||||||
| import android.hardware.camera2.CameraManager | import android.hardware.camera2.CameraManager | ||||||
| import android.hardware.camera2.CameraMetadata | import android.hardware.camera2.CameraMetadata | ||||||
| import android.os.Build | import android.os.Build | ||||||
|  | import android.util.Log | ||||||
| import android.util.Range | import android.util.Range | ||||||
| import android.util.Size | import android.util.Size | ||||||
|  | import android.util.SizeF | ||||||
|  | import android.view.SurfaceHolder | ||||||
| import com.facebook.react.bridge.Arguments | import com.facebook.react.bridge.Arguments | ||||||
| import com.facebook.react.bridge.ReadableArray | import com.facebook.react.bridge.ReadableArray | ||||||
| import com.facebook.react.bridge.ReadableMap | import com.facebook.react.bridge.ReadableMap | ||||||
|  | import com.mrousavy.camera.extensions.bigger | ||||||
| import com.mrousavy.camera.extensions.getPhotoSizes | import com.mrousavy.camera.extensions.getPhotoSizes | ||||||
| import com.mrousavy.camera.extensions.getVideoSizes | import com.mrousavy.camera.extensions.getVideoSizes | ||||||
|  | import com.mrousavy.camera.extensions.smaller | ||||||
| import com.mrousavy.camera.extensions.toJSValue | import com.mrousavy.camera.extensions.toJSValue | ||||||
| import com.mrousavy.camera.types.AutoFocusSystem | import com.mrousavy.camera.types.AutoFocusSystem | ||||||
| import com.mrousavy.camera.types.DeviceType | import com.mrousavy.camera.types.DeviceType | ||||||
| @@ -20,65 +27,127 @@ import com.mrousavy.camera.types.LensFacing | |||||||
| import com.mrousavy.camera.types.Orientation | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.PixelFormat | import com.mrousavy.camera.types.PixelFormat | ||||||
| import com.mrousavy.camera.types.VideoStabilizationMode | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
|  | import com.mrousavy.camera.utils.CamcorderProfileUtils | ||||||
| import kotlin.math.atan2 | import kotlin.math.atan2 | ||||||
| import kotlin.math.sqrt | import kotlin.math.sqrt | ||||||
|  |  | ||||||
| class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String) { | class CameraDeviceDetails(private val cameraManager: CameraManager, val cameraId: String) { | ||||||
|   val characteristics = cameraManager.getCameraCharacteristics(cameraId) |   companion object { | ||||||
|   val hardwareLevel = HardwareLevel.fromCameraCharacteristics(characteristics) |     private const val TAG = "CameraDeviceDetails" | ||||||
|   val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0) |  | ||||||
|   val extensions = getSupportedExtensions() |     fun getMaximumPreviewSize(): Size { | ||||||
|  |       // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap | ||||||
|  |       // According to the Android Developer documentation, PREVIEW streams can have a resolution | ||||||
|  |       // of up to the phone's display's resolution, with a maximum of 1920x1080. | ||||||
|  |       val display1080p = Size(1920, 1080) | ||||||
|  |       val displaySize = Size( | ||||||
|  |         Resources.getSystem().displayMetrics.widthPixels, | ||||||
|  |         Resources.getSystem().displayMetrics.heightPixels | ||||||
|  |       ) | ||||||
|  |       val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller | ||||||
|  |  | ||||||
|  |       return if (isHighResScreen) display1080p else displaySize | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   val characteristics by lazy { cameraManager.getCameraCharacteristics(cameraId) } | ||||||
|  |   val hardwareLevel by lazy { HardwareLevel.fromCameraCharacteristics(characteristics) } | ||||||
|  |   val capabilities by lazy { characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0) } | ||||||
|  |   val extensions by lazy { getSupportedExtensions() } | ||||||
|  |  | ||||||
|   // device characteristics |   // device characteristics | ||||||
|   val isMultiCam = capabilities.contains(11) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA |   val isMultiCam by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) } | ||||||
|   val supportsDepthCapture = capabilities.contains(8) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT |   val supportsDepthCapture by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) } | ||||||
|   val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) |   val supportsRawCapture by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) } | ||||||
|   val supportsLowLightBoost = extensions.contains(4) // TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT |   val supportsLowLightBoost by lazy { | ||||||
|   val lensFacing = LensFacing.fromCameraCharacteristics(characteristics) |     extensions.contains(CameraExtensionCharacteristics.EXTENSION_NIGHT) && | ||||||
|   val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false |       modes.contains(CameraCharacteristics.CONTROL_MODE_USE_SCENE_MODE) | ||||||
|   val focalLengths = |   } | ||||||
|     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) |   val lensFacing by lazy { LensFacing.fromCameraCharacteristics(characteristics) } | ||||||
|       // 35mm is the film standard sensor size |   val hasFlash by lazy { characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false } | ||||||
|       ?: floatArrayOf(35f) |   val focalLengths by lazy { | ||||||
|   val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!! |     // 35mm is the film standard sensor size | ||||||
|   val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! |     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f) | ||||||
|   val minFocusDistance = getMinFocusDistanceCm() |   } | ||||||
|   val name = ( |   val sensorSize by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE) ?: SizeF(0f, 0f) } | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { |   val activeSize | ||||||
|       characteristics.get(CameraCharacteristics.INFO_VERSION) |     get() = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! | ||||||
|     } else { |   val sensorOrientation by lazy { | ||||||
|       null |     val degrees = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0 | ||||||
|  |     return@lazy Orientation.fromRotationDegrees(degrees) | ||||||
|  |   } | ||||||
|  |   val minFocusDistance by lazy { getMinFocusDistanceCm() } | ||||||
|  |   val name by lazy { | ||||||
|  |     val info = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION) else null | ||||||
|  |     return@lazy info ?: "$lensFacing ($cameraId)" | ||||||
|   } |   } | ||||||
|     ) ?: "$lensFacing ($cameraId)" |  | ||||||
|  |  | ||||||
|   // "formats" (all possible configurations for this device) |   // "formats" (all possible configurations for this device) | ||||||
|   val zoomRange = ( |   val maxDigitalZoom by lazy { characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f } | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { |   val zoomRange by lazy { | ||||||
|  |     val range = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { | ||||||
|       characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) |       characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) | ||||||
|     } else { |     } else { | ||||||
|       null |       null | ||||||
|     } |     } | ||||||
|     ) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f) |     return@lazy range ?: Range(1f, maxDigitalZoom) | ||||||
|   val physicalDevices = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && characteristics.physicalCameraIds.isNotEmpty()) { |   } | ||||||
|  |   val physicalDevices by lazy { | ||||||
|  |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && characteristics.physicalCameraIds.isNotEmpty()) { | ||||||
|       characteristics.physicalCameraIds |       characteristics.physicalCameraIds | ||||||
|     } else { |     } else { | ||||||
|       setOf(cameraId) |       setOf(cameraId) | ||||||
|     } |     } | ||||||
|   val minZoom = zoomRange.lower.toDouble() |   } | ||||||
|   val maxZoom = zoomRange.upper.toDouble() |   val minZoom by lazy { zoomRange.lower.toDouble() } | ||||||
|  |   val maxZoom by lazy { zoomRange.upper.toDouble() } | ||||||
|  |  | ||||||
|   val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! |   val cameraConfig by lazy { characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! } | ||||||
|   val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) |   val isoRange by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) } | ||||||
|   val exposureRange = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE) ?: Range(0, 0) |   val exposureRange by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE) ?: Range(0, 0) } | ||||||
|   val digitalStabilizationModes = |   val digitalStabilizationModes by lazy { | ||||||
|     characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0) |     characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0) | ||||||
|   val opticalStabilizationModes = |   } | ||||||
|  |   val opticalStabilizationModes by lazy { | ||||||
|     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0) |     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0) | ||||||
|   val supportsPhotoHdr = extensions.contains(3) // TODO: CameraExtensionCharacteristics.EXTENSION_HDR |   } | ||||||
|   val supportsVideoHdr = getHasVideoHdr() |   val supportsPhotoHdr by lazy { extensions.contains(CameraExtensionCharacteristics.EXTENSION_HDR) } | ||||||
|   val autoFocusSystem = getAutoFocusSystemMode() |   val supportsVideoHdr by lazy { getHasVideoHdr() } | ||||||
|  |   val autoFocusSystem by lazy { getAutoFocusSystemMode() } | ||||||
|  |  | ||||||
|  |   val supportsYuvProcessing by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING) } | ||||||
|  |   val supportsPrivateProcessing by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING) } | ||||||
|  |   val supportsZsl by lazy { supportsYuvProcessing || supportsPrivateProcessing } | ||||||
|  |  | ||||||
|  |   val isBackwardsCompatible by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) } | ||||||
|  |   val supportsSnapshotCapture by lazy { supportsSnapshotCapture() } | ||||||
|  |  | ||||||
|  |   val supportsFocusRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) ?: 0) > 0 } | ||||||
|  |   val supportsExposureRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0) > 0 } | ||||||
|  |   val supportsWhiteBalanceRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) ?: 0) > 0 } | ||||||
|  |  | ||||||
|  |   val modes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |   val afModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |   val aeModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |   val awbModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |  | ||||||
|  |   val availableAberrationModes by lazy { | ||||||
|  |     characteristics.get(CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES) | ||||||
|  |       ?: intArrayOf() | ||||||
|  |   } | ||||||
|  |   val availableHotPixelModes by lazy { characteristics.get(CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES) ?: intArrayOf() } | ||||||
|  |   val availableEdgeModes by lazy { characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES) ?: intArrayOf() } | ||||||
|  |   val availableDistortionCorrectionModes by lazy { getAvailableDistortionCorrectionModesOrEmptyArray() } | ||||||
|  |   val availableShadingModes by lazy { characteristics.get(CameraCharacteristics.SHADING_AVAILABLE_MODES) ?: intArrayOf() } | ||||||
|  |   val availableToneMapModes by lazy { characteristics.get(CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES) ?: intArrayOf() } | ||||||
|  |   val availableNoiseReductionModes by lazy { | ||||||
|  |     characteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES) | ||||||
|  |       ?: intArrayOf() | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // TODO: Also add 10-bit YUV here? | ||||||
|   val videoFormat = ImageFormat.YUV_420_888 |   val videoFormat = ImageFormat.YUV_420_888 | ||||||
|  |   val photoFormat = ImageFormat.JPEG | ||||||
|  |  | ||||||
|   // get extensions (HDR, Night Mode, ..) |   // get extensions (HDR, Night Mode, ..) | ||||||
|   private fun getSupportedExtensions(): List<Int> = |   private fun getSupportedExtensions(): List<Int> = | ||||||
| @@ -89,6 +158,13 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|       emptyList() |       emptyList() | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |   private fun getAvailableDistortionCorrectionModesOrEmptyArray(): IntArray = | ||||||
|  |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { | ||||||
|  |       characteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES) ?: intArrayOf() | ||||||
|  |     } else { | ||||||
|  |       intArrayOf() | ||||||
|  |     } | ||||||
|  |  | ||||||
|   private fun getHasVideoHdr(): Boolean { |   private fun getHasVideoHdr(): Boolean { | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { | ||||||
|       if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) { |       if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) { | ||||||
| @@ -102,10 +178,19 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|   private fun getMinFocusDistanceCm(): Double { |   private fun getMinFocusDistanceCm(): Double { | ||||||
|     val distance = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) |     val distance = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) | ||||||
|     if (distance == null || distance == 0f) return 0.0 |     if (distance == null || distance == 0f) return 0.0 | ||||||
|  |     if (distance.isNaN() || distance.isInfinite()) return 0.0 | ||||||
|     // distance is in "diopters", meaning 1/meter. Convert to meters, then centi-meters |     // distance is in "diopters", meaning 1/meter. Convert to meters, then centi-meters | ||||||
|     return 1.0 / distance * 100.0 |     return 1.0 / distance * 100.0 | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |   @Suppress("RedundantIf") | ||||||
|  |   private fun supportsSnapshotCapture(): Boolean { | ||||||
|  |     // As per CameraDevice.TEMPLATE_VIDEO_SNAPSHOT in documentation: | ||||||
|  |     if (hardwareLevel == HardwareLevel.LEGACY) return false | ||||||
|  |     if (supportsDepthCapture && !isBackwardsCompatible) return false | ||||||
|  |     return true | ||||||
|  |   } | ||||||
|  |  | ||||||
|   private fun createStabilizationModes(): ReadableArray { |   private fun createStabilizationModes(): ReadableArray { | ||||||
|     val array = Arguments.createArray() |     val array = Arguments.createArray() | ||||||
|     digitalStabilizationModes.forEach { videoStabilizationMode -> |     digitalStabilizationModes.forEach { videoStabilizationMode -> | ||||||
| @@ -146,6 +231,9 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun getFieldOfView(focalLength: Float): Double { |   private fun getFieldOfView(focalLength: Float): Double { | ||||||
|  |     if ((sensorSize.width == 0f) || (sensorSize.height == 0f)) { | ||||||
|  |       return 0.0 | ||||||
|  |     } | ||||||
|     val sensorDiagonal = sqrt((sensorSize.width * sensorSize.width + sensorSize.height * sensorSize.height).toDouble()) |     val sensorDiagonal = sqrt((sensorSize.width * sensorSize.width + sensorSize.height * sensorSize.height).toDouble()) | ||||||
|     val fovRadians = 2.0 * atan2(sensorDiagonal, (2.0 * focalLength)) |     val fovRadians = 2.0 * atan2(sensorDiagonal, (2.0 * focalLength)) | ||||||
|     return Math.toDegrees(fovRadians) |     return Math.toDegrees(fovRadians) | ||||||
| @@ -156,18 +244,31 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|     return getFieldOfView(smallestFocalLength) |     return getFieldOfView(smallestFocalLength) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun getVideoSizes(): List<Size> = characteristics.getVideoSizes(cameraId, videoFormat) |   fun getVideoSizes(format: Int): List<Size> = characteristics.getVideoSizes(cameraId, format) | ||||||
|   private fun getPhotoSizes(): List<Size> = characteristics.getPhotoSizes(ImageFormat.JPEG) |   fun getPhotoSizes(): List<Size> = characteristics.getPhotoSizes(photoFormat) | ||||||
|  |   fun getPreviewSizes(): List<Size> { | ||||||
|  |     val maximumPreviewSize = getMaximumPreviewSize() | ||||||
|  |     return cameraConfig.getOutputSizes(SurfaceHolder::class.java) | ||||||
|  |       .filter { it.bigger <= maximumPreviewSize.bigger && it.smaller <= maximumPreviewSize.smaller } | ||||||
|  |   } | ||||||
|  |  | ||||||
|   private fun getFormats(): ReadableArray { |   private fun getFormats(): ReadableArray { | ||||||
|     val array = Arguments.createArray() |     val array = Arguments.createArray() | ||||||
|  |  | ||||||
|     val videoSizes = getVideoSizes() |     val videoSizes = getVideoSizes(videoFormat) | ||||||
|     val photoSizes = getPhotoSizes() |     val photoSizes = getPhotoSizes() | ||||||
|  |  | ||||||
|     videoSizes.forEach { videoSize -> |     videoSizes.forEach { videoSize -> | ||||||
|       val frameDuration = cameraConfig.getOutputMinFrameDuration(videoFormat, videoSize) |       val frameDuration = cameraConfig.getOutputMinFrameDuration(videoFormat, videoSize) | ||||||
|       val maxFps = (1.0 / (frameDuration.toDouble() / 1_000_000_000)).toInt() |       var maxFps = (1.0 / (frameDuration.toDouble() / 1_000_000_000)).toInt() | ||||||
|  |       val maxEncoderFps = CamcorderProfileUtils.getMaximumFps(cameraId, videoSize) | ||||||
|  |       if (maxEncoderFps != null && maxEncoderFps < maxFps) { | ||||||
|  |         Log.i( | ||||||
|  |           TAG, | ||||||
|  |           "Camera could do $maxFps FPS at $videoSize, but Media Encoder can only do $maxEncoderFps FPS. Clamping to $maxEncoderFps FPS..." | ||||||
|  |         ) | ||||||
|  |         maxFps = maxEncoderFps | ||||||
|  |       } | ||||||
|  |  | ||||||
|       photoSizes.forEach { photoSize -> |       photoSizes.forEach { photoSize -> | ||||||
|         val map = buildFormatMap(photoSize, videoSize, Range(1, maxFps)) |         val map = buildFormatMap(photoSize, videoSize, Range(1, maxFps)) | ||||||
| @@ -175,8 +276,6 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|       } |       } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // TODO: Add high-speed video ranges (high-fps / slow-motion) |  | ||||||
|  |  | ||||||
|     return array |     return array | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -223,14 +322,14 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|     map.putBoolean("isMultiCam", isMultiCam) |     map.putBoolean("isMultiCam", isMultiCam) | ||||||
|     map.putBoolean("supportsRawCapture", supportsRawCapture) |     map.putBoolean("supportsRawCapture", supportsRawCapture) | ||||||
|     map.putBoolean("supportsLowLightBoost", supportsLowLightBoost) |     map.putBoolean("supportsLowLightBoost", supportsLowLightBoost) | ||||||
|     map.putBoolean("supportsFocus", true) // I believe every device here supports focussing |     map.putBoolean("supportsFocus", supportsFocusRegions) | ||||||
|     map.putDouble("minZoom", minZoom) |     map.putDouble("minZoom", minZoom) | ||||||
|     map.putDouble("maxZoom", maxZoom) |     map.putDouble("maxZoom", maxZoom) | ||||||
|     map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android |     map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android | ||||||
|     map.putDouble("minExposure", exposureRange.lower.toDouble()) |     map.putDouble("minExposure", exposureRange.lower.toDouble()) | ||||||
|     map.putDouble("maxExposure", exposureRange.upper.toDouble()) |     map.putDouble("maxExposure", exposureRange.upper.toDouble()) | ||||||
|     map.putString("hardwareLevel", hardwareLevel.unionValue) |     map.putString("hardwareLevel", hardwareLevel.unionValue) | ||||||
|     map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue) |     map.putString("sensorOrientation", sensorOrientation.unionValue) | ||||||
|     map.putArray("formats", getFormats()) |     map.putArray("formats", getFormats()) | ||||||
|     return map |     return map | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -62,6 +62,8 @@ class FlashUnavailableError : | |||||||
|     "flash-unavailable", |     "flash-unavailable", | ||||||
|     "The Camera Device does not have a flash unit! Make sure you select a device where `device.hasFlash`/`device.hasTorch` is true." |     "The Camera Device does not have a flash unit! Make sure you select a device where `device.hasFlash`/`device.hasTorch` is true." | ||||||
|   ) |   ) | ||||||
|  | class FocusNotSupportedError : | ||||||
|  |   CameraError("device", "focus-not-supported", "The currently selected camera device does not support focusing!") | ||||||
|  |  | ||||||
| class CameraNotReadyError : | class CameraNotReadyError : | ||||||
|   CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!") |   CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!") | ||||||
| @@ -71,6 +73,8 @@ class CameraSessionCannotBeConfiguredError(cameraId: String) : | |||||||
|   CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera #$cameraId!") |   CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera #$cameraId!") | ||||||
| class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : | class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : | ||||||
|   CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error") |   CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error") | ||||||
|  | class NoOutputsError : | ||||||
|  |   CameraError("session", "no-outputs", "Cannot create a CameraCaptureSession without any outputs! (PREVIEW, PHOTO, VIDEO, ...)") | ||||||
|  |  | ||||||
| class PropRequiresFormatToBeNonNullError(propName: String) : | class PropRequiresFormatToBeNonNullError(propName: String) : | ||||||
|   CameraError("format", "format-required", "The prop \"$propName\" requires a format to be set, but format was null!") |   CameraError("format", "format-required", "The prop \"$propName\" requires a format to be set, but format was null!") | ||||||
| @@ -100,6 +104,8 @@ class PhotoNotEnabledError : | |||||||
|   CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.") |   CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.") | ||||||
| class CaptureAbortedError(wasImageCaptured: Boolean) : | class CaptureAbortedError(wasImageCaptured: Boolean) : | ||||||
|   CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured") |   CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured") | ||||||
|  | class FocusCanceledError : CameraError("capture", "focus-canceled", "The focus operation was canceled.") | ||||||
|  | class CaptureTimedOutError : CameraError("capture", "timed-out", "The image capture was aborted because it timed out.") | ||||||
| class UnknownCaptureError(wasImageCaptured: Boolean) : | class UnknownCaptureError(wasImageCaptured: Boolean) : | ||||||
|   CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured") |   CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured") | ||||||
| class RecorderError(name: String, extra: Int) : | class RecorderError(name: String, extra: Int) : | ||||||
| @@ -113,6 +119,16 @@ class RecordingInProgressError : | |||||||
|     "recording-in-progress", |     "recording-in-progress", | ||||||
|     "There is already an active video recording in progress! Did you call startRecording() twice?" |     "There is already an active video recording in progress! Did you call startRecording() twice?" | ||||||
|   ) |   ) | ||||||
|  | class FrameInvalidError : | ||||||
|  |   CameraError( | ||||||
|  |     "capture", | ||||||
|  |     "frame-invalid", | ||||||
|  |     "Trying to access an already closed Frame! " + | ||||||
|  |       "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n" + | ||||||
|  |       "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n" + | ||||||
|  |       "- If you want to do async processing, use `runAsync(...)` instead.\n" + | ||||||
|  |       "- If you want to use runOnJS, increment it's ref-count: `frame.incrementRefCount()`" | ||||||
|  |   ) | ||||||
|  |  | ||||||
| class CodeTypeNotSupportedError(codeType: String) : | class CodeTypeNotSupportedError(codeType: String) : | ||||||
|   CameraError( |   CameraError( | ||||||
|   | |||||||
| @@ -5,50 +5,33 @@ import android.content.Context | |||||||
| import android.content.pm.PackageManager | import android.content.pm.PackageManager | ||||||
| import android.graphics.ImageFormat | import android.graphics.ImageFormat | ||||||
| import android.graphics.Point | import android.graphics.Point | ||||||
| import android.hardware.camera2.CameraCaptureSession |  | ||||||
| import android.hardware.camera2.CameraCharacteristics | import android.hardware.camera2.CameraCharacteristics | ||||||
| import android.hardware.camera2.CameraDevice |  | ||||||
| import android.hardware.camera2.CameraManager | import android.hardware.camera2.CameraManager | ||||||
| import android.hardware.camera2.CameraMetadata |  | ||||||
| import android.hardware.camera2.CaptureRequest |  | ||||||
| import android.hardware.camera2.CaptureResult | import android.hardware.camera2.CaptureResult | ||||||
| import android.hardware.camera2.TotalCaptureResult | import android.hardware.camera2.TotalCaptureResult | ||||||
| import android.hardware.camera2.params.MeteringRectangle |  | ||||||
| import android.media.Image | import android.media.Image | ||||||
| import android.media.ImageReader | import android.media.ImageReader | ||||||
| import android.os.Build |  | ||||||
| import android.util.Log | import android.util.Log | ||||||
| import android.util.Range |  | ||||||
| import android.util.Size | import android.util.Size | ||||||
| import android.view.Surface | import android.view.Surface | ||||||
| import android.view.SurfaceHolder | import android.view.SurfaceHolder | ||||||
| import androidx.core.content.ContextCompat | import androidx.core.content.ContextCompat | ||||||
| import com.google.mlkit.vision.barcode.common.Barcode | import com.google.mlkit.vision.barcode.common.Barcode | ||||||
|  | import com.mrousavy.camera.core.capture.RepeatingCaptureRequest | ||||||
| import com.mrousavy.camera.core.outputs.BarcodeScannerOutput | import com.mrousavy.camera.core.outputs.BarcodeScannerOutput | ||||||
| import com.mrousavy.camera.core.outputs.PhotoOutput | import com.mrousavy.camera.core.outputs.PhotoOutput | ||||||
| import com.mrousavy.camera.core.outputs.SurfaceOutput | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
| import com.mrousavy.camera.core.outputs.VideoPipelineOutput | import com.mrousavy.camera.core.outputs.VideoPipelineOutput | ||||||
| import com.mrousavy.camera.extensions.capture |  | ||||||
| import com.mrousavy.camera.extensions.closestToOrMax | import com.mrousavy.camera.extensions.closestToOrMax | ||||||
| import com.mrousavy.camera.extensions.createCaptureSession |  | ||||||
| import com.mrousavy.camera.extensions.createPhotoCaptureRequest |  | ||||||
| import com.mrousavy.camera.extensions.getPhotoSizes |  | ||||||
| import com.mrousavy.camera.extensions.getPreviewTargetSize |  | ||||||
| import com.mrousavy.camera.extensions.getVideoSizes |  | ||||||
| import com.mrousavy.camera.extensions.openCamera |  | ||||||
| import com.mrousavy.camera.extensions.setZoom |  | ||||||
| import com.mrousavy.camera.frameprocessor.Frame | import com.mrousavy.camera.frameprocessor.Frame | ||||||
| import com.mrousavy.camera.frameprocessor.FrameProcessor |  | ||||||
| import com.mrousavy.camera.types.Flash | import com.mrousavy.camera.types.Flash | ||||||
|  | import com.mrousavy.camera.types.LensFacing | ||||||
| import com.mrousavy.camera.types.Orientation | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.QualityPrioritization | import com.mrousavy.camera.types.QualityPrioritization | ||||||
| import com.mrousavy.camera.types.RecordVideoOptions | import com.mrousavy.camera.types.RecordVideoOptions | ||||||
| import com.mrousavy.camera.types.Torch |  | ||||||
| import com.mrousavy.camera.types.VideoStabilizationMode |  | ||||||
| import com.mrousavy.camera.utils.ImageFormatUtils | import com.mrousavy.camera.utils.ImageFormatUtils | ||||||
| import java.io.Closeable | import java.io.Closeable | ||||||
| import java.lang.IllegalStateException | import kotlin.coroutines.cancellation.CancellationException | ||||||
| import java.util.concurrent.CancellationException |  | ||||||
| import kotlinx.coroutines.CoroutineScope | import kotlinx.coroutines.CoroutineScope | ||||||
| import kotlinx.coroutines.launch | import kotlinx.coroutines.launch | ||||||
| import kotlinx.coroutines.runBlocking | import kotlinx.coroutines.runBlocking | ||||||
| @@ -57,8 +40,8 @@ import kotlinx.coroutines.sync.withLock | |||||||
| import java.io.File | import java.io.File | ||||||
|  |  | ||||||
| class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) : | class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) : | ||||||
|   CameraManager.AvailabilityCallback(), |   Closeable, | ||||||
|   Closeable { |   PersistentCameraCaptureSession.Callback { | ||||||
|   companion object { |   companion object { | ||||||
|     private const val TAG = "CameraSession" |     private const val TAG = "CameraSession" | ||||||
|   } |   } | ||||||
| @@ -67,14 +50,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|   private var configuration: CameraConfiguration? = null |   private var configuration: CameraConfiguration? = null | ||||||
|  |  | ||||||
|   // Camera State |   // Camera State | ||||||
|   private var cameraDevice: CameraDevice? = null |   private val captureSession = PersistentCameraCaptureSession(cameraManager, this) | ||||||
|     set(value) { |  | ||||||
|       field = value |  | ||||||
|       cameraDeviceDetails = if (value != null) CameraDeviceDetails(cameraManager, value.id) else null |  | ||||||
|     } |  | ||||||
|   private var cameraDeviceDetails: CameraDeviceDetails? = null |  | ||||||
|   private var captureSession: CameraCaptureSession? = null |  | ||||||
|   private var previewRequest: CaptureRequest.Builder? = null |  | ||||||
|   private var photoOutput: PhotoOutput? = null |   private var photoOutput: PhotoOutput? = null | ||||||
|   private var videoOutput: VideoPipelineOutput? = null |   private var videoOutput: VideoPipelineOutput? = null | ||||||
|   private var codeScannerOutput: BarcodeScannerOutput? = null |   private var codeScannerOutput: BarcodeScannerOutput? = null | ||||||
| @@ -102,11 +78,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       field = value |       field = value | ||||||
|       updateVideoOutputs() |       updateVideoOutputs() | ||||||
|     } |     } | ||||||
|   var frameProcessor: FrameProcessor? = null |  | ||||||
|     set(value) { |  | ||||||
|       field = value |  | ||||||
|       updateVideoOutputs() |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|   val orientation: Orientation |   val orientation: Orientation | ||||||
|     get() { |     get() { | ||||||
| @@ -116,14 +87,9 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       return Orientation.fromRotationDegrees(sensorRotation) |       return Orientation.fromRotationDegrees(sensorRotation) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   init { |  | ||||||
|     cameraManager.registerAvailabilityCallback(this, CameraQueues.cameraQueue.handler) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   override fun close() { |   override fun close() { | ||||||
|     Log.i(TAG, "Closing CameraSession...") |     Log.i(TAG, "Closing CameraSession...") | ||||||
|     isDestroyed = true |     isDestroyed = true | ||||||
|     cameraManager.unregisterAvailabilityCallback(this) |  | ||||||
|     runBlocking { |     runBlocking { | ||||||
|       mutex.withLock { |       mutex.withLock { | ||||||
|         destroy() |         destroy() | ||||||
| @@ -133,18 +99,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     Log.i(TAG, "CameraSession closed!") |     Log.i(TAG, "CameraSession closed!") | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun onCameraAvailable(cameraId: String) { |  | ||||||
|     super.onCameraAvailable(cameraId) |  | ||||||
|     if (this.configuration?.cameraId == cameraId && cameraDevice == null && configuration?.isActive == true) { |  | ||||||
|       Log.i(TAG, "Camera #$cameraId is now available again, trying to re-open it now...") |  | ||||||
|       coroutineScope.launch { |  | ||||||
|         configure { |  | ||||||
|           // re-open CameraDevice if needed |  | ||||||
|         } |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   suspend fun configure(lambda: (configuration: CameraConfiguration) -> Unit) { |   suspend fun configure(lambda: (configuration: CameraConfiguration) -> Unit) { | ||||||
|     Log.i(TAG, "configure { ... }: Waiting for lock...") |     Log.i(TAG, "configure { ... }: Waiting for lock...") | ||||||
|  |  | ||||||
| @@ -153,6 +107,12 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       val config = CameraConfiguration.copyOf(this.configuration) |       val config = CameraConfiguration.copyOf(this.configuration) | ||||||
|       lambda(config) |       lambda(config) | ||||||
|       val diff = CameraConfiguration.difference(this.configuration, config) |       val diff = CameraConfiguration.difference(this.configuration, config) | ||||||
|  |       this.configuration = config | ||||||
|  |  | ||||||
|  |       if (!diff.hasChanges) { | ||||||
|  |         Log.i(TAG, "Nothing changed, aborting configure { ... }") | ||||||
|  |         return@withLock | ||||||
|  |       } | ||||||
|  |  | ||||||
|       if (isDestroyed) { |       if (isDestroyed) { | ||||||
|         Log.i(TAG, "CameraSession is already destroyed. Skipping configure { ... }") |         Log.i(TAG, "CameraSession is already destroyed. Skipping configure { ... }") | ||||||
| @@ -162,29 +122,11 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       Log.i(TAG, "configure { ... }: Updating CameraSession Configuration... $diff") |       Log.i(TAG, "configure { ... }: Updating CameraSession Configuration... $diff") | ||||||
|  |  | ||||||
|       try { |       try { | ||||||
|         val needsRebuild = cameraDevice == null || captureSession == null |         captureSession.withConfiguration { | ||||||
|         if (needsRebuild) { |  | ||||||
|           Log.i(TAG, "Need to rebuild CameraDevice and CameraCaptureSession...") |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         // Since cameraDevice and captureSession are OS resources, we have three possible paths here: |  | ||||||
|         if (needsRebuild) { |  | ||||||
|           if (config.isActive) { |  | ||||||
|             // A: The Camera has been torn down by the OS and we want it to be active - rebuild everything |  | ||||||
|             Log.i(TAG, "Need to rebuild CameraDevice and CameraCaptureSession...") |  | ||||||
|             configureCameraDevice(config) |  | ||||||
|             configureOutputs(config) |  | ||||||
|             configureCaptureRequest(config) |  | ||||||
|           } else { |  | ||||||
|             // B: The Camera has been torn down by the OS but it's currently in the background - ignore this |  | ||||||
|             Log.i(TAG, "CameraDevice and CameraCaptureSession is torn down but Camera is not active, skipping update...") |  | ||||||
|           } |  | ||||||
|         } else { |  | ||||||
|           // C: The Camera has not been torn down and we just want to update some props - update incrementally |  | ||||||
|           // Build up session or update any props |           // Build up session or update any props | ||||||
|           if (diff.deviceChanged) { |           if (diff.deviceChanged) { | ||||||
|             // 1. cameraId changed, open device |             // 1. cameraId changed, open device | ||||||
|             configureCameraDevice(config) |             configureInput(config) | ||||||
|           } |           } | ||||||
|           if (diff.outputsChanged) { |           if (diff.outputsChanged) { | ||||||
|             // 2. outputs changed, build new session |             // 2. outputs changed, build new session | ||||||
| @@ -194,10 +136,18 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|             // 3. zoom etc changed, update repeating request |             // 3. zoom etc changed, update repeating request | ||||||
|             configureCaptureRequest(config) |             configureCaptureRequest(config) | ||||||
|           } |           } | ||||||
|  |           if (diff.isActiveChanged) { | ||||||
|  |             // 4. Either start or stop the session | ||||||
|  |             val isActive = config.isActive && config.preview.isEnabled | ||||||
|  |             captureSession.setIsActive(isActive) | ||||||
|  |           } | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         Log.i(TAG, "Successfully updated CameraSession Configuration! isActive: ${config.isActive}") |         Log.i( | ||||||
|         this.configuration = config |           TAG, | ||||||
|  |           "configure { ... }: Completed CameraSession Configuration! (isActive: ${config.isActive}, isRunning: ${captureSession.isRunning})" | ||||||
|  |         ) | ||||||
|  |         isRunning = captureSession.isRunning | ||||||
|  |  | ||||||
|         // Notify about Camera initialization |         // Notify about Camera initialization | ||||||
|         if (diff.deviceChanged) { |         if (diff.deviceChanged) { | ||||||
| @@ -212,8 +162,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|  |  | ||||||
|   private fun destroy() { |   private fun destroy() { | ||||||
|     Log.i(TAG, "Destroying session..") |     Log.i(TAG, "Destroying session..") | ||||||
|     cameraDevice?.close() |     captureSession.close() | ||||||
|     cameraDevice = null |  | ||||||
|  |  | ||||||
|     photoOutput?.close() |     photoOutput?.close() | ||||||
|     photoOutput = null |     photoOutput = null | ||||||
| @@ -269,66 +218,20 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     Log.i(TAG, "Preview Output destroyed!") |     Log.i(TAG, "Preview Output destroyed!") | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |   private fun configureInput(configuration: CameraConfiguration) { | ||||||
|    * Set up the `CameraDevice` (`cameraId`) |     Log.i(TAG, "Configuring inputs for CameraSession...") | ||||||
|    */ |  | ||||||
|   private suspend fun configureCameraDevice(configuration: CameraConfiguration) { |  | ||||||
|     if (!configuration.isActive) { |  | ||||||
|       // If isActive=false, we don't care if the device is opened or closed. |  | ||||||
|       // Android OS can close the CameraDevice if it needs it, otherwise we keep it warm. |  | ||||||
|       Log.i(TAG, "isActive is false, skipping CameraDevice configuration.") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     if (cameraDevice != null) { |  | ||||||
|       // Close existing device |  | ||||||
|       Log.i(TAG, "Closing previous Camera #${cameraDevice?.id}...") |  | ||||||
|       cameraDevice?.close() |  | ||||||
|       cameraDevice = null |  | ||||||
|     } |  | ||||||
|     isRunning = false |  | ||||||
|  |  | ||||||
|     // Check Camera Permission |  | ||||||
|     val cameraPermission = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) |  | ||||||
|     if (cameraPermission != PackageManager.PERMISSION_GRANTED) throw CameraPermissionError() |  | ||||||
|  |  | ||||||
|     // Open new device |  | ||||||
|     val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() |     val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() | ||||||
|     Log.i(TAG, "Configuring Camera #$cameraId...") |     val status = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) | ||||||
|     cameraDevice = cameraManager.openCamera(cameraId, { device, error -> |     if (status != PackageManager.PERMISSION_GRANTED) throw CameraPermissionError() | ||||||
|       if (cameraDevice != device) { |  | ||||||
|         // a previous device has been disconnected, but we already have a new one. |  | ||||||
|         // this is just normal behavior |  | ||||||
|         return@openCamera |  | ||||||
|       } |  | ||||||
|  |  | ||||||
|       this.cameraDevice = null |  | ||||||
|     isRunning = false |     isRunning = false | ||||||
|  |     captureSession.setInput(cameraId) | ||||||
|       if (error != null) { |  | ||||||
|         Log.e(TAG, "Camera #${device.id} has been unexpectedly disconnected!", error) |  | ||||||
|         callback.onError(error) |  | ||||||
|       } else { |  | ||||||
|         Log.i(TAG, "Camera #${device.id} has been gracefully disconnected!") |  | ||||||
|       } |  | ||||||
|     }, CameraQueues.cameraQueue) |  | ||||||
|  |  | ||||||
|     Log.i(TAG, "Successfully configured Camera #$cameraId!") |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |   /** | ||||||
|    * Set up the `CaptureSession` with all outputs (preview, photo, video, codeScanner) and their HDR/Format settings. |    * Set up the `CaptureSession` with all outputs (preview, photo, video, codeScanner) and their HDR/Format settings. | ||||||
|    */ |    */ | ||||||
|   private suspend fun configureOutputs(configuration: CameraConfiguration) { |   private suspend fun configureOutputs(configuration: CameraConfiguration) { | ||||||
|     if (!configuration.isActive) { |     val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() | ||||||
|       Log.i(TAG, "isActive is false, skipping CameraCaptureSession configuration.") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|     val cameraDevice = cameraDevice |  | ||||||
|     if (cameraDevice == null) { |  | ||||||
|       Log.i(TAG, "CameraSession hasn't configured a CameraDevice, skipping session configuration...") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Destroy previous outputs |     // Destroy previous outputs | ||||||
|     Log.i(TAG, "Destroying previous outputs...") |     Log.i(TAG, "Destroying previous outputs...") | ||||||
| @@ -340,20 +243,20 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     codeScannerOutput = null |     codeScannerOutput = null | ||||||
|     isRunning = false |     isRunning = false | ||||||
|  |  | ||||||
|     val characteristics = cameraManager.getCameraCharacteristics(cameraDevice.id) |     val deviceDetails = CameraDeviceDetails(cameraManager, cameraId) | ||||||
|     val format = configuration.format |     val format = configuration.format | ||||||
|  |  | ||||||
|     Log.i(TAG, "Creating outputs for Camera #${cameraDevice.id}...") |     Log.i(TAG, "Creating outputs for Camera #$cameraId...") | ||||||
|  |  | ||||||
|     val isSelfie = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT |     val isSelfie = deviceDetails.lensFacing == LensFacing.FRONT | ||||||
|  |  | ||||||
|     val outputs = mutableListOf<SurfaceOutput>() |     val outputs = mutableListOf<SurfaceOutput>() | ||||||
|  |  | ||||||
|     // Photo Output |     // Photo Output | ||||||
|     val photo = configuration.photo as? CameraConfiguration.Output.Enabled<CameraConfiguration.Photo> |     val photo = configuration.photo as? CameraConfiguration.Output.Enabled<CameraConfiguration.Photo> | ||||||
|     if (photo != null) { |     if (photo != null) { | ||||||
|       val imageFormat = ImageFormat.JPEG |       val imageFormat = deviceDetails.photoFormat | ||||||
|       val sizes = characteristics.getPhotoSizes(imageFormat) |       val sizes = deviceDetails.getPhotoSizes() | ||||||
|       val size = sizes.closestToOrMax(format?.photoSize) |       val size = sizes.closestToOrMax(format?.photoSize) | ||||||
|       val maxImages = 10 |       val maxImages = 10 | ||||||
|  |  | ||||||
| @@ -373,7 +276,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     val video = configuration.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> |     val video = configuration.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> | ||||||
|     if (video != null) { |     if (video != null) { | ||||||
|       val imageFormat = video.config.pixelFormat.toImageFormat() |       val imageFormat = video.config.pixelFormat.toImageFormat() | ||||||
|       val sizes = characteristics.getVideoSizes(cameraDevice.id, imageFormat) |       val sizes = deviceDetails.getVideoSizes(imageFormat) | ||||||
|       val size = sizes.closestToOrMax(format?.videoSize) |       val size = sizes.closestToOrMax(format?.videoSize) | ||||||
|  |  | ||||||
|       Log.i(TAG, "Adding ${size.width}x${size.height} Video Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") |       Log.i(TAG, "Adding ${size.width}x${size.height} Video Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") | ||||||
| @@ -383,6 +286,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|         video.config.pixelFormat, |         video.config.pixelFormat, | ||||||
|         isSelfie, |         isSelfie, | ||||||
|         video.config.enableFrameProcessor, |         video.config.enableFrameProcessor, | ||||||
|  |         video.config.enableGpuBuffers, | ||||||
|         callback |         callback | ||||||
|       ) |       ) | ||||||
|       val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr) |       val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr) | ||||||
| @@ -395,7 +299,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     if (preview != null) { |     if (preview != null) { | ||||||
|       // Compute Preview Size based on chosen video size |       // Compute Preview Size based on chosen video size | ||||||
|       val videoSize = videoOutput?.size ?: format?.videoSize |       val videoSize = videoOutput?.size ?: format?.videoSize | ||||||
|       val size = characteristics.getPreviewTargetSize(videoSize) |       val sizes = deviceDetails.getPreviewSizes() | ||||||
|  |       val size = sizes.closestToOrMax(videoSize) | ||||||
|  |  | ||||||
|       val enableHdr = video?.config?.enableHdr ?: false |       val enableHdr = video?.config?.enableHdr ?: false | ||||||
|  |  | ||||||
| @@ -407,8 +312,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|         enableHdr |         enableHdr | ||||||
|       ) |       ) | ||||||
|       outputs.add(output) |       outputs.add(output) | ||||||
|       // Size is usually landscape, so we flip it here |  | ||||||
|       previewView?.size = Size(size.height, size.width) |       previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // CodeScanner Output |     // CodeScanner Output | ||||||
| @@ -421,7 +326,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       } |       } | ||||||
|  |  | ||||||
|       val imageFormat = ImageFormat.YUV_420_888 |       val imageFormat = ImageFormat.YUV_420_888 | ||||||
|       val sizes = characteristics.getVideoSizes(cameraDevice.id, imageFormat) |       val sizes = deviceDetails.getVideoSizes(imageFormat) | ||||||
|       val size = sizes.closestToOrMax(Size(1280, 720)) |       val size = sizes.closestToOrMax(Size(1280, 720)) | ||||||
|  |  | ||||||
|       Log.i(TAG, "Adding ${size.width}x${size.height} CodeScanner Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") |       Log.i(TAG, "Adding ${size.width}x${size.height} CodeScanner Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") | ||||||
| @@ -432,175 +337,63 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     // Create session |     // Create session | ||||||
|     captureSession = cameraDevice.createCaptureSession(cameraManager, outputs, { session -> |     captureSession.setOutputs(outputs) | ||||||
|       if (this.captureSession != session) { |  | ||||||
|         // a previous session has been closed, but we already have a new one. |  | ||||||
|         // this is just normal behavior |  | ||||||
|         return@createCaptureSession |  | ||||||
|       } |  | ||||||
|  |  | ||||||
|       // onClosed |     Log.i(TAG, "Successfully configured Session with ${outputs.size} outputs for Camera #$cameraId!") | ||||||
|       this.captureSession = null |  | ||||||
|       isRunning = false |  | ||||||
|  |  | ||||||
|       Log.i(TAG, "Camera Session $session has been closed.") |  | ||||||
|     }, CameraQueues.cameraQueue) |  | ||||||
|  |  | ||||||
|     Log.i(TAG, "Successfully configured Session with ${outputs.size} outputs for Camera #${cameraDevice.id}!") |  | ||||||
|  |  | ||||||
|     // Update Frame Processor and RecordingSession for newly changed output |     // Update Frame Processor and RecordingSession for newly changed output | ||||||
|     updateVideoOutputs() |     updateVideoOutputs() | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun createRepeatingRequest(device: CameraDevice, targets: List<Surface>, config: CameraConfiguration): CaptureRequest { |  | ||||||
|     val deviceDetails = cameraDeviceDetails ?: CameraDeviceDetails(cameraManager, device.id) |  | ||||||
|  |  | ||||||
|     val template = if (config.video.isEnabled) CameraDevice.TEMPLATE_RECORD else CameraDevice.TEMPLATE_PREVIEW |  | ||||||
|     val captureRequest = device.createCaptureRequest(template) |  | ||||||
|  |  | ||||||
|     targets.forEach { t -> captureRequest.addTarget(t) } |  | ||||||
|  |  | ||||||
|     val format = config.format |  | ||||||
|  |  | ||||||
|     // Set FPS |  | ||||||
|     val fps = config.fps |  | ||||||
|     if (fps != null) { |  | ||||||
|       if (format == null) throw PropRequiresFormatToBeNonNullError("fps") |  | ||||||
|       if (format.maxFps < fps) throw InvalidFpsError(fps) |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps)) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set Video Stabilization |  | ||||||
|     if (config.videoStabilizationMode != VideoStabilizationMode.OFF) { |  | ||||||
|       if (format == null) throw PropRequiresFormatToBeNonNullError("videoStabilizationMode") |  | ||||||
|       if (!format.videoStabilizationModes.contains( |  | ||||||
|           config.videoStabilizationMode |  | ||||||
|         ) |  | ||||||
|       ) { |  | ||||||
|         throw InvalidVideoStabilizationMode(config.videoStabilizationMode) |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|     when (config.videoStabilizationMode) { |  | ||||||
|       VideoStabilizationMode.OFF -> { |  | ||||||
|         // do nothing |  | ||||||
|       } |  | ||||||
|       VideoStabilizationMode.STANDARD -> { |  | ||||||
|         val mode = if (Build.VERSION.SDK_INT >= |  | ||||||
|           Build.VERSION_CODES.TIRAMISU |  | ||||||
|         ) { |  | ||||||
|           CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION |  | ||||||
|         } else { |  | ||||||
|           CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON |  | ||||||
|         } |  | ||||||
|         captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode) |  | ||||||
|       } |  | ||||||
|       VideoStabilizationMode.CINEMATIC, VideoStabilizationMode.CINEMATIC_EXTENDED -> { |  | ||||||
|         captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set HDR |  | ||||||
|     val video = config.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> |  | ||||||
|     val videoHdr = video?.config?.enableHdr |  | ||||||
|     if (videoHdr == true) { |  | ||||||
|       if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr") |  | ||||||
|       if (!format.supportsVideoHdr) throw InvalidVideoHdrError() |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) |  | ||||||
|     } else if (config.enableLowLightBoost) { |  | ||||||
|       if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError() |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set Exposure Bias |  | ||||||
|     val exposure = config.exposure?.toInt() |  | ||||||
|     if (exposure != null) { |  | ||||||
|       val clamped = deviceDetails.exposureRange.clamp(exposure) |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, clamped) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set Zoom |  | ||||||
|     // TODO: Cache camera characteristics? Check perf. |  | ||||||
|     val cameraCharacteristics = cameraManager.getCameraCharacteristics(device.id) |  | ||||||
|     captureRequest.setZoom(config.zoom, cameraCharacteristics) |  | ||||||
|  |  | ||||||
|     // Set Torch |  | ||||||
|     if (config.torch == Torch.ON) { |  | ||||||
|       if (!deviceDetails.hasFlash) throw FlashUnavailableError() |  | ||||||
|       captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Start repeating request if the Camera is active |  | ||||||
|     return captureRequest.build() |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   private fun configureCaptureRequest(config: CameraConfiguration) { |   private fun configureCaptureRequest(config: CameraConfiguration) { | ||||||
|     val captureSession = captureSession |     val video = config.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> | ||||||
|  |     val enableVideo = video != null | ||||||
|  |     val enableVideoHdr = video?.config?.enableHdr == true | ||||||
|  |  | ||||||
|     if (!config.isActive) { |     captureSession.setRepeatingRequest( | ||||||
|       isRunning = false |       RepeatingCaptureRequest( | ||||||
|       try { |         enableVideo, | ||||||
|         captureSession?.stopRepeating() |         config.torch, | ||||||
|       } catch (e: IllegalStateException) { |         config.fps, | ||||||
|         // ignore - captureSession is already closed. |         config.videoStabilizationMode, | ||||||
|       } |         enableVideoHdr, | ||||||
|       return |         config.enableLowLightBoost, | ||||||
|     } |         config.exposure, | ||||||
|     if (captureSession == null) { |         config.zoom, | ||||||
|       Log.i(TAG, "CameraSession hasn't configured the capture session, skipping CaptureRequest...") |         config.format | ||||||
|       return |       ) | ||||||
|     } |     ) | ||||||
|  |  | ||||||
|     val preview = config.preview as? CameraConfiguration.Output.Enabled<CameraConfiguration.Preview> |  | ||||||
|     val previewSurface = preview?.config?.surface |  | ||||||
|     val targets = listOfNotNull(previewSurface, videoOutput?.surface, codeScannerOutput?.surface) |  | ||||||
|     if (targets.isEmpty()) { |  | ||||||
|       Log.i(TAG, "CameraSession has no repeating outputs (Preview, Video, CodeScanner), skipping CaptureRequest...") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     val request = createRepeatingRequest(captureSession.device, targets, config) |  | ||||||
|     captureSession.setRepeatingRequest(request, null, null) |  | ||||||
|     isRunning = true |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   suspend fun takePhoto( |   suspend fun takePhoto( | ||||||
|     qualityPrioritization: QualityPrioritization, |     qualityPrioritization: QualityPrioritization, | ||||||
|     flashMode: Flash, |     flash: Flash, | ||||||
|     enableShutterSound: Boolean, |     enableShutterSound: Boolean, | ||||||
|     enableRedEyeReduction: Boolean, |  | ||||||
|     enableAutoStabilization: Boolean, |     enableAutoStabilization: Boolean, | ||||||
|  |     enablePrecapture: Boolean, | ||||||
|     outputOrientation: Orientation |     outputOrientation: Orientation | ||||||
|   ): CapturedPhoto { |   ): CapturedPhoto { | ||||||
|     val captureSession = captureSession ?: throw CameraNotReadyError() |  | ||||||
|     val photoOutput = photoOutput ?: throw PhotoNotEnabledError() |     val photoOutput = photoOutput ?: throw PhotoNotEnabledError() | ||||||
|  |  | ||||||
|     Log.i(TAG, "Photo capture 0/3 - preparing capture request (${photoOutput.size.width}x${photoOutput.size.height})...") |     Log.i(TAG, "Photo capture 1/3 - capturing ${photoOutput.size.width}x${photoOutput.size.height} image...") | ||||||
|  |     val result = captureSession.capture( | ||||||
|     val zoom = configuration?.zoom ?: 1f |  | ||||||
|  |  | ||||||
|     val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) |  | ||||||
|     val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics) |  | ||||||
|     val captureRequest = captureSession.device.createPhotoCaptureRequest( |  | ||||||
|       cameraManager, |  | ||||||
|       photoOutput.surface, |  | ||||||
|       zoom, |  | ||||||
|       qualityPrioritization, |       qualityPrioritization, | ||||||
|       flashMode, |       flash, | ||||||
|       enableRedEyeReduction, |  | ||||||
|       enableAutoStabilization, |       enableAutoStabilization, | ||||||
|       photoOutput.enableHdr, |       photoOutput.enableHdr, | ||||||
|       orientation |       outputOrientation, | ||||||
|  |       enableShutterSound, | ||||||
|  |       enablePrecapture | ||||||
|     ) |     ) | ||||||
|     Log.i(TAG, "Photo capture 1/3 - starting capture...") |  | ||||||
|     val result = captureSession.capture(captureRequest, enableShutterSound) |  | ||||||
|     val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! |  | ||||||
|     Log.i(TAG, "Photo capture 2/3 complete - received metadata with timestamp $timestamp") |  | ||||||
|     try { |     try { | ||||||
|  |       val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! | ||||||
|  |       Log.i(TAG, "Photo capture 2/3 - waiting for image with timestamp $timestamp now...") | ||||||
|       val image = photoOutputSynchronizer.await(timestamp) |       val image = photoOutputSynchronizer.await(timestamp) | ||||||
|  |  | ||||||
|       val isMirrored = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT |       Log.i(TAG, "Photo capture 3/3 - received ${image.width} x ${image.height} image, preparing result...") | ||||||
|  |       val deviceDetails = captureSession.getActiveDeviceDetails() | ||||||
|       Log.i(TAG, "Photo capture 3/3 complete - received ${image.width} x ${image.height} image.") |       val isMirrored = deviceDetails?.lensFacing == LensFacing.FRONT | ||||||
|       return CapturedPhoto(image, result, orientation, isMirrored, image.format) |       return CapturedPhoto(image, result, orientation, isMirrored, image.format) | ||||||
|     } catch (e: CancellationException) { |     } catch (e: CancellationException) { | ||||||
|       throw CaptureAbortedError(false) |       throw CaptureAbortedError(false) | ||||||
| @@ -628,13 +421,13 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     mutex.withLock { |     mutex.withLock { | ||||||
|       if (recording != null) throw RecordingInProgressError() |       if (recording != null) throw RecordingInProgressError() | ||||||
|       val videoOutput = videoOutput ?: throw VideoNotEnabledError() |       val videoOutput = videoOutput ?: throw VideoNotEnabledError() | ||||||
|       val cameraDevice = cameraDevice ?: throw CameraNotReadyError() |       val cameraId = configuration?.cameraId ?: throw NoCameraDeviceError() | ||||||
|  |  | ||||||
|       val fps = configuration?.fps ?: 30 |       val fps = configuration?.fps ?: 30 | ||||||
|  |  | ||||||
|       val recording = RecordingSession( |       val recording = RecordingSession( | ||||||
|         context, |         context, | ||||||
|         cameraDevice.id, |         cameraId, | ||||||
|         videoOutput.size, |         videoOutput.size, | ||||||
|         enableAudio, |         enableAudio, | ||||||
|         fps, |         fps, | ||||||
| @@ -674,40 +467,16 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   suspend fun focus(x: Int, y: Int): Unit = throw NotImplementedError("focus() is not yet implemented!") |   override fun onError(error: Throwable) { | ||||||
|  |     callback.onError(error) | ||||||
|   private suspend fun focus(point: Point) { |  | ||||||
|     mutex.withLock { |  | ||||||
|       // TODO: Fix this method |  | ||||||
|       val captureSession = captureSession ?: throw CameraNotReadyError() |  | ||||||
|       val request = previewRequest ?: throw CameraNotReadyError() |  | ||||||
|  |  | ||||||
|       val weight = MeteringRectangle.METERING_WEIGHT_MAX - 1 |  | ||||||
|       val focusAreaTouch = MeteringRectangle(point, Size(150, 150), weight) |  | ||||||
|  |  | ||||||
|       // Quickly pause preview |  | ||||||
|       captureSession.stopRepeating() |  | ||||||
|  |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL) |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF) |  | ||||||
|       captureSession.capture(request.build(), null, null) |  | ||||||
|  |  | ||||||
|       // Add AF trigger with focus region |  | ||||||
|       val characteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) |  | ||||||
|       val maxSupportedFocusRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0 |  | ||||||
|       if (maxSupportedFocusRegions >= 1) { |  | ||||||
|         request.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(focusAreaTouch)) |  | ||||||
|   } |   } | ||||||
|       request.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO) |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO) |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START) |  | ||||||
|  |  | ||||||
|       captureSession.capture(request.build(), false) |   suspend fun focus(x: Int, y: Int) { | ||||||
|  |     val previewView = previewView ?: throw CameraNotReadyError() | ||||||
|  |     val deviceDetails = captureSession.getActiveDeviceDetails() ?: throw CameraNotReadyError() | ||||||
|  |  | ||||||
|       // Resume preview |     val cameraPoint = previewView.convertLayerPointToCameraCoordinates(Point(x, y), deviceDetails) | ||||||
|       request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE) |     captureSession.focus(cameraPoint) | ||||||
|       captureSession.setRepeatingRequest(request.build(), null, null) |  | ||||||
|     } |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   data class CapturedPhoto( |   data class CapturedPhoto( | ||||||
|   | |||||||
| @@ -83,7 +83,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   // Muxer specific |   // Muxer specific | ||||||
|   private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat) { |   private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat, val callbacks: CameraSession.Callback,) { | ||||||
|     val videoTrack: Int = muxer.addTrack(encodedFormat) |     val videoTrack: Int = muxer.addTrack(encodedFormat) | ||||||
|     val startTimeUs: Long = startTimeUs |     val startTimeUs: Long = startTimeUs | ||||||
|  |  | ||||||
| @@ -95,16 +95,14 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|     fun finish() { |     fun finish() { | ||||||
|       muxer.stop() |       muxer.stop() | ||||||
|       muxer.release() |       muxer.release() | ||||||
|  |       callbacks.onVideoChunkReady(filepath, chunkIndex) | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private var muxerContext: MuxerContext? = null |   private var muxerContext: MuxerContext? = null | ||||||
|  |  | ||||||
|   private fun createNextMuxer(bufferInfo: BufferInfo) { |   private fun createNextMuxer(bufferInfo: BufferInfo) { | ||||||
|     muxerContext?.let { |     muxerContext?.finish() | ||||||
|       it.finish() |  | ||||||
|       this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex) |  | ||||||
|     } |  | ||||||
|     chunkIndex++ |     chunkIndex++ | ||||||
|  |  | ||||||
|     val newFileName = "$chunkIndex.mp4" |     val newFileName = "$chunkIndex.mp4" | ||||||
| @@ -116,7 +114,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|     ) |     ) | ||||||
|     muxer.setOrientationHint(orientationHint) |     muxer.setOrientationHint(orientationHint) | ||||||
|     muxerContext = MuxerContext( |     muxerContext = MuxerContext( | ||||||
|       muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!! |         muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks | ||||||
|     ) |     ) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -39,7 +39,7 @@ class CodeScannerPipeline( | |||||||
|     var isBusy = false |     var isBusy = false | ||||||
|     imageReader = ImageReader.newInstance(size.width, size.height, format, MAX_IMAGES) |     imageReader = ImageReader.newInstance(size.width, size.height, format, MAX_IMAGES) | ||||||
|     imageReader.setOnImageAvailableListener({ reader -> |     imageReader.setOnImageAvailableListener({ reader -> | ||||||
|       val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener |       val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener | ||||||
|  |  | ||||||
|       if (isBusy) { |       if (isBusy) { | ||||||
|         // We're currently executing on a previous Frame, so we skip this one. |         // We're currently executing on a previous Frame, so we skip this one. | ||||||
|   | |||||||
| @@ -0,0 +1,378 @@ | |||||||
|  | package com.mrousavy.camera.core | ||||||
|  |  | ||||||
|  | import android.graphics.Point | ||||||
|  | import android.hardware.camera2.CameraAccessException | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CameraManager | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.hardware.camera2.TotalCaptureResult | ||||||
|  | import android.util.Log | ||||||
|  | import com.mrousavy.camera.core.capture.PhotoCaptureRequest | ||||||
|  | import com.mrousavy.camera.core.capture.RepeatingCaptureRequest | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.extensions.PrecaptureOptions | ||||||
|  | import com.mrousavy.camera.extensions.PrecaptureTrigger | ||||||
|  | import com.mrousavy.camera.extensions.capture | ||||||
|  | import com.mrousavy.camera.extensions.createCaptureSession | ||||||
|  | import com.mrousavy.camera.extensions.isValid | ||||||
|  | import com.mrousavy.camera.extensions.openCamera | ||||||
|  | import com.mrousavy.camera.extensions.precapture | ||||||
|  | import com.mrousavy.camera.extensions.tryAbortCaptures | ||||||
|  | import com.mrousavy.camera.extensions.tryStopRepeating | ||||||
|  | import com.mrousavy.camera.types.Flash | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
|  | import com.mrousavy.camera.types.QualityPrioritization | ||||||
|  | import java.io.Closeable | ||||||
|  | import kotlinx.coroutines.CoroutineScope | ||||||
|  | import kotlinx.coroutines.Job | ||||||
|  | import kotlinx.coroutines.coroutineScope | ||||||
|  | import kotlinx.coroutines.delay | ||||||
|  | import kotlinx.coroutines.isActive | ||||||
|  | import kotlinx.coroutines.launch | ||||||
|  | import kotlinx.coroutines.sync.Mutex | ||||||
|  | import kotlinx.coroutines.sync.withLock | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * A [CameraCaptureSession] wrapper that safely handles interruptions and remains open whenever available. | ||||||
|  |  * | ||||||
|  |  * This class aims to be similar to Apple's `AVCaptureSession`. | ||||||
|  |  */ | ||||||
|  | class PersistentCameraCaptureSession(private val cameraManager: CameraManager, private val callback: Callback) : Closeable { | ||||||
|  |   companion object { | ||||||
|  |     private const val TAG = "PersistentCameraCaptureSession" | ||||||
|  |     private const val FOCUS_RESET_TIMEOUT = 3000L | ||||||
|  |     private const val PRECAPTURE_LOCK_TIMEOUT = 5000L | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Inputs/Dependencies | ||||||
|  |   private var cameraId: String? = null | ||||||
|  |   private var outputs: List<SurfaceOutput> = emptyList() | ||||||
|  |   private var repeatingRequest: RepeatingCaptureRequest? = null | ||||||
|  |   private var isActive = false | ||||||
|  |  | ||||||
|  |   // State/Dependants | ||||||
|  |   private var device: CameraDevice? = null // depends on [cameraId] | ||||||
|  |   private var session: CameraCaptureSession? = null // depends on [device, surfaceOutputs] | ||||||
|  |   private var cameraDeviceDetails: CameraDeviceDetails? = null // depends on [device] | ||||||
|  |  | ||||||
|  |   private val mutex = Mutex() | ||||||
|  |   private var didDestroyFromOutside = false | ||||||
|  |   private var focusJob: Job? = null | ||||||
|  |   private val coroutineScope = CoroutineScope(CameraQueues.cameraQueue.coroutineDispatcher) | ||||||
|  |  | ||||||
|  |   val isRunning: Boolean | ||||||
|  |     get() = isActive && session != null && device != null && !didDestroyFromOutside | ||||||
|  |  | ||||||
|  |   override fun close() { | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     session?.tryAbortCaptures() | ||||||
|  |     device?.close() | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun assertLocked(method: String) { | ||||||
|  |     if (!mutex.isLocked) { | ||||||
|  |       throw SessionIsNotLockedError("Failed to call $method, session is not locked! Call beginConfiguration() first.") | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun withConfiguration(block: suspend () -> Unit) { | ||||||
|  |     // Cancel any ongoing focus jobs | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     focusJob = null | ||||||
|  |  | ||||||
|  |     mutex.withLock { | ||||||
|  |       block() | ||||||
|  |       configure() | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setInput(cameraId: String) { | ||||||
|  |     Log.d(TAG, "--> setInput($cameraId)") | ||||||
|  |     assertLocked("setInput") | ||||||
|  |     if (this.cameraId != cameraId || device?.id != cameraId) { | ||||||
|  |       this.cameraId = cameraId | ||||||
|  |  | ||||||
|  |       // Abort any captures in the session so we get the onCaptureFailed handler for any outstanding photos | ||||||
|  |       session?.tryAbortCaptures() | ||||||
|  |       session = null | ||||||
|  |       // Closing the device will also close the session above - even faster than manually closing it. | ||||||
|  |       device?.close() | ||||||
|  |       device = null | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setOutputs(outputs: List<SurfaceOutput>) { | ||||||
|  |     Log.d(TAG, "--> setOutputs($outputs)") | ||||||
|  |     assertLocked("setOutputs") | ||||||
|  |     if (this.outputs != outputs) { | ||||||
|  |       this.outputs = outputs | ||||||
|  |  | ||||||
|  |       if (outputs.isNotEmpty()) { | ||||||
|  |         // Outputs have changed to something else, we don't wanna destroy the session directly | ||||||
|  |         // so the outputs can be kept warm. The session that gets created next will take over the outputs. | ||||||
|  |         session?.tryAbortCaptures() | ||||||
|  |       } else { | ||||||
|  |         // Just stop it, we don't have any outputs | ||||||
|  |         session?.close() | ||||||
|  |       } | ||||||
|  |       session = null | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setRepeatingRequest(request: RepeatingCaptureRequest) { | ||||||
|  |     assertLocked("setRepeatingRequest") | ||||||
|  |     Log.d(TAG, "--> setRepeatingRequest(...)") | ||||||
|  |     if (this.repeatingRequest != request) { | ||||||
|  |       this.repeatingRequest = request | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setIsActive(isActive: Boolean) { | ||||||
|  |     assertLocked("setIsActive") | ||||||
|  |     Log.d(TAG, "--> setIsActive($isActive)") | ||||||
|  |     if (this.isActive != isActive) { | ||||||
|  |       this.isActive = isActive | ||||||
|  |     } | ||||||
|  |     if (isActive && didDestroyFromOutside) { | ||||||
|  |       didDestroyFromOutside = false | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun capture( | ||||||
|  |     qualityPrioritization: QualityPrioritization, | ||||||
|  |     flash: Flash, | ||||||
|  |     enableAutoStabilization: Boolean, | ||||||
|  |     enablePhotoHdr: Boolean, | ||||||
|  |     orientation: Orientation, | ||||||
|  |     enableShutterSound: Boolean, | ||||||
|  |     enablePrecapture: Boolean | ||||||
|  |   ): TotalCaptureResult { | ||||||
|  |     // Cancel any ongoing focus jobs | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     focusJob = null | ||||||
|  |  | ||||||
|  |     mutex.withLock { | ||||||
|  |       Log.i(TAG, "Capturing photo...") | ||||||
|  |       val session = session ?: throw CameraNotReadyError() | ||||||
|  |       val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() | ||||||
|  |       val photoRequest = PhotoCaptureRequest( | ||||||
|  |         repeatingRequest, | ||||||
|  |         qualityPrioritization, | ||||||
|  |         enableAutoStabilization, | ||||||
|  |         enablePhotoHdr, | ||||||
|  |         orientation | ||||||
|  |       ) | ||||||
|  |       val device = session.device | ||||||
|  |       val deviceDetails = getOrCreateCameraDeviceDetails(device) | ||||||
|  |  | ||||||
|  |       // Submit a single high-res capture to photo output as well as all preview outputs | ||||||
|  |       val outputs = outputs | ||||||
|  |       val repeatingOutputs = outputs.filter { it.isRepeating } | ||||||
|  |  | ||||||
|  |       val skipPrecapture = !enablePrecapture || qualityPrioritization == QualityPrioritization.SPEED | ||||||
|  |       if (skipPrecapture && flash == Flash.OFF) { | ||||||
|  |         // 0. We want to take a picture as fast as possible, so skip any precapture sequence and just capture one Frame. | ||||||
|  |         Log.i(TAG, "Using fast capture path without pre-capture sequence...") | ||||||
|  |         val singleRequest = photoRequest.createCaptureRequest(device, deviceDetails, outputs) | ||||||
|  |         return session.capture(singleRequest.build(), enableShutterSound) | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       Log.i(TAG, "Locking AF/AE/AWB...") | ||||||
|  |  | ||||||
|  |       // 1. Run precapture sequence | ||||||
|  |       var needsFlash: Boolean | ||||||
|  |       try { | ||||||
|  |         val precaptureRequest = repeatingRequest.createCaptureRequest(device, deviceDetails, repeatingOutputs) | ||||||
|  |         val skipIfPassivelyFocused = flash == Flash.OFF | ||||||
|  |         val options = PrecaptureOptions( | ||||||
|  |           listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE, PrecaptureTrigger.AWB), | ||||||
|  |           flash, | ||||||
|  |           emptyList(), | ||||||
|  |           skipIfPassivelyFocused, | ||||||
|  |           PRECAPTURE_LOCK_TIMEOUT | ||||||
|  |         ) | ||||||
|  |         val result = session.precapture(precaptureRequest, deviceDetails, options) | ||||||
|  |         needsFlash = result.needsFlash | ||||||
|  |       } catch (e: CaptureTimedOutError) { | ||||||
|  |         // the precapture just timed out after 5 seconds, take picture anyways without focus. | ||||||
|  |         needsFlash = false | ||||||
|  |       } catch (e: FocusCanceledError) { | ||||||
|  |         throw CaptureAbortedError(false) | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       try { | ||||||
|  |         // 2. Once precapture AF/AE/AWB successfully locked, capture the actual photo | ||||||
|  |         val singleRequest = photoRequest.createCaptureRequest(device, deviceDetails, outputs) | ||||||
|  |         if (needsFlash) { | ||||||
|  |           singleRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) | ||||||
|  |           singleRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE) | ||||||
|  |         } | ||||||
|  |         return session.capture(singleRequest.build(), enableShutterSound) | ||||||
|  |       } finally { | ||||||
|  |         // 3. After taking a photo we set the repeating request back to idle to remove the AE/AF/AWB locks again | ||||||
|  |         val idleRequest = repeatingRequest.createCaptureRequest(device, deviceDetails, repeatingOutputs) | ||||||
|  |         session.setRepeatingRequest(idleRequest.build(), null, null) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun focus(point: Point) { | ||||||
|  |     // Cancel any previous focus jobs | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     focusJob = null | ||||||
|  |  | ||||||
|  |     mutex.withLock { | ||||||
|  |       Log.i(TAG, "Focusing to $point...") | ||||||
|  |       val session = session ?: throw CameraNotReadyError() | ||||||
|  |       val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() | ||||||
|  |       val device = session.device | ||||||
|  |       val deviceDetails = getOrCreateCameraDeviceDetails(device) | ||||||
|  |       if (!deviceDetails.supportsFocusRegions) { | ||||||
|  |         throw FocusNotSupportedError() | ||||||
|  |       } | ||||||
|  |       val outputs = outputs.filter { it.isRepeating } | ||||||
|  |  | ||||||
|  |       // 1. Run a precapture sequence for AF, AE and AWB. | ||||||
|  |       focusJob = coroutineScope.launch { | ||||||
|  |         val request = repeatingRequest.createCaptureRequest(device, deviceDetails, outputs) | ||||||
|  |         val options = | ||||||
|  |           PrecaptureOptions(listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE), Flash.OFF, listOf(point), false, FOCUS_RESET_TIMEOUT) | ||||||
|  |         session.precapture(request, deviceDetails, options) | ||||||
|  |       } | ||||||
|  |       focusJob?.join() | ||||||
|  |  | ||||||
|  |       // 2. Reset AF/AE/AWB again after 3 seconds timeout | ||||||
|  |       focusJob = coroutineScope.launch { | ||||||
|  |         delay(FOCUS_RESET_TIMEOUT) | ||||||
|  |         if (!this.isActive) { | ||||||
|  |           // this job got canceled from the outside | ||||||
|  |           return@launch | ||||||
|  |         } | ||||||
|  |         if (!isRunning || this@PersistentCameraCaptureSession.session != session) { | ||||||
|  |           // the view/session has already been destroyed in the meantime | ||||||
|  |           return@launch | ||||||
|  |         } | ||||||
|  |         Log.i(TAG, "Resetting focus to auto-focus...") | ||||||
|  |         repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request -> | ||||||
|  |           session.setRepeatingRequest(request.build(), null, null) | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun getActiveDeviceDetails(): CameraDeviceDetails? { | ||||||
|  |     val device = device ?: return null | ||||||
|  |     return getOrCreateCameraDeviceDetails(device) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private suspend fun configure() { | ||||||
|  |     if (didDestroyFromOutside && !isActive) { | ||||||
|  |       Log.d(TAG, "CameraCaptureSession has been destroyed by Android, skipping configuration until isActive is set to `true` again.") | ||||||
|  |       return | ||||||
|  |     } | ||||||
|  |     Log.d(TAG, "Configure() with isActive: $isActive, ID: $cameraId, device: $device, session: $session") | ||||||
|  |     val cameraId = cameraId ?: throw NoCameraDeviceError() | ||||||
|  |     val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() | ||||||
|  |     val outputs = outputs | ||||||
|  |  | ||||||
|  |     try { | ||||||
|  |       didDestroyFromOutside = false | ||||||
|  |  | ||||||
|  |       val device = getOrCreateDevice(cameraId) | ||||||
|  |       if (didDestroyFromOutside) return | ||||||
|  |  | ||||||
|  |       if (outputs.isEmpty()) return | ||||||
|  |       val session = getOrCreateSession(device, outputs) | ||||||
|  |       if (didDestroyFromOutside) return | ||||||
|  |  | ||||||
|  |       if (isActive) { | ||||||
|  |         Log.d(TAG, "Updating repeating request...") | ||||||
|  |         val details = getOrCreateCameraDeviceDetails(device) | ||||||
|  |         val repeatingOutputs = outputs.filter { it.isRepeating } | ||||||
|  |         val builder = repeatingRequest.createCaptureRequest(device, details, repeatingOutputs) | ||||||
|  |         session.setRepeatingRequest(builder.build(), null, null) | ||||||
|  |       } else { | ||||||
|  |         Log.d(TAG, "Stopping repeating request...") | ||||||
|  |         session.tryStopRepeating() | ||||||
|  |       } | ||||||
|  |       Log.d(TAG, "Configure() done! isActive: $isActive, ID: $cameraId, device: $device, session: $session") | ||||||
|  |     } catch (e: CameraAccessException) { | ||||||
|  |       if (didDestroyFromOutside) { | ||||||
|  |         // Camera device has been destroyed in the meantime, that's fine. | ||||||
|  |         Log.d(TAG, "Configure() canceled, session has been destroyed in the meantime!") | ||||||
|  |       } else { | ||||||
|  |         // Camera should still be active, so not sure what went wrong. Rethrow | ||||||
|  |         throw e | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private suspend fun getOrCreateDevice(cameraId: String): CameraDevice { | ||||||
|  |     val currentDevice = device | ||||||
|  |     if (currentDevice?.id == cameraId && currentDevice.isValid) { | ||||||
|  |       return currentDevice | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     this.session?.tryAbortCaptures() | ||||||
|  |     this.device?.close() | ||||||
|  |     this.device = null | ||||||
|  |     this.session = null | ||||||
|  |  | ||||||
|  |     Log.i(TAG, "Creating new device...") | ||||||
|  |     val newDevice = cameraManager.openCamera(cameraId, { device, error -> | ||||||
|  |       Log.i(TAG, "Camera $device closed!") | ||||||
|  |       if (this.device == device) { | ||||||
|  |         this.didDestroyFromOutside = true | ||||||
|  |         this.session?.tryAbortCaptures() | ||||||
|  |         this.session = null | ||||||
|  |         this.device = null | ||||||
|  |         this.isActive = false | ||||||
|  |       } | ||||||
|  |       if (error != null) { | ||||||
|  |         callback.onError(error) | ||||||
|  |       } | ||||||
|  |     }, CameraQueues.videoQueue) | ||||||
|  |     this.device = newDevice | ||||||
|  |     return newDevice | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private suspend fun getOrCreateSession(device: CameraDevice, outputs: List<SurfaceOutput>): CameraCaptureSession { | ||||||
|  |     val currentSession = session | ||||||
|  |     if (currentSession?.device == device) { | ||||||
|  |       return currentSession | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if (outputs.isEmpty()) throw NoOutputsError() | ||||||
|  |  | ||||||
|  |     Log.i(TAG, "Creating new session...") | ||||||
|  |     val newSession = device.createCaptureSession(cameraManager, outputs, { session -> | ||||||
|  |       Log.i(TAG, "Session $session closed!") | ||||||
|  |       if (this.session == session) { | ||||||
|  |         this.didDestroyFromOutside = true | ||||||
|  |         this.session?.tryAbortCaptures() | ||||||
|  |         this.session = null | ||||||
|  |         this.isActive = false | ||||||
|  |       } | ||||||
|  |     }, CameraQueues.videoQueue) | ||||||
|  |     session = newSession | ||||||
|  |     return newSession | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun getOrCreateCameraDeviceDetails(device: CameraDevice): CameraDeviceDetails { | ||||||
|  |     val currentDetails = cameraDeviceDetails | ||||||
|  |     if (currentDetails?.cameraId == device.id) { | ||||||
|  |       return currentDetails | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     val newDetails = CameraDeviceDetails(cameraManager, device.id) | ||||||
|  |     cameraDeviceDetails = newDetails | ||||||
|  |     return newDetails | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   interface Callback { | ||||||
|  |     fun onError(error: Throwable) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   class SessionIsNotLockedError(message: String) : Error(message) | ||||||
|  | } | ||||||
| @@ -2,51 +2,123 @@ package com.mrousavy.camera.core | |||||||
|  |  | ||||||
| import android.annotation.SuppressLint | import android.annotation.SuppressLint | ||||||
| import android.content.Context | import android.content.Context | ||||||
|  | import android.graphics.Point | ||||||
| import android.util.Log | import android.util.Log | ||||||
| import android.util.Size | import android.util.Size | ||||||
| import android.view.Gravity |  | ||||||
| import android.view.SurfaceHolder | import android.view.SurfaceHolder | ||||||
| import android.view.SurfaceView | import android.view.SurfaceView | ||||||
| import android.widget.FrameLayout |  | ||||||
| import com.facebook.react.bridge.UiThreadUtil | import com.facebook.react.bridge.UiThreadUtil | ||||||
| import com.mrousavy.camera.extensions.getMaximumPreviewSize | import com.mrousavy.camera.extensions.resize | ||||||
|  | import com.mrousavy.camera.extensions.rotatedBy | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.ResizeMode | import com.mrousavy.camera.types.ResizeMode | ||||||
| import kotlin.math.roundToInt | import kotlin.math.roundToInt | ||||||
|  | import kotlinx.coroutines.Dispatchers | ||||||
|  | import kotlinx.coroutines.withContext | ||||||
|  |  | ||||||
| @SuppressLint("ViewConstructor") | @SuppressLint("ViewConstructor") | ||||||
| class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceView(context) { | class PreviewView(context: Context, callback: SurfaceHolder.Callback) : | ||||||
|   var size: Size = getMaximumPreviewSize() |   SurfaceView(context), | ||||||
|  |   SurfaceHolder.Callback { | ||||||
|  |   var size: Size = CameraDeviceDetails.getMaximumPreviewSize() | ||||||
|     set(value) { |     set(value) { | ||||||
|  |       if (field != value) { | ||||||
|  |         Log.i(TAG, "Surface Size changed: $field -> $value") | ||||||
|         field = value |         field = value | ||||||
|       UiThreadUtil.runOnUiThread { |         updateLayout() | ||||||
|         Log.i(TAG, "Setting PreviewView Surface Size to $width x $height...") |  | ||||||
|         holder.setFixedSize(value.height, value.width) |  | ||||||
|         requestLayout() |  | ||||||
|         invalidate() |  | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|   var resizeMode: ResizeMode = ResizeMode.COVER |   var resizeMode: ResizeMode = ResizeMode.COVER | ||||||
|     set(value) { |     set(value) { | ||||||
|  |       if (field != value) { | ||||||
|  |         Log.i(TAG, "Resize Mode changed: $field -> $value") | ||||||
|         field = value |         field = value | ||||||
|  |         updateLayout() | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   var orientation: Orientation = Orientation.PORTRAIT | ||||||
|  |       set(value) { | ||||||
|  |         if (field != value) { | ||||||
|  |           Log.i(TAG, "View Orientation changed: $field -> $value") | ||||||
|  |           field = value | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |   private var inputOrientation: Orientation = Orientation.LANDSCAPE_LEFT | ||||||
|  |     set(value) { | ||||||
|  |       if (field != value) { | ||||||
|  |         Log.i(TAG, "Input Orientation changed: $field -> $value") | ||||||
|  |         field = value | ||||||
|  |         updateLayout() | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   private val viewSize: Size | ||||||
|  |     get() { | ||||||
|  |       val displayMetrics = context.resources.displayMetrics | ||||||
|  |       val dpX = width / displayMetrics.density | ||||||
|  |       val dpY = height / displayMetrics.density | ||||||
|  |       return Size(dpX.toInt(), dpY.toInt()) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |   init { | ||||||
|  |     Log.i(TAG, "Creating PreviewView...") | ||||||
|  |     holder.setKeepScreenOn(true) | ||||||
|  |     holder.addCallback(this) | ||||||
|  |     holder.addCallback(callback) | ||||||
|  |     holder.setFixedSize(size.width, size.height) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun surfaceCreated(holder: SurfaceHolder) = Unit | ||||||
|  |   override fun surfaceDestroyed(holder: SurfaceHolder) = Unit | ||||||
|  |   override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { | ||||||
|  |     size = Size(width, height) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun setSurfaceSize(width: Int, height: Int, cameraSensorOrientation: Orientation) { | ||||||
|  |     withContext(Dispatchers.Main) { | ||||||
|  |       inputOrientation = cameraSensorOrientation | ||||||
|  |       holder.resize(width, height) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point { | ||||||
|  |     val sensorOrientation = cameraDeviceDetails.sensorOrientation | ||||||
|  |     val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height()) | ||||||
|  |     val viewOrientation = Orientation.PORTRAIT | ||||||
|  |  | ||||||
|  |     val rotated = point.rotatedBy(viewSize, cameraSize, viewOrientation, sensorOrientation) | ||||||
|  |     Log.i(TAG, "Converted layer point $point to camera point $rotated! ($sensorOrientation, $cameraSize -> $viewSize)") | ||||||
|  |     return rotated | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun updateLayout() { | ||||||
|     UiThreadUtil.runOnUiThread { |     UiThreadUtil.runOnUiThread { | ||||||
|       requestLayout() |       requestLayout() | ||||||
|       invalidate() |       invalidate() | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   init { |   override fun requestLayout() { | ||||||
|     Log.i(TAG, "Creating PreviewView...") |     super.requestLayout() | ||||||
|     layoutParams = FrameLayout.LayoutParams( |     // Manually trigger measure & layout, as RN on Android skips those. | ||||||
|       FrameLayout.LayoutParams.MATCH_PARENT, |     // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427 | ||||||
|       FrameLayout.LayoutParams.MATCH_PARENT, |     post { | ||||||
|       Gravity.CENTER |       measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY)) | ||||||
|     ) |       layout(left, top, right, bottom) | ||||||
|     holder.addCallback(callback) |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size { |   private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size { | ||||||
|  |     var contentSize = contentSize | ||||||
|  |     // Swap dimensions if orientation is landscape | ||||||
|  |     if (orientation.isLandscape()) { | ||||||
|  |       contentSize = Size(contentSize.height, contentSize.width) | ||||||
|  |     } | ||||||
|     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height |     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height | ||||||
|     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height |     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height | ||||||
|  |     if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) { | ||||||
|  |       // One of the aspect ratios is 0 or NaN, maybe the view hasn't been laid out yet. | ||||||
|  |       return contentSize | ||||||
|  |     } | ||||||
|  |  | ||||||
|     val widthOverHeight = when (resizeMode) { |     val widthOverHeight = when (resizeMode) { | ||||||
|       ResizeMode.COVER -> contentAspectRatio > containerAspectRatio |       ResizeMode.COVER -> contentAspectRatio > containerAspectRatio | ||||||
| @@ -68,10 +140,11 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceV | |||||||
|   override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) { |   override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) { | ||||||
|     super.onMeasure(widthMeasureSpec, heightMeasureSpec) |     super.onMeasure(widthMeasureSpec, heightMeasureSpec) | ||||||
|  |  | ||||||
|     val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)) |     val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)) | ||||||
|     val fittedSize = getSize(size, viewSize, resizeMode) |     val surfaceSize = size.rotatedBy(inputOrientation) | ||||||
|  |     val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode) | ||||||
|  |  | ||||||
|     Log.i(TAG, "PreviewView is $viewSize, rendering $size content. Resizing to: $fittedSize ($resizeMode)") |     Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)") | ||||||
|     setMeasuredDimension(fittedSize.width, fittedSize.height) |     setMeasuredDimension(fittedSize.width, fittedSize.height) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -33,6 +33,7 @@ class VideoPipeline( | |||||||
|   val format: PixelFormat = PixelFormat.NATIVE, |   val format: PixelFormat = PixelFormat.NATIVE, | ||||||
|   private val isMirrored: Boolean = false, |   private val isMirrored: Boolean = false, | ||||||
|   private val enableFrameProcessor: Boolean = false, |   private val enableFrameProcessor: Boolean = false, | ||||||
|  |   enableGpuBuffers: Boolean = false, | ||||||
|   private val callback: CameraSession.Callback |   private val callback: CameraSession.Callback | ||||||
| ) : SurfaceTexture.OnFrameAvailableListener, | ) : SurfaceTexture.OnFrameAvailableListener, | ||||||
|   Closeable { |   Closeable { | ||||||
| @@ -79,17 +80,25 @@ class VideoPipeline( | |||||||
|       val format = getImageReaderFormat() |       val format = getImageReaderFormat() | ||||||
|       Log.i(TAG, "Using ImageReader round-trip (format: #$format)") |       Log.i(TAG, "Using ImageReader round-trip (format: #$format)") | ||||||
|  |  | ||||||
|       if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { |       // Create ImageReader | ||||||
|         Log.i(TAG, "Using API 29 for GPU ImageReader...") |       if (enableGpuBuffers && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { | ||||||
|         val usageFlags = getRecommendedHardwareBufferFlags() |         val usageFlags = getRecommendedHardwareBufferFlags() | ||||||
|         Log.i(TAG, "Using ImageReader flags: $usageFlags") |         Log.i(TAG, "Creating ImageReader with GPU-optimized usage flags: $usageFlags") | ||||||
|         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags) |         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags) | ||||||
|  |       } else { | ||||||
|  |         Log.i(TAG, "Creating ImageReader with default usage flags...") | ||||||
|  |         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES) | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       // Create ImageWriter | ||||||
|  |       if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { | ||||||
|  |         Log.i(TAG, "Creating ImageWriter with format #$format...") | ||||||
|         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format) |         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format) | ||||||
|       } else { |       } else { | ||||||
|         Log.i(TAG, "Using legacy API for CPU ImageReader...") |         Log.i(TAG, "Creating ImageWriter with default format...") | ||||||
|         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES) |  | ||||||
|         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES) |         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES) | ||||||
|       } |       } | ||||||
|  |  | ||||||
|       imageReader!!.setOnImageAvailableListener({ reader -> |       imageReader!!.setOnImageAvailableListener({ reader -> | ||||||
|         // Log.i(TAG, "ImageReader::onImageAvailable!")s |         // Log.i(TAG, "ImageReader::onImageAvailable!")s | ||||||
|         val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener |         val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener | ||||||
| @@ -107,7 +116,7 @@ class VideoPipeline( | |||||||
|           } |           } | ||||||
|         } catch (e: Throwable) { |         } catch (e: Throwable) { | ||||||
|           Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e) |           Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e) | ||||||
|           throw e |           callback.onError(e) | ||||||
|         } finally { |         } finally { | ||||||
|           frame.decrementRefCount() |           frame.decrementRefCount() | ||||||
|         } |         } | ||||||
| @@ -125,8 +134,11 @@ class VideoPipeline( | |||||||
|       isActive = false |       isActive = false | ||||||
|       imageWriter?.close() |       imageWriter?.close() | ||||||
|       imageReader?.close() |       imageReader?.close() | ||||||
|  |       removeRecordingSessionOutputSurface() | ||||||
|       recordingSession = null |       recordingSession = null | ||||||
|  |       surfaceTexture.setOnFrameAvailableListener(null, null) | ||||||
|       surfaceTexture.release() |       surfaceTexture.release() | ||||||
|  |       surface.release() | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -173,7 +185,7 @@ class VideoPipeline( | |||||||
|     synchronized(this) { |     synchronized(this) { | ||||||
|       if (recordingSession != null) { |       if (recordingSession != null) { | ||||||
|         // Configure OpenGL pipeline to stream Frames into the Recording Session's surface |         // Configure OpenGL pipeline to stream Frames into the Recording Session's surface | ||||||
|         Log.i(TAG, "Setting $width x $height RecordingSession Output...") |         Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...") | ||||||
|         setRecordingSessionOutputSurface(recordingSession.surface) |         setRecordingSessionOutputSurface(recordingSession.surface) | ||||||
|         this.recordingSession = recordingSession |         this.recordingSession = recordingSession | ||||||
|       } else { |       } else { | ||||||
| @@ -228,7 +240,11 @@ class VideoPipeline( | |||||||
|   @RequiresApi(Build.VERSION_CODES.Q) |   @RequiresApi(Build.VERSION_CODES.Q) | ||||||
|   private fun supportsHardwareBufferFlags(flags: Long): Boolean { |   private fun supportsHardwareBufferFlags(flags: Long): Boolean { | ||||||
|     val hardwareBufferFormat = format.toHardwareBufferFormat() |     val hardwareBufferFormat = format.toHardwareBufferFormat() | ||||||
|  |     try { | ||||||
|       return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags) |       return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags) | ||||||
|  |     } catch (_: Throwable) { | ||||||
|  |       return false | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private external fun getInputTextureId(): Int |   private external fun getInputTextureId(): Int | ||||||
|   | |||||||
| @@ -0,0 +1,88 @@ | |||||||
|  | package com.mrousavy.camera.core.capture | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.FlashUnavailableError | ||||||
|  | import com.mrousavy.camera.core.InvalidVideoHdrError | ||||||
|  | import com.mrousavy.camera.core.LowLightBoostNotSupportedError | ||||||
|  | import com.mrousavy.camera.core.PropRequiresFormatToBeNonNullError | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.extensions.setZoom | ||||||
|  | import com.mrousavy.camera.types.CameraDeviceFormat | ||||||
|  | import com.mrousavy.camera.types.Torch | ||||||
|  |  | ||||||
|  | abstract class CameraCaptureRequest( | ||||||
|  |   private val torch: Torch = Torch.OFF, | ||||||
|  |   private val enableVideoHdr: Boolean = false, | ||||||
|  |   val enableLowLightBoost: Boolean = false, | ||||||
|  |   val exposureBias: Double? = null, | ||||||
|  |   val zoom: Float = 1.0f, | ||||||
|  |   val format: CameraDeviceFormat? = null | ||||||
|  | ) { | ||||||
|  |   enum class Template { | ||||||
|  |     RECORD, | ||||||
|  |     PHOTO, | ||||||
|  |     PHOTO_ZSL, | ||||||
|  |     PHOTO_SNAPSHOT, | ||||||
|  |     PREVIEW; | ||||||
|  |  | ||||||
|  |     fun toRequestTemplate(): Int = | ||||||
|  |       when (this) { | ||||||
|  |         RECORD -> CameraDevice.TEMPLATE_RECORD | ||||||
|  |         PHOTO -> CameraDevice.TEMPLATE_STILL_CAPTURE | ||||||
|  |         PHOTO_ZSL -> CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG | ||||||
|  |         PHOTO_SNAPSHOT -> CameraDevice.TEMPLATE_VIDEO_SNAPSHOT | ||||||
|  |         PREVIEW -> CameraDevice.TEMPLATE_PREVIEW | ||||||
|  |       } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   abstract fun createCaptureRequest( | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder | ||||||
|  |  | ||||||
|  |   protected open fun createCaptureRequest( | ||||||
|  |     template: Template, | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val builder = device.createCaptureRequest(template.toRequestTemplate()) | ||||||
|  |  | ||||||
|  |     // Add all repeating output surfaces | ||||||
|  |     outputs.forEach { output -> | ||||||
|  |       builder.addTarget(output.surface) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set HDR | ||||||
|  |     if (enableVideoHdr) { | ||||||
|  |       if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr") | ||||||
|  |       if (!format.supportsVideoHdr) throw InvalidVideoHdrError() | ||||||
|  |       builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE) | ||||||
|  |     } else if (enableLowLightBoost) { | ||||||
|  |       if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError() | ||||||
|  |       builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set Exposure Bias | ||||||
|  |     if (exposureBias != null) { | ||||||
|  |       val clamped = deviceDetails.exposureRange.clamp(exposureBias.toInt()) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, clamped) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set Zoom | ||||||
|  |     builder.setZoom(zoom, deviceDetails) | ||||||
|  |  | ||||||
|  |     // Set Torch | ||||||
|  |     if (torch == Torch.ON) { | ||||||
|  |       if (!deviceDetails.hasFlash) throw FlashUnavailableError() | ||||||
|  |       builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return builder | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,149 @@ | |||||||
|  | package com.mrousavy.camera.core.capture | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCharacteristics | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.os.Build | ||||||
|  | import android.util.Log | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
|  | import com.mrousavy.camera.types.QualityPrioritization | ||||||
|  | import com.mrousavy.camera.types.Torch | ||||||
|  |  | ||||||
|  | class PhotoCaptureRequest( | ||||||
|  |   repeatingRequest: RepeatingCaptureRequest, | ||||||
|  |   private val qualityPrioritization: QualityPrioritization, | ||||||
|  |   private val enableAutoStabilization: Boolean, | ||||||
|  |   enablePhotoHdr: Boolean, | ||||||
|  |   private val outputOrientation: Orientation | ||||||
|  | ) : CameraCaptureRequest( | ||||||
|  |   Torch.OFF, | ||||||
|  |   enablePhotoHdr, | ||||||
|  |   repeatingRequest.enableLowLightBoost, | ||||||
|  |   repeatingRequest.exposureBias, | ||||||
|  |   repeatingRequest.zoom, | ||||||
|  |   repeatingRequest.format | ||||||
|  | ) { | ||||||
|  |   companion object { | ||||||
|  |     private const val TAG = "PhotoCaptureRequest" | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun createCaptureRequest( | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val template = when (qualityPrioritization) { | ||||||
|  |       QualityPrioritization.QUALITY -> Template.PHOTO | ||||||
|  |       QualityPrioritization.BALANCED -> { | ||||||
|  |         if (deviceDetails.supportsZsl) { | ||||||
|  |           Template.PHOTO_ZSL | ||||||
|  |         } else { | ||||||
|  |           Template.PHOTO | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       QualityPrioritization.SPEED -> { | ||||||
|  |         if (deviceDetails.supportsSnapshotCapture) { | ||||||
|  |           Template.PHOTO_SNAPSHOT | ||||||
|  |         } else if (deviceDetails.supportsZsl) { | ||||||
|  |           Template.PHOTO_ZSL | ||||||
|  |         } else { | ||||||
|  |           Template.PHOTO | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     Log.i(TAG, "Using CaptureRequest Template $template...") | ||||||
|  |     return this.createCaptureRequest(template, device, deviceDetails, outputs) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun createCaptureRequest( | ||||||
|  |     template: Template, | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val builder = super.createCaptureRequest(template, device, deviceDetails, outputs) | ||||||
|  |  | ||||||
|  |     // Set various speed vs quality optimization flags | ||||||
|  |     when (qualityPrioritization) { | ||||||
|  |       QualityPrioritization.SPEED -> { | ||||||
|  |         if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.FULL)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_FAST) | ||||||
|  |           if (deviceDetails.availableEdgeModes.contains(CaptureRequest.EDGE_MODE_FAST)) { | ||||||
|  |             builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_FAST) | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableAberrationModes.contains(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableHotPixelModes.contains(CaptureRequest.HOT_PIXEL_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableDistortionCorrectionModes.contains(CaptureRequest.DISTORTION_CORRECTION_MODE_FAST) && | ||||||
|  |           Build.VERSION.SDK_INT >= Build.VERSION_CODES.P | ||||||
|  |         ) { | ||||||
|  |           builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableNoiseReductionModes.contains(CaptureRequest.NOISE_REDUCTION_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableShadingModes.contains(CaptureRequest.SHADING_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         builder.set(CaptureRequest.JPEG_QUALITY, 85) | ||||||
|  |       } | ||||||
|  |       QualityPrioritization.BALANCED -> { | ||||||
|  |         builder.set(CaptureRequest.JPEG_QUALITY, 92) | ||||||
|  |       } | ||||||
|  |       QualityPrioritization.QUALITY -> { | ||||||
|  |         if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.FULL)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY) | ||||||
|  |           if (deviceDetails.availableEdgeModes.contains(CaptureRequest.EDGE_MODE_HIGH_QUALITY)) { | ||||||
|  |             builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY) | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableAberrationModes.contains(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableHotPixelModes.contains(CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableDistortionCorrectionModes.contains(CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) && | ||||||
|  |           Build.VERSION.SDK_INT >= Build.VERSION_CODES.P | ||||||
|  |         ) { | ||||||
|  |           builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableNoiseReductionModes.contains(CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableShadingModes.contains(CaptureRequest.SHADING_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         builder.set(CaptureRequest.JPEG_QUALITY, 100) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set JPEG Orientation | ||||||
|  |     val targetOrientation = outputOrientation.toSensorRelativeOrientation(deviceDetails) | ||||||
|  |     builder.set(CaptureRequest.JPEG_ORIENTATION, targetOrientation.toDegrees()) | ||||||
|  |  | ||||||
|  |     // Set stabilization for this Frame | ||||||
|  |     if (enableAutoStabilization) { | ||||||
|  |       if (deviceDetails.opticalStabilizationModes.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON)) { | ||||||
|  |         builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) | ||||||
|  |       } else if (deviceDetails.digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) { | ||||||
|  |         builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return builder | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,113 @@ | |||||||
|  | package com.mrousavy.camera.core.capture | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCharacteristics | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.os.Build | ||||||
|  | import android.util.Range | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.InvalidFpsError | ||||||
|  | import com.mrousavy.camera.core.InvalidVideoStabilizationMode | ||||||
|  | import com.mrousavy.camera.core.PropRequiresFormatToBeNonNullError | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.types.CameraDeviceFormat | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  | import com.mrousavy.camera.types.Torch | ||||||
|  | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
|  |  | ||||||
|  | class RepeatingCaptureRequest( | ||||||
|  |   private val enableVideoPipeline: Boolean, | ||||||
|  |   torch: Torch = Torch.OFF, | ||||||
|  |   private val fps: Int? = null, | ||||||
|  |   private val videoStabilizationMode: VideoStabilizationMode = VideoStabilizationMode.OFF, | ||||||
|  |   enableVideoHdr: Boolean = false, | ||||||
|  |   enableLowLightBoost: Boolean = false, | ||||||
|  |   exposureBias: Double? = null, | ||||||
|  |   zoom: Float = 1.0f, | ||||||
|  |   format: CameraDeviceFormat? = null | ||||||
|  | ) : CameraCaptureRequest(torch, enableVideoHdr, enableLowLightBoost, exposureBias, zoom, format) { | ||||||
|  |   override fun createCaptureRequest( | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val template = if (enableVideoPipeline) Template.RECORD else Template.PREVIEW | ||||||
|  |     return this.createCaptureRequest(template, device, deviceDetails, outputs) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun getBestDigitalStabilizationMode(deviceDetails: CameraDeviceDetails): Int { | ||||||
|  |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { | ||||||
|  |       if (deviceDetails.digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION)) { | ||||||
|  |         return CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     return CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun createCaptureRequest( | ||||||
|  |     template: Template, | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val builder = super.createCaptureRequest(template, device, deviceDetails, outputs) | ||||||
|  |  | ||||||
|  |     if (deviceDetails.modes.contains(CameraCharacteristics.CONTROL_MODE_AUTO)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set AF | ||||||
|  |     if (enableVideoPipeline && deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) | ||||||
|  |     } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE) | ||||||
|  |     } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_AUTO)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO) | ||||||
|  |     } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_OFF)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF) | ||||||
|  |       builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 0f) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set AE | ||||||
|  |     if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_ON)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) | ||||||
|  |     } else if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_OFF)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set AWB | ||||||
|  |     if (deviceDetails.awbModes.contains(CameraCharacteristics.CONTROL_AWB_MODE_AUTO)) { | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set FPS | ||||||
|  |     if (fps != null) { | ||||||
|  |       if (format == null) throw PropRequiresFormatToBeNonNullError("fps") | ||||||
|  |       if (format.maxFps < fps) throw InvalidFpsError(fps) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps)) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set Video Stabilization | ||||||
|  |     if (videoStabilizationMode != VideoStabilizationMode.OFF) { | ||||||
|  |       if (format == null) throw PropRequiresFormatToBeNonNullError("videoStabilizationMode") | ||||||
|  |       if (!format.videoStabilizationModes.contains(videoStabilizationMode)) { | ||||||
|  |         throw InvalidVideoStabilizationMode(videoStabilizationMode) | ||||||
|  |       } | ||||||
|  |       when (videoStabilizationMode) { | ||||||
|  |         VideoStabilizationMode.STANDARD -> { | ||||||
|  |           builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, getBestDigitalStabilizationMode(deviceDetails)) | ||||||
|  |         } | ||||||
|  |         VideoStabilizationMode.CINEMATIC, VideoStabilizationMode.CINEMATIC_EXTENDED -> { | ||||||
|  |           if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)) { | ||||||
|  |             builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) | ||||||
|  |           } else { | ||||||
|  |             builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, getBestDigitalStabilizationMode(deviceDetails)) | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |         else -> throw InvalidVideoStabilizationMode(videoStabilizationMode) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return builder | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -10,13 +10,7 @@ import android.view.Surface | |||||||
| import androidx.annotation.RequiresApi | import androidx.annotation.RequiresApi | ||||||
| import java.io.Closeable | import java.io.Closeable | ||||||
|  |  | ||||||
| open class SurfaceOutput( | open class SurfaceOutput(val surface: Surface, val size: Size, val outputType: OutputType, val enableHdr: Boolean = false) : Closeable { | ||||||
|   val surface: Surface, |  | ||||||
|   val size: Size, |  | ||||||
|   val outputType: OutputType, |  | ||||||
|   val enableHdr: Boolean = false, |  | ||||||
|   private val closeSurfaceOnEnd: Boolean = false |  | ||||||
| ) : Closeable { |  | ||||||
|   companion object { |   companion object { | ||||||
|     const val TAG = "SurfaceOutput" |     const val TAG = "SurfaceOutput" | ||||||
|  |  | ||||||
| @@ -52,12 +46,18 @@ open class SurfaceOutput( | |||||||
|     return result |     return result | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |   val isRepeating: Boolean | ||||||
|  |     get() { | ||||||
|  |       return when (outputType) { | ||||||
|  |         OutputType.VIDEO, OutputType.PREVIEW, OutputType.VIDEO_AND_PREVIEW -> true | ||||||
|  |         OutputType.PHOTO -> false | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|   override fun toString(): String = "$outputType (${size.width} x ${size.height})" |   override fun toString(): String = "$outputType (${size.width} x ${size.height})" | ||||||
|  |  | ||||||
|   override fun close() { |   override fun close() { | ||||||
|     if (closeSurfaceOnEnd) { |     // close() does nothing by default | ||||||
|       surface.release() |  | ||||||
|     } |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   enum class OutputType { |   enum class OutputType { | ||||||
|   | |||||||
| @@ -5,38 +5,60 @@ import android.hardware.camera2.CaptureFailure | |||||||
| import android.hardware.camera2.CaptureRequest | import android.hardware.camera2.CaptureRequest | ||||||
| import android.hardware.camera2.TotalCaptureResult | import android.hardware.camera2.TotalCaptureResult | ||||||
| import android.media.MediaActionSound | import android.media.MediaActionSound | ||||||
| import com.mrousavy.camera.core.CameraQueues | import android.util.Log | ||||||
| import com.mrousavy.camera.core.CaptureAbortedError | import com.mrousavy.camera.core.CaptureAbortedError | ||||||
|  | import com.mrousavy.camera.core.CaptureTimedOutError | ||||||
| import com.mrousavy.camera.core.UnknownCaptureError | import com.mrousavy.camera.core.UnknownCaptureError | ||||||
| import kotlin.coroutines.resume | import kotlin.coroutines.resume | ||||||
| import kotlin.coroutines.resumeWithException | import kotlin.coroutines.resumeWithException | ||||||
| import kotlin.coroutines.suspendCoroutine | import kotlinx.coroutines.CoroutineScope | ||||||
|  | import kotlinx.coroutines.Dispatchers | ||||||
|  | import kotlinx.coroutines.delay | ||||||
|  | import kotlinx.coroutines.launch | ||||||
|  | import kotlinx.coroutines.suspendCancellableCoroutine | ||||||
|  |  | ||||||
|  | private const val TAG = "CameraCaptureSession" | ||||||
|  |  | ||||||
| suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult = | suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult = | ||||||
|   suspendCoroutine { continuation -> |   suspendCancellableCoroutine { continuation -> | ||||||
|     val shutterSound = if (enableShutterSound) MediaActionSound() else null |     val shutterSound = if (enableShutterSound) MediaActionSound() else null | ||||||
|     shutterSound?.load(MediaActionSound.SHUTTER_CLICK) |     shutterSound?.load(MediaActionSound.SHUTTER_CLICK) | ||||||
|  |  | ||||||
|  |     CoroutineScope(Dispatchers.Default).launch { | ||||||
|  |       delay(5000) // after 5s, cancel capture | ||||||
|  |       if (continuation.isActive) { | ||||||
|  |         Log.e(TAG, "Capture timed out after 5 seconds!") | ||||||
|  |         continuation.resumeWithException(CaptureTimedOutError()) | ||||||
|  |         tryAbortCaptures() | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|     this.capture( |     this.capture( | ||||||
|       captureRequest, |       captureRequest, | ||||||
|       object : CameraCaptureSession.CaptureCallback() { |       object : CameraCaptureSession.CaptureCallback() { | ||||||
|         override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) { |         override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) { | ||||||
|           super.onCaptureCompleted(session, request, result) |           super.onCaptureCompleted(session, request, result) | ||||||
|  |  | ||||||
|  |           if (request == captureRequest) { | ||||||
|             continuation.resume(result) |             continuation.resume(result) | ||||||
|             shutterSound?.release() |             shutterSound?.release() | ||||||
|           } |           } | ||||||
|  |         } | ||||||
|  |  | ||||||
|         override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) { |         override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) { | ||||||
|           super.onCaptureStarted(session, request, timestamp, frameNumber) |           super.onCaptureStarted(session, request, timestamp, frameNumber) | ||||||
|  |  | ||||||
|  |           if (request == captureRequest) { | ||||||
|             if (enableShutterSound) { |             if (enableShutterSound) { | ||||||
|               shutterSound?.play(MediaActionSound.SHUTTER_CLICK) |               shutterSound?.play(MediaActionSound.SHUTTER_CLICK) | ||||||
|             } |             } | ||||||
|           } |           } | ||||||
|  |         } | ||||||
|  |  | ||||||
|         override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) { |         override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) { | ||||||
|           super.onCaptureFailed(session, request, failure) |           super.onCaptureFailed(session, request, failure) | ||||||
|  |  | ||||||
|  |           if (request == captureRequest) { | ||||||
|             val wasImageCaptured = failure.wasImageCaptured() |             val wasImageCaptured = failure.wasImageCaptured() | ||||||
|             val error = when (failure.reason) { |             val error = when (failure.reason) { | ||||||
|               CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured) |               CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured) | ||||||
| @@ -45,7 +67,8 @@ suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableS | |||||||
|             } |             } | ||||||
|             continuation.resumeWithException(error) |             continuation.resumeWithException(error) | ||||||
|           } |           } | ||||||
|  |         } | ||||||
|       }, |       }, | ||||||
|       CameraQueues.cameraQueue.handler |       null | ||||||
|     ) |     ) | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -0,0 +1,151 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.graphics.Point | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.hardware.camera2.CaptureResult | ||||||
|  | import android.hardware.camera2.params.MeteringRectangle | ||||||
|  | import android.util.Log | ||||||
|  | import android.util.Size | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.FocusCanceledError | ||||||
|  | import com.mrousavy.camera.types.Flash | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  | import kotlin.coroutines.coroutineContext | ||||||
|  | import kotlinx.coroutines.isActive | ||||||
|  |  | ||||||
/**
 * Configuration for a precapture (AF/AE/AWB) sequence.
 */
data class PrecaptureOptions(
  // Which 3A routines (AF, AE, AWB) to trigger and wait for.
  val modes: List<PrecaptureTrigger>,
  // Flash behavior; AUTO reads the current AE state first to decide whether flash is needed.
  val flash: Flash = Flash.OFF,
  // Points to meter on (sensor coordinates); empty means no explicit metering regions are set.
  val pointsOfInterest: List<Point>,
  // If true, routines that are already passively focused/converged are removed from [modes].
  val skipIfPassivelyFocused: Boolean,
  // Maximum time (in milliseconds) to wait for all triggered routines to lock.
  val timeoutMs: Long
)

// Result of a precapture sequence; [needsFlash] is true when AE ended in the FlashRequired state.
data class PrecaptureResult(val needsFlash: Boolean)
|  |  | ||||||
|  | private const val TAG = "Precapture" | ||||||
|  | private val DEFAULT_METERING_SIZE = Size(100, 100) | ||||||
|  |  | ||||||
/**
 * Run a precapture sequence to trigger an AF, AE or AWB scan and lock to the optimal values.
 * After this function completes, you can capture high quality photos as AF/AE/AWB are in focused state.
 *
 * To reset to auto-focus again, create a new `RepeatingRequest` with a fresh set of CONTROL_MODEs set.
 *
 * @param request Builder for the capture request; its 3A trigger/mode/region keys are mutated in-place.
 * @param deviceDetails Capabilities of this camera (supported AF/AE/AWB modes, metering regions, flash, hardware level).
 * @param options Which triggers to run, flash behavior, metering points, skip behavior and timeout.
 * @return Whether the subsequent photo capture needs flash (AE ended in [ExposureState.FlashRequired]).
 * @throws FocusCanceledError when the calling coroutine is cancelled mid-sequence.
 */
suspend fun CameraCaptureSession.precapture(
  request: CaptureRequest.Builder,
  deviceDetails: CameraDeviceDetails,
  options: PrecaptureOptions
): PrecaptureResult {
  Log.i(TAG, "Running precapture sequence... ($options)")
  request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)

  var enableFlash = options.flash == Flash.ON
  var afState = FocusState.Inactive
  var aeState = ExposureState.Inactive
  var awbState = WhiteBalanceState.Inactive
  val precaptureModes = options.modes.toMutableList()

  // 1. Cancel any ongoing precapture sequences
  request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL)
  request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL)
  if (options.flash == Flash.AUTO || options.skipIfPassivelyFocused) {
    // We want to read the current AE/AF/AWB values to determine if we need flash or can skip AF/AE/AWB precapture
    val result = this.capture(request.build(), false)

    // Missing state keys fall back to INACTIVE (a device may not report every routine).
    afState = FocusState.fromAFState(result.get(CaptureResult.CONTROL_AF_STATE) ?: CaptureResult.CONTROL_AF_STATE_INACTIVE)
    aeState = ExposureState.fromAEState(result.get(CaptureResult.CONTROL_AE_STATE) ?: CaptureResult.CONTROL_AE_STATE_INACTIVE)
    awbState = WhiteBalanceState.fromAWBState(result.get(CaptureResult.CONTROL_AWB_STATE) ?: CaptureResult.CONTROL_AWB_STATE_INACTIVE)

    Log.i(TAG, "Precapture current states: AF: $afState, AE: $aeState, AWB: $awbState")
    enableFlash = aeState == ExposureState.FlashRequired && options.flash == Flash.AUTO
  } else {
    // we either want Flash ON or OFF, so we don't care about lighting conditions - do a fast capture.
    this.capture(request.build(), null, null)
  }

  // Bail out if the enclosing coroutine was cancelled while we were capturing.
  if (!coroutineContext.isActive) throw FocusCanceledError()

  // Near-maximum weight so our explicit regions dominate the default metering.
  val meteringWeight = MeteringRectangle.METERING_WEIGHT_MAX - 1
  val meteringRectangles = options.pointsOfInterest.map { point ->
    MeteringRectangle(point, DEFAULT_METERING_SIZE, meteringWeight)
  }.toTypedArray()

  if (options.skipIfPassivelyFocused) {
    // If user allows us to skip precapture for values that are already focused, remove them from the precapture modes.
    if (afState.isPassivelyFocused) {
      Log.i(TAG, "AF is already focused, skipping...")
      precaptureModes.remove(PrecaptureTrigger.AF)
    }
    if (aeState.isPassivelyFocused) {
      Log.i(TAG, "AE is already focused, skipping...")
      precaptureModes.remove(PrecaptureTrigger.AE)
    }
    if (awbState.isPassivelyFocused) {
      Log.i(TAG, "AWB is already focused, skipping...")
      precaptureModes.remove(PrecaptureTrigger.AWB)
    }
  }

  // 2. Submit a precapture start sequence
  if (enableFlash && deviceDetails.hasFlash) {
    request.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH)
  }
  if (precaptureModes.contains(PrecaptureTrigger.AF)) {
    // AF Precapture
    if (deviceDetails.afModes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO)) {
      request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
      request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START)
      if (meteringRectangles.isNotEmpty() && deviceDetails.supportsFocusRegions) {
        request.set(CaptureRequest.CONTROL_AF_REGIONS, meteringRectangles)
      }
    } else {
      // AF is not supported on this device.
      precaptureModes.remove(PrecaptureTrigger.AF)
    }
  }
  if (precaptureModes.contains(PrecaptureTrigger.AE)) {
    // AE Precapture
    if (deviceDetails.aeModes.contains(CaptureRequest.CONTROL_AE_MODE_ON) && deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)) {
      request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
      request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START)
      if (meteringRectangles.isNotEmpty() &&
        deviceDetails.supportsExposureRegions &&
        deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)
      ) {
        request.set(CaptureRequest.CONTROL_AE_REGIONS, meteringRectangles)
      }
    } else {
      // AE is not supported on this device.
      precaptureModes.remove(PrecaptureTrigger.AE)
    }
  }
  if (precaptureModes.contains(PrecaptureTrigger.AWB)) {
    // AWB Precapture
    if (deviceDetails.awbModes.contains(CaptureRequest.CONTROL_AWB_MODE_AUTO)) {
      request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO)
      if (meteringRectangles.isNotEmpty() && deviceDetails.supportsWhiteBalanceRegions) {
        request.set(CaptureRequest.CONTROL_AWB_REGIONS, meteringRectangles)
      }
    } else {
      // AWB is not supported on this device.
      precaptureModes.remove(PrecaptureTrigger.AWB)
    }
  }
  // Fire the configured trigger(s) as a single one-shot capture.
  this.capture(request.build(), null, null)

  if (!coroutineContext.isActive) throw FocusCanceledError()

  // 3. Start a repeating request without the trigger and wait until AF/AE/AWB locks
  request.set(CaptureRequest.CONTROL_AF_TRIGGER, null)
  request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, null)
  val result = this.setRepeatingRequestAndWaitForPrecapture(request.build(), options.timeoutMs, *precaptureModes.toTypedArray())

  if (!coroutineContext.isActive) throw FocusCanceledError()

  Log.i(TAG, "AF/AE/AWB successfully locked!")

  val needsFlash = result.exposureState == ExposureState.FlashRequired
  return PrecaptureResult(needsFlash)
}
| @@ -0,0 +1,193 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  | import android.hardware.camera2.CaptureFailure | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.hardware.camera2.CaptureResult | ||||||
|  | import android.hardware.camera2.TotalCaptureResult | ||||||
|  | import android.util.Log | ||||||
|  | import com.mrousavy.camera.core.CaptureAbortedError | ||||||
|  | import com.mrousavy.camera.core.CaptureTimedOutError | ||||||
|  | import kotlin.coroutines.resume | ||||||
|  | import kotlin.coroutines.resumeWithException | ||||||
|  | import kotlinx.coroutines.CoroutineScope | ||||||
|  | import kotlinx.coroutines.Dispatchers | ||||||
|  | import kotlinx.coroutines.delay | ||||||
|  | import kotlinx.coroutines.launch | ||||||
|  | import kotlinx.coroutines.suspendCancellableCoroutine | ||||||
|  |  | ||||||
|  | private const val TAG = "CameraCaptureSession" | ||||||
|  |  | ||||||
// The 3A routines that can be triggered and awaited during a precapture sequence.
enum class PrecaptureTrigger {
  AE,
  AF,
  AWB
}
|  |  | ||||||
// Common interface implemented by the AF/AE/AWB state enums below.
interface AutoState {
  // True when the routine finished its active scan (locked/converged), whether successful or not.
  val isCompleted: Boolean
  // True when the routine is already focused/converged from a passive (continuous) scan.
  val isPassivelyFocused: Boolean
}
|  |  | ||||||
// Auto-Focus (AF) state, mapped from CaptureResult.CONTROL_AF_STATE.
enum class FocusState : AutoState {
  Unknown,
  Inactive,
  Scanning,
  Focused,
  Unfocused,
  PassiveScanning,
  PassiveFocused,
  PassiveUnfocused;

  // An active AF scan has finished, either focused or not.
  override val isCompleted: Boolean
    get() = this == Focused || this == Unfocused
  override val isPassivelyFocused: Boolean
    get() = this == PassiveFocused

  companion object {
    // Convert a raw CONTROL_AF_STATE integer into a FocusState; unrecognized values map to Unknown.
    fun fromAFState(afState: Int): FocusState =
      when (afState) {
        CaptureResult.CONTROL_AF_STATE_INACTIVE -> Inactive
        CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN -> Scanning
        CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED -> Focused
        CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED -> Unfocused
        CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN -> PassiveScanning
        CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED -> PassiveFocused
        CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED -> PassiveUnfocused
        else -> Unknown
      }
  }
}
// Auto-Exposure (AE) state, mapped from CaptureResult.CONTROL_AE_STATE.
enum class ExposureState : AutoState {
  Unknown,
  Locked,
  Inactive,
  Precapture,
  Searching,
  Converged,
  FlashRequired;

  // AE finished metering: either converged, or converged-but-needs-flash.
  override val isCompleted: Boolean
    get() = this == Converged || this == FlashRequired
  override val isPassivelyFocused: Boolean
    get() = this == Converged

  companion object {
    // Convert a raw CONTROL_AE_STATE integer into an ExposureState; unrecognized values map to Unknown.
    fun fromAEState(aeState: Int): ExposureState =
      when (aeState) {
        CaptureResult.CONTROL_AE_STATE_INACTIVE -> Inactive
        CaptureResult.CONTROL_AE_STATE_SEARCHING -> Searching
        CaptureResult.CONTROL_AE_STATE_PRECAPTURE -> Precapture
        CaptureResult.CONTROL_AE_STATE_CONVERGED -> Converged
        CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED -> FlashRequired
        CaptureResult.CONTROL_AE_STATE_LOCKED -> Locked
        else -> Unknown
      }
  }
}
|  |  | ||||||
// Auto-White-Balance (AWB) state, mapped from CaptureResult.CONTROL_AWB_STATE.
enum class WhiteBalanceState : AutoState {
  Unknown,
  Inactive,
  Locked,
  Searching,
  Converged;

  override val isCompleted: Boolean
    get() = this == Converged
  override val isPassivelyFocused: Boolean
    get() = this == Converged

  companion object {
    // Convert a raw CONTROL_AWB_STATE integer into a WhiteBalanceState; unrecognized values map to Unknown.
    fun fromAWBState(awbState: Int): WhiteBalanceState =
      when (awbState) {
        CaptureResult.CONTROL_AWB_STATE_INACTIVE -> Inactive
        CaptureResult.CONTROL_AWB_STATE_SEARCHING -> Searching
        CaptureResult.CONTROL_AWB_STATE_CONVERGED -> Converged
        CaptureResult.CONTROL_AWB_STATE_LOCKED -> Locked
        else -> Unknown
      }
  }
}
|  |  | ||||||
|  | data class ResultState(val focusState: FocusState, val exposureState: ExposureState, val whiteBalanceState: WhiteBalanceState) | ||||||
|  |  | ||||||
/**
 * Set a new repeating request for the [CameraCaptureSession] that contains a precapture trigger,
 * and suspend until all of the given [precaptureTriggers] have locked.
 *
 * @param request The repeating [CaptureRequest] containing the precapture trigger(s).
 * @param timeoutMs Maximum time to wait for all triggers to lock before failing.
 * @param precaptureTriggers The 3A routines (AF/AE/AWB) whose completion we wait for.
 * @return The final AF/AE/AWB states once every requested trigger has completed.
 * @throws CaptureTimedOutError when the triggers did not lock within [timeoutMs].
 * @throws CaptureAbortedError when a capture failed (e.g. the session was closed).
 */
suspend fun CameraCaptureSession.setRepeatingRequestAndWaitForPrecapture(
  request: CaptureRequest,
  timeoutMs: Long,
  vararg precaptureTriggers: PrecaptureTrigger
): ResultState =
  suspendCancellableCoroutine { continuation ->
    // Map<PrecaptureTrigger, Boolean> of all completed precaptures
    val completed = precaptureTriggers.associateWith { false }.toMutableMap()

    // Watchdog: fail the continuation if the triggers don't lock in time.
    // Keep the Job so we can cancel it as soon as the capture resolves, instead of
    // leaking a coroutine that sleeps for the full timeout after every precapture.
    val timeoutJob = CoroutineScope(Dispatchers.Default).launch {
      delay(timeoutMs) // after timeout, cancel capture
      if (continuation.isActive) {
        Log.e(TAG, "Precapture timed out after ${timeoutMs / 1000} seconds!")
        continuation.resumeWithException(CaptureTimedOutError())
        try {
          setRepeatingRequest(request, null, null)
        } catch (e: Throwable) {
          // session might have already been closed
          Log.e(TAG, "Error resetting session repeating request..", e)
        }
      }
    }
    // If the caller cancels the suspending coroutine, stop the watchdog too.
    continuation.invokeOnCancellation { timeoutJob.cancel() }

    this.setRepeatingRequest(
      request,
      object : CameraCaptureSession.CaptureCallback() {
        override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
          super.onCaptureCompleted(session, request, result)

          if (continuation.isActive) {
            // Missing state keys fall back to INACTIVE (a device may not report every routine).
            val afState = FocusState.fromAFState(result.get(CaptureResult.CONTROL_AF_STATE) ?: CaptureResult.CONTROL_AF_STATE_INACTIVE)
            val aeState = ExposureState.fromAEState(
              result.get(CaptureResult.CONTROL_AE_STATE) ?: CaptureResult.CONTROL_AE_STATE_INACTIVE
            )
            val awbState = WhiteBalanceState.fromAWBState(
              result.get(CaptureResult.CONTROL_AWB_STATE) ?: CaptureResult.CONTROL_AWB_STATE_INACTIVE
            )
            Log.i(TAG, "Precapture state: AF: $afState, AE: $aeState, AWB: $awbState")

            // AF Precapture
            if (precaptureTriggers.contains(PrecaptureTrigger.AF)) {
              completed[PrecaptureTrigger.AF] = afState.isCompleted
            }
            // AE Precapture
            if (precaptureTriggers.contains(PrecaptureTrigger.AE)) {
              completed[PrecaptureTrigger.AE] = aeState.isCompleted
            }
            // AWB Precapture
            if (precaptureTriggers.contains(PrecaptureTrigger.AWB)) {
              completed[PrecaptureTrigger.AWB] = awbState.isCompleted
            }

            if (completed.values.all { it }) {
              // All precaptures did complete!
              timeoutJob.cancel()
              continuation.resume(ResultState(afState, aeState, awbState))
              session.setRepeatingRequest(request, null, null)
            }
          }
        }
        override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
          super.onCaptureFailed(session, request, failure)

          if (continuation.isActive) {
            // Capture failed or session closed.
            timeoutJob.cancel()
            continuation.resumeWithException(CaptureAbortedError(failure.wasImageCaptured()))
            try {
              session.setRepeatingRequest(request, null, null)
            } catch (e: Throwable) {
              Log.e(TAG, "Failed to continue repeating request!", e)
            }
          }
        }
      },
      null
    )
  }
| @@ -0,0 +1,9 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  |  | ||||||
/**
 * Abort all in-flight and pending captures on this session, swallowing any error
 * (e.g. when the session has already been closed).
 */
fun CameraCaptureSession.tryAbortCaptures() {
  runCatching { abortCaptures() }
}
| @@ -0,0 +1,9 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  |  | ||||||
/**
 * Stop the session's current repeating request, swallowing any error
 * (e.g. when the session has already been closed).
 */
fun CameraCaptureSession.tryStopRepeating() {
  runCatching { stopRepeating() }
}
| @@ -1,39 +1,13 @@ | |||||||
| package com.mrousavy.camera.extensions | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics | import android.hardware.camera2.CameraCharacteristics | ||||||
| import android.media.CamcorderProfile |  | ||||||
| import android.os.Build |  | ||||||
| import android.util.Size | import android.util.Size | ||||||
|  | import com.mrousavy.camera.utils.CamcorderProfileUtils | ||||||
| private fun getMaximumVideoSize(cameraId: String): Size? { |  | ||||||
|   try { |  | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { |  | ||||||
|       val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH) |  | ||||||
|       if (profiles != null) { |  | ||||||
|         val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height } |  | ||||||
|         if (largestProfile != null) { |  | ||||||
|           return Size(largestProfile.width, largestProfile.height) |  | ||||||
|         } |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     val cameraIdInt = cameraId.toIntOrNull() |  | ||||||
|     if (cameraIdInt != null) { |  | ||||||
|       val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH) |  | ||||||
|       return Size(profile.videoFrameWidth, profile.videoFrameHeight) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     return null |  | ||||||
|   } catch (e: Throwable) { |  | ||||||
|     // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. |  | ||||||
|     return null |  | ||||||
|   } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List<Size> { | fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List<Size> { | ||||||
|   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! |   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! | ||||||
|   val sizes = config.getOutputSizes(format) ?: emptyArray() |   val sizes = config.getOutputSizes(format) ?: emptyArray() | ||||||
|   val maxVideoSize = getMaximumVideoSize(cameraId) |   val maxVideoSize = CamcorderProfileUtils.getMaximumVideoSize(cameraId) | ||||||
|   if (maxVideoSize != null) { |   if (maxVideoSize != null) { | ||||||
|     return sizes.filter { it.bigger <= maxVideoSize.bigger } |     return sizes.filter { it.bigger <= maxVideoSize.bigger } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -1,29 +0,0 @@ | |||||||
| package com.mrousavy.camera.extensions |  | ||||||
|  |  | ||||||
| import android.content.res.Resources |  | ||||||
| import android.hardware.camera2.CameraCharacteristics |  | ||||||
| import android.util.Size |  | ||||||
| import android.view.SurfaceHolder |  | ||||||
|  |  | ||||||
| fun getMaximumPreviewSize(): Size { |  | ||||||
|   // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap |  | ||||||
|   // According to the Android Developer documentation, PREVIEW streams can have a resolution |  | ||||||
|   // of up to the phone's display's resolution, with a maximum of 1920x1080. |  | ||||||
|   val display1080p = Size(1080, 1920) |  | ||||||
|   val displaySize = Size( |  | ||||||
|     Resources.getSystem().displayMetrics.widthPixels, |  | ||||||
|     Resources.getSystem().displayMetrics.heightPixels |  | ||||||
|   ) |  | ||||||
|   val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller |  | ||||||
|  |  | ||||||
|   return if (isHighResScreen) display1080p else displaySize |  | ||||||
| } |  | ||||||
|  |  | ||||||
| fun CameraCharacteristics.getPreviewTargetSize(targetSize: Size?): Size { |  | ||||||
|   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! |  | ||||||
|   val maximumPreviewSize = getMaximumPreviewSize() |  | ||||||
|   val outputSizes = config.getOutputSizes(SurfaceHolder::class.java) |  | ||||||
|     .filter { it.bigger <= maximumPreviewSize.bigger && it.smaller <= maximumPreviewSize.smaller } |  | ||||||
|  |  | ||||||
|   return outputSizes.closestToOrMax(targetSize) |  | ||||||
| } |  | ||||||
| @@ -1,104 +0,0 @@ | |||||||
| package com.mrousavy.camera.extensions |  | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics |  | ||||||
| import android.hardware.camera2.CameraDevice |  | ||||||
| import android.hardware.camera2.CameraManager |  | ||||||
| import android.hardware.camera2.CaptureRequest |  | ||||||
| import android.view.Surface |  | ||||||
| import com.mrousavy.camera.types.Flash |  | ||||||
| import com.mrousavy.camera.types.Orientation |  | ||||||
| import com.mrousavy.camera.types.QualityPrioritization |  | ||||||
|  |  | ||||||
| private fun supportsSnapshotCapture(cameraCharacteristics: CameraCharacteristics): Boolean { |  | ||||||
|   // As per CameraDevice.TEMPLATE_VIDEO_SNAPSHOT in documentation: |  | ||||||
|   val hardwareLevel = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!! |  | ||||||
|   if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) return false |  | ||||||
|  |  | ||||||
|   val capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!! |  | ||||||
|   val hasDepth = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) |  | ||||||
|   val isBackwardsCompatible = !capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) |  | ||||||
|   if (hasDepth && !isBackwardsCompatible) return false |  | ||||||
|  |  | ||||||
|   return true |  | ||||||
| } |  | ||||||
|  |  | ||||||
| fun CameraDevice.createPhotoCaptureRequest( |  | ||||||
|   cameraManager: CameraManager, |  | ||||||
|   surface: Surface, |  | ||||||
|   zoom: Float, |  | ||||||
|   qualityPrioritization: QualityPrioritization, |  | ||||||
|   flashMode: Flash, |  | ||||||
|   enableRedEyeReduction: Boolean, |  | ||||||
|   enableAutoStabilization: Boolean, |  | ||||||
|   enableHdr: Boolean, |  | ||||||
|   orientation: Orientation |  | ||||||
| ): CaptureRequest { |  | ||||||
|   val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id) |  | ||||||
|  |  | ||||||
|   val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) { |  | ||||||
|     CameraDevice.TEMPLATE_VIDEO_SNAPSHOT |  | ||||||
|   } else { |  | ||||||
|     CameraDevice.TEMPLATE_STILL_CAPTURE |  | ||||||
|   } |  | ||||||
|   val captureRequest = this.createCaptureRequest(template) |  | ||||||
|   captureRequest.addTarget(surface) |  | ||||||
|  |  | ||||||
|   // TODO: Maybe we can even expose that prop directly? |  | ||||||
|   val jpegQuality = when (qualityPrioritization) { |  | ||||||
|     QualityPrioritization.SPEED -> 85 |  | ||||||
|     QualityPrioritization.BALANCED -> 92 |  | ||||||
|     QualityPrioritization.QUALITY -> 100 |  | ||||||
|   } |  | ||||||
|   captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte()) |  | ||||||
|  |  | ||||||
|   captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees()) |  | ||||||
|  |  | ||||||
|   // TODO: Use the same options as from the preview request. This is duplicate code! |  | ||||||
|  |  | ||||||
|   when (flashMode) { |  | ||||||
|     // Set the Flash Mode |  | ||||||
|     Flash.OFF -> { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) |  | ||||||
|       captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF) |  | ||||||
|     } |  | ||||||
|     Flash.ON -> { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) |  | ||||||
|       captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) |  | ||||||
|     } |  | ||||||
|     Flash.AUTO -> { |  | ||||||
|       if (enableRedEyeReduction) { |  | ||||||
|         captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) |  | ||||||
|       } else { |  | ||||||
|         captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH) |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   if (enableAutoStabilization) { |  | ||||||
|     // Enable optical or digital image stabilization |  | ||||||
|     val digitalStabilization = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) |  | ||||||
|     val hasDigitalStabilization = digitalStabilization?.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON) ?: false |  | ||||||
|  |  | ||||||
|     val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) |  | ||||||
|     val hasOpticalStabilization = opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false |  | ||||||
|     if (hasOpticalStabilization) { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF) |  | ||||||
|       captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) |  | ||||||
|     } else if (hasDigitalStabilization) { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) |  | ||||||
|     } else { |  | ||||||
|       // no stabilization is supported. ignore it |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   // TODO: Check if that zoom value is even supported. |  | ||||||
|   captureRequest.setZoom(zoom, cameraCharacteristics) |  | ||||||
|  |  | ||||||
|   // Set HDR |  | ||||||
|   // TODO: Check if that value is even supported |  | ||||||
|   if (enableHdr) { |  | ||||||
|     captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   return captureRequest.build() |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,13 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  |  | ||||||
|  | val CameraDevice.isValid: Boolean | ||||||
|  |   get() { | ||||||
|  |     try { | ||||||
|  |       this.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW) | ||||||
|  |       return true | ||||||
|  |     } catch (e: Throwable) { | ||||||
|  |       return false | ||||||
|  |     } | ||||||
|  |   } | ||||||
| @@ -1,20 +1,18 @@ | |||||||
| package com.mrousavy.camera.extensions | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics |  | ||||||
| import android.hardware.camera2.CaptureRequest | import android.hardware.camera2.CaptureRequest | ||||||
| import android.os.Build | import android.os.Build | ||||||
| import android.util.Range | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  |  | ||||||
| fun CaptureRequest.Builder.setZoom(zoom: Float, cameraCharacteristics: CameraCharacteristics) { | fun CaptureRequest.Builder.setZoom(zoom: Float, deviceDetails: CameraDeviceDetails) { | ||||||
|   if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { |   val zoomRange = deviceDetails.zoomRange | ||||||
|     val zoomRange = cameraCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) ?: Range(1f, 1f) |  | ||||||
|   val zoomClamped = zoomRange.clamp(zoom) |   val zoomClamped = zoomRange.clamp(zoom) | ||||||
|  |  | ||||||
|  |   if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { | ||||||
|     this.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomClamped) |     this.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomClamped) | ||||||
|   } else { |   } else { | ||||||
|     val maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) |     val size = deviceDetails.activeSize | ||||||
|     val zoomRange = Range(1f, maxZoom ?: 1f) |  | ||||||
|     val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! |  | ||||||
|     val zoomClamped = zoomRange.clamp(zoom) |  | ||||||
|     this.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoomClamped)) |     this.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoomClamped)) | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,25 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.graphics.Point | ||||||
|  | import android.graphics.PointF | ||||||
|  | import android.util.Log | ||||||
|  | import android.util.Size | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
|  |  | ||||||
|  | fun Point.rotatedBy(fromSize: Size, toSize: Size, fromOrientation: Orientation, toOrientation: Orientation): Point { | ||||||
|  |   val differenceDegrees = (fromOrientation.toDegrees() + toOrientation.toDegrees()) % 360 | ||||||
|  |   val difference = Orientation.fromRotationDegrees(differenceDegrees) | ||||||
|  |   val normalizedPoint = PointF(this.x / fromSize.width.toFloat(), this.y / fromSize.height.toFloat()) | ||||||
|  |  | ||||||
|  |   val rotatedNormalizedPoint = when (difference) { | ||||||
|  |     Orientation.PORTRAIT -> normalizedPoint | ||||||
|  |     Orientation.PORTRAIT_UPSIDE_DOWN -> PointF(1 - normalizedPoint.x, 1 - normalizedPoint.y) | ||||||
|  |     Orientation.LANDSCAPE_LEFT -> PointF(normalizedPoint.y, 1 - normalizedPoint.x) | ||||||
|  |     Orientation.LANDSCAPE_RIGHT -> PointF(1 - normalizedPoint.y, normalizedPoint.x) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   val rotatedX = rotatedNormalizedPoint.x * toSize.width | ||||||
|  |   val rotatedY = rotatedNormalizedPoint.y * toSize.height | ||||||
|  |   Log.i("ROTATE", "$this -> $normalizedPoint -> $difference -> $rotatedX, $rotatedY") | ||||||
|  |   return Point(rotatedX.toInt(), rotatedY.toInt()) | ||||||
|  | } | ||||||
| @@ -4,9 +4,9 @@ import android.media.CamcorderProfile | |||||||
| import android.media.MediaRecorder.VideoEncoder | import android.media.MediaRecorder.VideoEncoder | ||||||
| import android.os.Build | import android.os.Build | ||||||
| import android.util.Log | import android.util.Log | ||||||
| import android.util.Size |  | ||||||
| import com.mrousavy.camera.core.RecordingSession | import com.mrousavy.camera.core.RecordingSession | ||||||
| import com.mrousavy.camera.types.VideoCodec | import com.mrousavy.camera.types.VideoCodec | ||||||
|  | import com.mrousavy.camera.utils.CamcorderProfileUtils | ||||||
| import kotlin.math.abs | import kotlin.math.abs | ||||||
|  |  | ||||||
| data class RecommendedProfile( | data class RecommendedProfile( | ||||||
| @@ -23,7 +23,7 @@ fun RecordingSession.getRecommendedBitRate(fps: Int, codec: VideoCodec, hdr: Boo | |||||||
|   val targetResolution = size |   val targetResolution = size | ||||||
|   val encoder = codec.toVideoEncoder() |   val encoder = codec.toVideoEncoder() | ||||||
|   val bitDepth = if (hdr) 10 else 8 |   val bitDepth = if (hdr) 10 else 8 | ||||||
|   val quality = findClosestCamcorderProfileQuality(cameraId, targetResolution) |   val quality = CamcorderProfileUtils.findClosestCamcorderProfileQuality(cameraId, targetResolution, true) | ||||||
|   Log.i("CamcorderProfile", "Closest matching CamcorderProfile: $quality") |   Log.i("CamcorderProfile", "Closest matching CamcorderProfile: $quality") | ||||||
|  |  | ||||||
|   var recommendedProfile: RecommendedProfile? = null |   var recommendedProfile: RecommendedProfile? = null | ||||||
| @@ -75,39 +75,3 @@ fun RecordingSession.getRecommendedBitRate(fps: Int, codec: VideoCodec, hdr: Boo | |||||||
|   } |   } | ||||||
|   return bitRate.toInt() |   return bitRate.toInt() | ||||||
| } | } | ||||||
|  |  | ||||||
| private fun getResolutionForCamcorderProfileQuality(camcorderProfile: Int): Int = |  | ||||||
|   when (camcorderProfile) { |  | ||||||
|     CamcorderProfile.QUALITY_QCIF -> 176 * 144 |  | ||||||
|     CamcorderProfile.QUALITY_QVGA -> 320 * 240 |  | ||||||
|     CamcorderProfile.QUALITY_CIF -> 352 * 288 |  | ||||||
|     CamcorderProfile.QUALITY_VGA -> 640 * 480 |  | ||||||
|     CamcorderProfile.QUALITY_480P -> 720 * 480 |  | ||||||
|     CamcorderProfile.QUALITY_720P -> 1280 * 720 |  | ||||||
|     CamcorderProfile.QUALITY_1080P -> 1920 * 1080 |  | ||||||
|     CamcorderProfile.QUALITY_2K -> 2048 * 1080 |  | ||||||
|     CamcorderProfile.QUALITY_QHD -> 2560 * 1440 |  | ||||||
|     CamcorderProfile.QUALITY_2160P -> 3840 * 2160 |  | ||||||
|     CamcorderProfile.QUALITY_4KDCI -> 4096 * 2160 |  | ||||||
|     CamcorderProfile.QUALITY_8KUHD -> 7680 * 4320 |  | ||||||
|     else -> throw Error("Invalid CamcorderProfile \"$camcorderProfile\"!") |  | ||||||
|   } |  | ||||||
|  |  | ||||||
| private fun findClosestCamcorderProfileQuality(cameraId: String, resolution: Size): Int { |  | ||||||
|   // Iterate through all available CamcorderProfiles and find the one that matches the closest |  | ||||||
|   val targetResolution = resolution.width * resolution.height |  | ||||||
|   val cameraIdInt = cameraId.toIntOrNull() |  | ||||||
|  |  | ||||||
|   val profiles = (CamcorderProfile.QUALITY_QCIF..CamcorderProfile.QUALITY_8KUHD).filter { profile -> |  | ||||||
|     if (cameraIdInt != null) { |  | ||||||
|       return@filter CamcorderProfile.hasProfile(cameraIdInt, profile) |  | ||||||
|     } else { |  | ||||||
|       return@filter CamcorderProfile.hasProfile(profile) |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|   val closestProfile = profiles.minBy { profile -> |  | ||||||
|     val currentResolution = getResolutionForCamcorderProfileQuality(profile) |  | ||||||
|     return@minBy abs(currentResolution - targetResolution) |  | ||||||
|   } |  | ||||||
|   return closestProfile |  | ||||||
| } |  | ||||||
|   | |||||||
| @@ -2,7 +2,7 @@ package com.mrousavy.camera.extensions | |||||||
|  |  | ||||||
| import android.util.Size | import android.util.Size | ||||||
| import android.util.SizeF | import android.util.SizeF | ||||||
| import android.view.Surface | import com.mrousavy.camera.types.Orientation | ||||||
| import kotlin.math.abs | import kotlin.math.abs | ||||||
| import kotlin.math.max | import kotlin.math.max | ||||||
| import kotlin.math.min | import kotlin.math.min | ||||||
| @@ -14,13 +14,10 @@ fun List<Size>.closestToOrMax(size: Size?): Size = | |||||||
|     this.maxBy { it.width * it.height } |     this.maxBy { it.width * it.height } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| fun Size.rotated(surfaceRotation: Int): Size = | fun Size.rotatedBy(orientation: Orientation): Size = | ||||||
|   when (surfaceRotation) { |   when (orientation) { | ||||||
|     Surface.ROTATION_0 -> Size(width, height) |     Orientation.PORTRAIT, Orientation.PORTRAIT_UPSIDE_DOWN -> this | ||||||
|     Surface.ROTATION_90 -> Size(height, width) |     Orientation.LANDSCAPE_LEFT, Orientation.LANDSCAPE_RIGHT -> Size(height, width) | ||||||
|     Surface.ROTATION_180 -> Size(width, height) |  | ||||||
|     Surface.ROTATION_270 -> Size(height, width) |  | ||||||
|     else -> Size(width, height) |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
| val Size.bigger: Int | val Size.bigger: Int | ||||||
|   | |||||||
| @@ -0,0 +1,41 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.util.Log | ||||||
|  | import android.view.SurfaceHolder | ||||||
|  | import androidx.annotation.UiThread | ||||||
|  | import kotlin.coroutines.resume | ||||||
|  | import kotlinx.coroutines.suspendCancellableCoroutine | ||||||
|  |  | ||||||
|  | private const val TAG = "SurfaceHolder" | ||||||
|  |  | ||||||
|  | @UiThread | ||||||
|  | suspend fun SurfaceHolder.resize(targetWidth: Int, targetHeight: Int) { | ||||||
|  |   return suspendCancellableCoroutine { continuation -> | ||||||
|  |     val currentSize = this.surfaceFrame | ||||||
|  |     if (currentSize.width() == targetWidth && currentSize.height() == targetHeight) { | ||||||
|  |       // Already in target size | ||||||
|  |       continuation.resume(Unit) | ||||||
|  |       return@suspendCancellableCoroutine | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     Log.i(TAG, "Resizing SurfaceHolder to $targetWidth x $targetHeight...") | ||||||
|  |  | ||||||
|  |     val callback = object : SurfaceHolder.Callback { | ||||||
|  |       override fun surfaceCreated(holder: SurfaceHolder) = Unit | ||||||
|  |       override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { | ||||||
|  |         if (width == targetWidth && height == targetHeight) { | ||||||
|  |           holder.removeCallback(this) | ||||||
|  |           Log.i(TAG, "Resized SurfaceHolder to $width x $height!") | ||||||
|  |           continuation.resume(Unit) | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       override fun surfaceDestroyed(holder: SurfaceHolder) { | ||||||
|  |         holder.removeCallback(this) | ||||||
|  |         Log.e(TAG, "Failed to resize SurfaceHolder to $targetWidth x $targetHeight!") | ||||||
|  |         continuation.cancel(Error("Tried to resize SurfaceView, but Surface has been destroyed!")) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     this.addCallback(callback) | ||||||
|  |     this.setFixedSize(targetWidth, targetHeight) | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -4,6 +4,7 @@ import android.hardware.HardwareBuffer; | |||||||
| import android.media.Image; | import android.media.Image; | ||||||
| import android.os.Build; | import android.os.Build; | ||||||
| import com.facebook.proguard.annotations.DoNotStrip; | import com.facebook.proguard.annotations.DoNotStrip; | ||||||
|  | import com.mrousavy.camera.core.FrameInvalidError; | ||||||
| import com.mrousavy.camera.core.HardwareBuffersNotAvailableError; | import com.mrousavy.camera.core.HardwareBuffersNotAvailableError; | ||||||
| import com.mrousavy.camera.types.PixelFormat; | import com.mrousavy.camera.types.PixelFormat; | ||||||
| import com.mrousavy.camera.types.Orientation; | import com.mrousavy.camera.types.Orientation; | ||||||
| @@ -23,42 +24,17 @@ public class Frame { | |||||||
|         this.isMirrored = isMirrored; |         this.isMirrored = isMirrored; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public Image getImage() { |     private void assertIsValid() throws FrameInvalidError { | ||||||
|         synchronized (this) { |         if (!getIsImageValid(image)) { | ||||||
|             Image img = image; |             throw new FrameInvalidError(); | ||||||
|             if (!getIsImageValid(img)) { |  | ||||||
|                 throw new RuntimeException("Frame is already closed! " + |  | ||||||
|                     "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n" + |  | ||||||
|                     "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n" + |  | ||||||
|                     "- If you want to do async processing, use `runAsync(...)` instead.\n" + |  | ||||||
|                     "- If you want to use runOnJS, increment it's ref-count: `frame.incrementRefCount()`"); |  | ||||||
|             } |  | ||||||
|             return img; |  | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     private synchronized boolean getIsImageValid(Image image) { | ||||||
|     @DoNotStrip |         if (refCount <= 0) return false; | ||||||
|     public int getWidth() { |  | ||||||
|         return getImage().getWidth(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |  | ||||||
|     @DoNotStrip |  | ||||||
|     public int getHeight() { |  | ||||||
|         return getImage().getHeight(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |  | ||||||
|     @DoNotStrip |  | ||||||
|     public boolean getIsValid() { |  | ||||||
|         return getIsImageValid(getImage()); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private boolean getIsImageValid(Image image) { |  | ||||||
|         try { |         try { | ||||||
|             // will throw an exception if the image is already closed |             // will throw an exception if the image is already closed | ||||||
|             synchronized (this) { image.getFormat(); } |             image.getFormat(); | ||||||
|             // no exception thrown, image must still be valid. |             // no exception thrown, image must still be valid. | ||||||
|             return true; |             return true; | ||||||
|         } catch (IllegalStateException e) { |         } catch (IllegalStateException e) { | ||||||
| @@ -67,78 +43,104 @@ public class Frame { | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     public synchronized Image getImage() { | ||||||
|  |         return image; | ||||||
|  |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public boolean getIsMirrored() { |     public synchronized int getWidth() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|  |         return image.getWidth(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SuppressWarnings("unused") | ||||||
|  |     @DoNotStrip | ||||||
|  |     public synchronized int getHeight() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|  |         return image.getHeight(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SuppressWarnings("unused") | ||||||
|  |     @DoNotStrip | ||||||
|  |     public synchronized boolean getIsValid() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|  |         return getIsImageValid(image); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SuppressWarnings("unused") | ||||||
|  |     @DoNotStrip | ||||||
|  |     public synchronized boolean getIsMirrored() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|         return isMirrored; |         return isMirrored; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public long getTimestamp() { |     public synchronized long getTimestamp() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|         return timestamp; |         return timestamp; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public Orientation getOrientation() { |     public synchronized Orientation getOrientation() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|         return orientation; |         return orientation; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public PixelFormat getPixelFormat() { |     public synchronized PixelFormat getPixelFormat() throws FrameInvalidError { | ||||||
|         return PixelFormat.Companion.fromImageFormat(getImage().getFormat()); |         assertIsValid(); | ||||||
|  |         return PixelFormat.Companion.fromImageFormat(image.getFormat()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public int getPlanesCount() { |     public synchronized int getPlanesCount() throws FrameInvalidError { | ||||||
|         return getImage().getPlanes().length; |         assertIsValid(); | ||||||
|  |         return image.getPlanes().length; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public int getBytesPerRow() { |     public synchronized int getBytesPerRow() throws FrameInvalidError { | ||||||
|         return getImage().getPlanes()[0].getRowStride(); |         assertIsValid(); | ||||||
|  |         return image.getPlanes()[0].getRowStride(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError { |     private Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError, FrameInvalidError { | ||||||
|         return getHardwareBuffer(); |         return getHardwareBuffer(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError { |     public synchronized HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError, FrameInvalidError { | ||||||
|         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) { |         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) { | ||||||
|             throw new HardwareBuffersNotAvailableError(); |             throw new HardwareBuffersNotAvailableError(); | ||||||
|         } |         } | ||||||
|         return getImage().getHardwareBuffer(); |         assertIsValid(); | ||||||
|  |         return image.getHardwareBuffer(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public void incrementRefCount() { |     public synchronized void incrementRefCount() { | ||||||
|         synchronized (this) { |  | ||||||
|         refCount++; |         refCount++; | ||||||
|     } |     } | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public void decrementRefCount() { |     public synchronized void decrementRefCount() { | ||||||
|         synchronized (this) { |  | ||||||
|         refCount--; |         refCount--; | ||||||
|         if (refCount <= 0) { |         if (refCount <= 0) { | ||||||
|             // If no reference is held on this Image, close it. |             // If no reference is held on this Image, close it. | ||||||
|             close(); |             close(); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private void close() { |     private synchronized void close() { | ||||||
|         synchronized (this) { |  | ||||||
|         image.close(); |         image.close(); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| } |  | ||||||
|   | |||||||
| @@ -21,6 +21,8 @@ public final class FrameProcessor { | |||||||
|     @Keep |     @Keep | ||||||
|     private final HybridData mHybridData; |     private final HybridData mHybridData; | ||||||
|  |  | ||||||
|  |     @DoNotStrip | ||||||
|  |     @Keep | ||||||
|     public FrameProcessor(HybridData hybridData) { |     public FrameProcessor(HybridData hybridData) { | ||||||
|         mHybridData = hybridData; |         mHybridData = hybridData; | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -14,7 +14,7 @@ import com.mrousavy.camera.core.ViewNotFoundError | |||||||
| import java.lang.ref.WeakReference | import java.lang.ref.WeakReference | ||||||
|  |  | ||||||
| @Suppress("KotlinJniMissingFunction") // we use fbjni. | @Suppress("KotlinJniMissingFunction") // we use fbjni. | ||||||
| class VisionCameraProxy(context: ReactApplicationContext) { | class VisionCameraProxy(private val reactContext: ReactApplicationContext) { | ||||||
|   companion object { |   companion object { | ||||||
|     const val TAG = "VisionCameraProxy" |     const val TAG = "VisionCameraProxy" | ||||||
|   } |   } | ||||||
| @@ -24,6 +24,8 @@ class VisionCameraProxy(context: ReactApplicationContext) { | |||||||
|   private var mHybridData: HybridData |   private var mHybridData: HybridData | ||||||
|   private var mContext: WeakReference<ReactApplicationContext> |   private var mContext: WeakReference<ReactApplicationContext> | ||||||
|   private var mScheduler: VisionCameraScheduler |   private var mScheduler: VisionCameraScheduler | ||||||
|  |   val context: ReactApplicationContext | ||||||
|  |     get() = reactContext | ||||||
|  |  | ||||||
|   init { |   init { | ||||||
|     val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl |     val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl | ||||||
|   | |||||||
| @@ -3,20 +3,12 @@ package com.mrousavy.camera.types | |||||||
| import com.facebook.react.bridge.ReadableMap | import com.facebook.react.bridge.ReadableMap | ||||||
| import com.mrousavy.camera.core.InvalidTypeScriptUnionError | import com.mrousavy.camera.core.InvalidTypeScriptUnionError | ||||||
|  |  | ||||||
| class CodeScannerOptions(map: ReadableMap) { | data class CodeScannerOptions(val codeTypes: List<CodeType>) { | ||||||
|   val codeTypes: List<CodeType> |   companion object { | ||||||
|  |     fun fromJSValue(value: ReadableMap): CodeScannerOptions { | ||||||
|   init { |       val jsCodeTypes = value.getArray("codeTypes") ?: throw InvalidTypeScriptUnionError("codeScanner", value.toString()) | ||||||
|     val codeTypes = map.getArray("codeTypes")?.toArrayList() ?: throw InvalidTypeScriptUnionError("codeScanner", map.toString()) |       val codeTypes = jsCodeTypes.toArrayList().map { CodeType.fromUnionValue(it as String) } | ||||||
|     this.codeTypes = codeTypes.map { |       return CodeScannerOptions(codeTypes) | ||||||
|       return@map CodeType.fromUnionValue(it as String) |  | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun equals(other: Any?): Boolean { |  | ||||||
|     if (other !is CodeScannerOptions) return false |  | ||||||
|     return codeTypes.size == other.codeTypes.size && codeTypes.containsAll(other.codeTypes) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   override fun hashCode(): Int = codeTypes.hashCode() |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -13,6 +13,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|   EAN_8("ean-8"), |   EAN_8("ean-8"), | ||||||
|   ITF("itf"), |   ITF("itf"), | ||||||
|   UPC_E("upc-e"), |   UPC_E("upc-e"), | ||||||
|  |   UPC_A("upc-a"), | ||||||
|   QR("qr"), |   QR("qr"), | ||||||
|   PDF_417("pdf-417"), |   PDF_417("pdf-417"), | ||||||
|   AZTEC("aztec"), |   AZTEC("aztec"), | ||||||
| @@ -29,6 +30,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|       EAN_8 -> Barcode.FORMAT_EAN_8 |       EAN_8 -> Barcode.FORMAT_EAN_8 | ||||||
|       ITF -> Barcode.FORMAT_ITF |       ITF -> Barcode.FORMAT_ITF | ||||||
|       UPC_E -> Barcode.FORMAT_UPC_E |       UPC_E -> Barcode.FORMAT_UPC_E | ||||||
|  |       UPC_A -> Barcode.FORMAT_UPC_A | ||||||
|       QR -> Barcode.FORMAT_QR_CODE |       QR -> Barcode.FORMAT_QR_CODE | ||||||
|       PDF_417 -> Barcode.FORMAT_PDF417 |       PDF_417 -> Barcode.FORMAT_PDF417 | ||||||
|       AZTEC -> Barcode.FORMAT_AZTEC |       AZTEC -> Barcode.FORMAT_AZTEC | ||||||
| @@ -47,6 +49,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|         Barcode.FORMAT_EAN_8 -> EAN_8 |         Barcode.FORMAT_EAN_8 -> EAN_8 | ||||||
|         Barcode.FORMAT_ITF -> ITF |         Barcode.FORMAT_ITF -> ITF | ||||||
|         Barcode.FORMAT_UPC_E -> UPC_E |         Barcode.FORMAT_UPC_E -> UPC_E | ||||||
|  |         Barcode.FORMAT_UPC_A -> UPC_A | ||||||
|         Barcode.FORMAT_QR_CODE -> QR |         Barcode.FORMAT_QR_CODE -> QR | ||||||
|         Barcode.FORMAT_PDF417 -> PDF_417 |         Barcode.FORMAT_PDF417 -> PDF_417 | ||||||
|         Barcode.FORMAT_AZTEC -> AZTEC |         Barcode.FORMAT_AZTEC -> AZTEC | ||||||
| @@ -64,6 +67,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|         "ean-8" -> EAN_8 |         "ean-8" -> EAN_8 | ||||||
|         "itf" -> ITF |         "itf" -> ITF | ||||||
|         "upc-e" -> UPC_E |         "upc-e" -> UPC_E | ||||||
|  |         "upc-a" -> UPC_A | ||||||
|         "qr" -> QR |         "qr" -> QR | ||||||
|         "pdf-417" -> PDF_417 |         "pdf-417" -> PDF_417 | ||||||
|         "aztec" -> AZTEC |         "aztec" -> AZTEC | ||||||
|   | |||||||
| @@ -0,0 +1,36 @@ | |||||||
|  | package com.mrousavy.camera.types | ||||||
|  |  | ||||||
|  | import com.facebook.react.bridge.Arguments | ||||||
|  | import com.facebook.react.bridge.WritableMap | ||||||
|  | import com.facebook.react.uimanager.events.Event | ||||||
|  |  | ||||||
|  | class CameraInitializedEvent(surfaceId: Int, viewId: Int) : Event<CameraInitializedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraInitialized" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraStartedEvent(surfaceId: Int, viewId: Int) : Event<CameraStartedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraStarted" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraStoppedEvent(surfaceId: Int, viewId: Int) : Event<CameraStoppedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraStopped" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraErrorEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : Event<CameraErrorEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraError" | ||||||
|  |   override fun getEventData() = data | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraViewReadyEvent(surfaceId: Int, viewId: Int) : Event<CameraViewReadyEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraViewReady" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraCodeScannedEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : | ||||||
|  |   Event<CameraCodeScannedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraCodeScanned" | ||||||
|  |   override fun getEventData() = data | ||||||
|  | } | ||||||
| @@ -9,6 +9,19 @@ enum class HardwareLevel(override val unionValue: String) : JSUnionValue { | |||||||
|   FULL("full"), |   FULL("full"), | ||||||
|   LEVEL_3("full"); |   LEVEL_3("full"); | ||||||
|  |  | ||||||
|  |   private val rank: Int | ||||||
|  |     get() { | ||||||
|  |       return when (this) { | ||||||
|  |         LEGACY -> 0 | ||||||
|  |         LIMITED -> 1 | ||||||
|  |         EXTERNAL -> 1 | ||||||
|  |         FULL -> 2 | ||||||
|  |         LEVEL_3 -> 3 | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |   fun isAtLeast(level: HardwareLevel): Boolean = this.rank >= level.rank | ||||||
|  |  | ||||||
|   companion object { |   companion object { | ||||||
|     fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel = |     fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel = | ||||||
|       when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { |       when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| package com.mrousavy.camera.types | package com.mrousavy.camera.types | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  |  | ||||||
| enum class Orientation(override val unionValue: String) : JSUnionValue { | enum class Orientation(override val unionValue: String) : JSUnionValue { | ||||||
|   PORTRAIT("portrait"), |   PORTRAIT("portrait"), | ||||||
| @@ -19,23 +19,22 @@ enum class Orientation(override val unionValue: String) : JSUnionValue { | |||||||
|   fun toDegrees(): Int = |   fun toDegrees(): Int = | ||||||
|     when (this) { |     when (this) { | ||||||
|       PORTRAIT -> 0 |       PORTRAIT -> 0 | ||||||
|       LANDSCAPE_RIGHT -> 90 |       LANDSCAPE_LEFT -> 90 | ||||||
|       PORTRAIT_UPSIDE_DOWN -> 180 |       PORTRAIT_UPSIDE_DOWN -> 180 | ||||||
|       LANDSCAPE_LEFT -> 270 |       LANDSCAPE_RIGHT -> 270 | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   fun toSensorRelativeOrientation(cameraCharacteristics: CameraCharacteristics): Orientation { |   fun toSensorRelativeOrientation(deviceDetails: CameraDeviceDetails): Orientation { | ||||||
|     val sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! |  | ||||||
|  |  | ||||||
|     // Convert target orientation to rotation degrees (0, 90, 180, 270) |     // Convert target orientation to rotation degrees (0, 90, 180, 270) | ||||||
|     var rotationDegrees = this.toDegrees() |     var rotationDegrees = this.toDegrees() | ||||||
|  |  | ||||||
|     // Reverse device orientation for front-facing cameras |     // Reverse device orientation for front-facing cameras | ||||||
|     val facingFront = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT |     if (deviceDetails.lensFacing == LensFacing.FRONT) { | ||||||
|     if (facingFront) rotationDegrees = -rotationDegrees |       rotationDegrees = -rotationDegrees | ||||||
|  |     } | ||||||
|  |  | ||||||
|     // Rotate sensor rotation by target rotation |     // Rotate sensor rotation by target rotation | ||||||
|     val newRotationDegrees = (sensorOrientation + rotationDegrees + 360) % 360 |     val newRotationDegrees = (deviceDetails.sensorOrientation.toDegrees() + rotationDegrees + 360) % 360 | ||||||
|  |  | ||||||
|     return fromRotationDegrees(newRotationDegrees) |     return fromRotationDegrees(newRotationDegrees) | ||||||
|   } |   } | ||||||
| @@ -52,9 +51,9 @@ enum class Orientation(override val unionValue: String) : JSUnionValue { | |||||||
|  |  | ||||||
|     fun fromRotationDegrees(rotationDegrees: Int): Orientation = |     fun fromRotationDegrees(rotationDegrees: Int): Orientation = | ||||||
|       when (rotationDegrees) { |       when (rotationDegrees) { | ||||||
|         in 45..135 -> LANDSCAPE_RIGHT |         in 45..135 -> LANDSCAPE_LEFT | ||||||
|         in 135..225 -> PORTRAIT_UPSIDE_DOWN |         in 135..225 -> PORTRAIT_UPSIDE_DOWN | ||||||
|         in 225..315 -> LANDSCAPE_LEFT |         in 225..315 -> LANDSCAPE_RIGHT | ||||||
|         else -> PORTRAIT |         else -> PORTRAIT | ||||||
|       } |       } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -1,5 +1,7 @@ | |||||||
| package com.mrousavy.camera.types | package com.mrousavy.camera.types | ||||||
|  |  | ||||||
|  | import com.mrousavy.camera.core.InvalidTypeScriptUnionError | ||||||
|  |  | ||||||
| enum class ResizeMode(override val unionValue: String) : JSUnionValue { | enum class ResizeMode(override val unionValue: String) : JSUnionValue { | ||||||
|   COVER("cover"), |   COVER("cover"), | ||||||
|   CONTAIN("contain"); |   CONTAIN("contain"); | ||||||
| @@ -9,7 +11,7 @@ enum class ResizeMode(override val unionValue: String) : JSUnionValue { | |||||||
|       when (unionValue) { |       when (unionValue) { | ||||||
|         "cover" -> COVER |         "cover" -> COVER | ||||||
|         "contain" -> CONTAIN |         "contain" -> CONTAIN | ||||||
|         else -> COVER |         else -> throw InvalidTypeScriptUnionError("resizeMode", unionValue) | ||||||
|       } |       } | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -13,21 +13,6 @@ enum class VideoStabilizationMode(override val unionValue: String) : JSUnionValu | |||||||
|   CINEMATIC("cinematic"), |   CINEMATIC("cinematic"), | ||||||
|   CINEMATIC_EXTENDED("cinematic-extended"); |   CINEMATIC_EXTENDED("cinematic-extended"); | ||||||
|  |  | ||||||
|   fun toDigitalStabilizationMode(): Int = |  | ||||||
|     when (this) { |  | ||||||
|       OFF -> CONTROL_VIDEO_STABILIZATION_MODE_OFF |  | ||||||
|       STANDARD -> CONTROL_VIDEO_STABILIZATION_MODE_ON |  | ||||||
|       CINEMATIC -> 2 // TODO: CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION |  | ||||||
|       else -> CONTROL_VIDEO_STABILIZATION_MODE_OFF |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|   fun toOpticalStabilizationMode(): Int = |  | ||||||
|     when (this) { |  | ||||||
|       OFF -> LENS_OPTICAL_STABILIZATION_MODE_OFF |  | ||||||
|       CINEMATIC_EXTENDED -> LENS_OPTICAL_STABILIZATION_MODE_ON |  | ||||||
|       else -> LENS_OPTICAL_STABILIZATION_MODE_OFF |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|   companion object : JSUnionValue.Companion<VideoStabilizationMode> { |   companion object : JSUnionValue.Companion<VideoStabilizationMode> { | ||||||
|     override fun fromUnionValue(unionValue: String?): VideoStabilizationMode = |     override fun fromUnionValue(unionValue: String?): VideoStabilizationMode = | ||||||
|       when (unionValue) { |       when (unionValue) { | ||||||
|   | |||||||
| @@ -0,0 +1,101 @@ | |||||||
|  | package com.mrousavy.camera.utils | ||||||
|  |  | ||||||
|  | import android.media.CamcorderProfile | ||||||
|  | import android.os.Build | ||||||
|  | import android.util.Size | ||||||
|  | import kotlin.math.abs | ||||||
|  |  | ||||||
|  | class CamcorderProfileUtils { | ||||||
|  |   companion object { | ||||||
|  |     private fun getResolutionForCamcorderProfileQuality(camcorderProfile: Int): Int = | ||||||
|  |       when (camcorderProfile) { | ||||||
|  |         CamcorderProfile.QUALITY_QCIF -> 176 * 144 | ||||||
|  |         CamcorderProfile.QUALITY_QVGA -> 320 * 240 | ||||||
|  |         CamcorderProfile.QUALITY_CIF -> 352 * 288 | ||||||
|  |         CamcorderProfile.QUALITY_VGA -> 640 * 480 | ||||||
|  |         CamcorderProfile.QUALITY_480P -> 720 * 480 | ||||||
|  |         CamcorderProfile.QUALITY_720P -> 1280 * 720 | ||||||
|  |         CamcorderProfile.QUALITY_1080P -> 1920 * 1080 | ||||||
|  |         CamcorderProfile.QUALITY_2K -> 2048 * 1080 | ||||||
|  |         CamcorderProfile.QUALITY_QHD -> 2560 * 1440 | ||||||
|  |         CamcorderProfile.QUALITY_2160P -> 3840 * 2160 | ||||||
|  |         CamcorderProfile.QUALITY_4KDCI -> 4096 * 2160 | ||||||
|  |         CamcorderProfile.QUALITY_8KUHD -> 7680 * 4320 | ||||||
|  |         else -> throw Error("Invalid CamcorderProfile \"$camcorderProfile\"!") | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |     fun findClosestCamcorderProfileQuality(cameraId: String, resolution: Size, allowLargerSize: Boolean): Int { | ||||||
|  |       // Iterate through all available CamcorderProfiles and find the one that matches the closest | ||||||
|  |       val targetResolution = resolution.width * resolution.height | ||||||
|  |       val cameraIdInt = cameraId.toIntOrNull() | ||||||
|  |  | ||||||
|  |       var profiles = (CamcorderProfile.QUALITY_QCIF..CamcorderProfile.QUALITY_8KUHD).filter { profile -> | ||||||
|  |         if (cameraIdInt != null) { | ||||||
|  |           return@filter CamcorderProfile.hasProfile(cameraIdInt, profile) | ||||||
|  |         } else { | ||||||
|  |           return@filter CamcorderProfile.hasProfile(profile) | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       if (!allowLargerSize) { | ||||||
|  |         profiles = profiles.filter { profile -> | ||||||
|  |           val currentResolution = getResolutionForCamcorderProfileQuality(profile) | ||||||
|  |           return@filter currentResolution <= targetResolution | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       val closestProfile = profiles.minBy { profile -> | ||||||
|  |         val currentResolution = getResolutionForCamcorderProfileQuality(profile) | ||||||
|  |         return@minBy abs(currentResolution - targetResolution) | ||||||
|  |       } | ||||||
|  |       return closestProfile | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     fun getMaximumVideoSize(cameraId: String): Size? { | ||||||
|  |       try { | ||||||
|  |         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { | ||||||
|  |           val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH) | ||||||
|  |           if (profiles != null) { | ||||||
|  |             val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height } | ||||||
|  |             if (largestProfile != null) { | ||||||
|  |               return Size(largestProfile.width, largestProfile.height) | ||||||
|  |             } | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         val cameraIdInt = cameraId.toIntOrNull() | ||||||
|  |         if (cameraIdInt != null) { | ||||||
|  |           val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH) | ||||||
|  |           return Size(profile.videoFrameWidth, profile.videoFrameHeight) | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return null | ||||||
|  |       } catch (e: Throwable) { | ||||||
|  |         // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. | ||||||
|  |         return null | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     fun getMaximumFps(cameraId: String, size: Size): Int? { | ||||||
|  |       try { | ||||||
|  |         val quality = findClosestCamcorderProfileQuality(cameraId, size, false) | ||||||
|  |  | ||||||
|  |         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { | ||||||
|  |           val profiles = CamcorderProfile.getAll(cameraId, quality) | ||||||
|  |           if (profiles != null) { | ||||||
|  |             return profiles.videoProfiles.maxOf { profile -> profile.frameRate } | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         val cameraIdInt = cameraId.toIntOrNull() | ||||||
|  |         if (cameraIdInt != null) { | ||||||
|  |           val profile = CamcorderProfile.get(cameraIdInt, quality) | ||||||
|  |           return profile.videoFrameRate | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return null | ||||||
|  |       } catch (e: Throwable) { | ||||||
|  |         // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. | ||||||
|  |         return null | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -30,7 +30,7 @@ To try the playground out for yourself, run the following commands: | |||||||
|  |  | ||||||
| ```sh | ```sh | ||||||
| git clone https://github.com/mrousavy/react-native-vision-camera | git clone https://github.com/mrousavy/react-native-vision-camera | ||||||
| cd react-native-vision-camera | cd react-native-vision-camera/package | ||||||
| yarn bootstrap | yarn bootstrap | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
|   | |||||||
| @@ -484,7 +484,7 @@ PODS: | |||||||
|     - libwebp (~> 1.0) |     - libwebp (~> 1.0) | ||||||
|     - SDWebImage/Core (~> 5.10) |     - SDWebImage/Core (~> 5.10) | ||||||
|   - SocketRocket (0.6.1) |   - SocketRocket (0.6.1) | ||||||
|   - VisionCamera (3.8.2): |   - VisionCamera (3.9.0-beta.6): | ||||||
|     - React |     - React | ||||||
|     - React-callinvoker |     - React-callinvoker | ||||||
|     - React-Core |     - React-Core | ||||||
| @@ -724,9 +724,9 @@ SPEC CHECKSUMS: | |||||||
|   SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d |   SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d | ||||||
|   SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d |   SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d | ||||||
|   SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 |   SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 | ||||||
|   VisionCamera: edbcd00e27a438b2228f67823e2b8d15a189065f |   VisionCamera: 33c90675adf75528199f840f81dfbe74a2fe6c3f | ||||||
|   Yoga: 4c3aa327e4a6a23eeacd71f61c81df1bcdf677d5 |   Yoga: 4c3aa327e4a6a23eeacd71f61c81df1bcdf677d5 | ||||||
|  |  | ||||||
| PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb | PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb | ||||||
|  |  | ||||||
| COCOAPODS: 1.11.3 | COCOAPODS: 1.14.3 | ||||||
|   | |||||||
| @@ -304,6 +304,15 @@ public final class CameraView: UIView, CameraSessionDelegate { | |||||||
|     onInitialized([:]) |     onInitialized([:]) | ||||||
|   } |   } | ||||||
|    |    | ||||||
|  |   func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) { | ||||||
|  |     guard let configuration, let difference else { return } | ||||||
|  |      | ||||||
|  |     if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection { | ||||||
|  |       let videoPreviewLayer = previewView.videoPreviewLayer | ||||||
|  |       connection.setOrientation(configuration.orientation) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|   func onCameraStarted() { |   func onCameraStarted() { | ||||||
|     ReactLogger.log(level: .info, message: "Camera started!") |     ReactLogger.log(level: .info, message: "Camera started!") | ||||||
|     guard let onStarted = onStarted else { |     guard let onStarted = onStarted else { | ||||||
|   | |||||||
| @@ -93,7 +93,7 @@ enum DeviceError: String { | |||||||
|     case .lowLightBoostNotSupported: |     case .lowLightBoostNotSupported: | ||||||
|       return "The currently selected camera device does not support low-light boost! Select a device where `device.supportsLowLightBoost` is true." |       return "The currently selected camera device does not support low-light boost! Select a device where `device.supportsLowLightBoost` is true." | ||||||
|     case .focusNotSupported: |     case .focusNotSupported: | ||||||
|       return "The currently selected camera device does not support focussing!" |       return "The currently selected camera device does not support focusing!" | ||||||
|     case .microphoneUnavailable: |     case .microphoneUnavailable: | ||||||
|       return "The microphone was unavailable." |       return "The microphone was unavailable." | ||||||
|     case .notAvailableOnSimulator: |     case .notAvailableOnSimulator: | ||||||
|   | |||||||
| @@ -109,6 +109,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|         try lambda(config) |         try lambda(config) | ||||||
|       } catch { |       } catch { | ||||||
|         self.onConfigureError(error) |         self.onConfigureError(error) | ||||||
|  |         return | ||||||
|       } |       } | ||||||
|       let difference = CameraConfiguration.Difference(between: self.configuration, and: config) |       let difference = CameraConfiguration.Difference(between: self.configuration, and: config) | ||||||
|  |  | ||||||
| @@ -117,7 +118,8 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|       do { |       do { | ||||||
|         // If needed, configure the AVCaptureSession (inputs, outputs) |         // If needed, configure the AVCaptureSession (inputs, outputs) | ||||||
|         if difference.isSessionConfigurationDirty { |         if difference.isSessionConfigurationDirty { | ||||||
|           try self.withSessionLock { |           self.captureSession.beginConfiguration() | ||||||
|  |  | ||||||
|           // 1. Update input device |           // 1. Update input device | ||||||
|           if difference.inputChanged { |           if difference.inputChanged { | ||||||
|             try self.configureDevice(configuration: config) |             try self.configureDevice(configuration: config) | ||||||
| @@ -135,11 +137,18 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|             self.configureOrientation(configuration: config) |             self.configureOrientation(configuration: config) | ||||||
|           } |           } | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         guard let device = self.videoDeviceInput?.device else { | ||||||
|  |           throw CameraError.device(.noDevice) | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) |         // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) | ||||||
|         if difference.isDeviceConfigurationDirty { |         if difference.isDeviceConfigurationDirty { | ||||||
|           try self.withDeviceLock { device in |           try device.lockForConfiguration() | ||||||
|  |           defer { | ||||||
|  |             device.unlockForConfiguration() | ||||||
|  |           } | ||||||
|  |  | ||||||
|           // 4. Configure format |           // 4. Configure format | ||||||
|           if difference.formatChanged { |           if difference.formatChanged { | ||||||
|             try self.configureFormat(configuration: config, device: device) |             try self.configureFormat(configuration: config, device: device) | ||||||
| @@ -162,6 +171,11 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|             self.configureExposure(configuration: config, device: device) |             self.configureExposure(configuration: config, device: device) | ||||||
|           } |           } | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         if difference.isSessionConfigurationDirty { | ||||||
|  |           // We commit the session config updates AFTER the device config, | ||||||
|  |           // that way we can also batch those changes into one update instead of doing two updates. | ||||||
|  |           self.captureSession.commitConfiguration() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // 9. Start or stop the session if needed |         // 9. Start or stop the session if needed | ||||||
| @@ -169,9 +183,11 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|  |  | ||||||
|         // 10. Enable or disable the Torch if needed (requires session to be running) |         // 10. Enable or disable the Torch if needed (requires session to be running) | ||||||
|         if difference.torchChanged { |         if difference.torchChanged { | ||||||
|           try self.withDeviceLock { device in |           try device.lockForConfiguration() | ||||||
|             try self.configureTorch(configuration: config, device: device) |           defer { | ||||||
|  |             device.unlockForConfiguration() | ||||||
|           } |           } | ||||||
|  |           try self.configureTorch(configuration: config, device: device) | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // Notify about Camera initialization |         // Notify about Camera initialization | ||||||
| @@ -179,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|           self.delegate?.onSessionInitialized() |           self.delegate?.onSessionInitialized() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         self.delegate?.onCameraConfigurationChanged(config, difference) | ||||||
|         // After configuring, set this to the new configuration. |         // After configuring, set this to the new configuration. | ||||||
|         self.configuration = config |         self.configuration = config | ||||||
|       } catch { |       } catch { | ||||||
| @@ -206,41 +223,6 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    Runs the given [lambda] under an AVCaptureSession configuration lock (`beginConfiguration()`) |  | ||||||
|    */ |  | ||||||
|   private func withSessionLock(_ lambda: () throws -> Void) throws { |  | ||||||
|     // Lock Capture Session for configuration |  | ||||||
|     ReactLogger.log(level: .info, message: "Beginning CameraSession configuration...") |  | ||||||
|     captureSession.beginConfiguration() |  | ||||||
|     defer { |  | ||||||
|       // Unlock Capture Session again and submit configuration to Hardware |  | ||||||
|       self.captureSession.commitConfiguration() |  | ||||||
|       ReactLogger.log(level: .info, message: "Committed CameraSession configuration!") |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Call lambda |  | ||||||
|     try lambda() |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    Runs the given [lambda] under an AVCaptureDevice configuration lock (`lockForConfiguration()`) |  | ||||||
|    */ |  | ||||||
|   private func withDeviceLock(_ lambda: (_ device: AVCaptureDevice) throws -> Void) throws { |  | ||||||
|     guard let device = videoDeviceInput?.device else { |  | ||||||
|       throw CameraError.session(.cameraNotReady) |  | ||||||
|     } |  | ||||||
|     ReactLogger.log(level: .info, message: "Beginning CaptureDevice configuration...") |  | ||||||
|     try device.lockForConfiguration() |  | ||||||
|     defer { |  | ||||||
|       device.unlockForConfiguration() |  | ||||||
|       ReactLogger.log(level: .info, message: "Committed CaptureDevice configuration!") |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Call lambda with Device |  | ||||||
|     try lambda(device) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |   /** | ||||||
|    Starts or stops the CaptureSession if needed (`isActive`) |    Starts or stops the CaptureSession if needed (`isActive`) | ||||||
|    */ |    */ | ||||||
|   | |||||||
| @@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject { | |||||||
|    Called when the [CameraSession] successfully initializes |    Called when the [CameraSession] successfully initializes | ||||||
|    */ |    */ | ||||||
|   func onSessionInitialized() |   func onSessionInitialized() | ||||||
|  |    | ||||||
|  |   func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) | ||||||
|   /** |   /** | ||||||
|    Called when the [CameraSession] starts streaming frames. (isActive=true) |    Called when the [CameraSession] starts streaming frames. (isActive=true) | ||||||
|    */ |    */ | ||||||
|   | |||||||
| @@ -32,6 +32,15 @@ extension AVCaptureOutput { | |||||||
|   func setOrientation(_ orientation: Orientation) { |   func setOrientation(_ orientation: Orientation) { | ||||||
|     // Set orientation for each connection |     // Set orientation for each connection | ||||||
|     for connection in connections { |     for connection in connections { | ||||||
|  |       connection.setOrientation(orientation) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | extension AVCaptureConnection { | ||||||
|  |   func setOrientation(_ orientation: Orientation) { | ||||||
|     #if swift(>=5.9) |     #if swift(>=5.9) | ||||||
|       if #available(iOS 17.0, *) { |       if #available(iOS 17.0, *) { | ||||||
|         // Camera Sensors are always in landscape rotation (90deg). |         // Camera Sensors are always in landscape rotation (90deg). | ||||||
| @@ -41,19 +50,18 @@ extension AVCaptureOutput { | |||||||
|  |  | ||||||
|         // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording. |         // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording. | ||||||
|         //       Does that work when we flip the camera? |         //       Does that work when we flip the camera? | ||||||
|           if connection.isVideoRotationAngleSupported(degrees) { |         if isVideoRotationAngleSupported(degrees) { | ||||||
|             connection.videoRotationAngle = degrees |           videoRotationAngle = degrees | ||||||
|         } |         } | ||||||
|       } else { |       } else { | ||||||
|           if connection.isVideoOrientationSupported { |         if isVideoOrientationSupported { | ||||||
|             connection.videoOrientation = orientation.toAVCaptureVideoOrientation() |           videoOrientation = orientation.toAVCaptureVideoOrientation() | ||||||
|         } |         } | ||||||
|       } |       } | ||||||
|     #else |     #else | ||||||
|         if connection.isVideoOrientationSupported { |       if isVideoOrientationSupported { | ||||||
|           connection.videoOrientation = orientation.toAVCaptureVideoOrientation() |         videoOrientation = orientation.toAVCaptureVideoOrientation() | ||||||
|       } |       } | ||||||
|     #endif |     #endif | ||||||
|   } |   } | ||||||
| } | } | ||||||
| } |  | ||||||
|   | |||||||
| @@ -40,6 +40,9 @@ extension AVMetadataObject.ObjectType { | |||||||
|     case "upc-e": |     case "upc-e": | ||||||
|       self = .upce |       self = .upce | ||||||
|       return |       return | ||||||
|  |     case "upc-a": | ||||||
|  |       self = .ean13 | ||||||
|  |       return | ||||||
|     case "qr": |     case "qr": | ||||||
|       self = .qr |       self = .qr | ||||||
|       return |       return | ||||||
|   | |||||||
| @@ -113,5 +113,19 @@ class ViewController: UIViewController { | |||||||
|     } |     } | ||||||
|   } |   } | ||||||
|    |    | ||||||
|  |   override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) { | ||||||
|  |     switch UIDevice.current.orientation { | ||||||
|  |     case .landscapeLeft: | ||||||
|  |       cameraView.orientation = "landscape-right" | ||||||
|  |     case .landscapeRight: | ||||||
|  |       cameraView.orientation = "landscape-left" | ||||||
|  |     default: | ||||||
|  |       cameraView.orientation = "portrait" | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     cameraView.didSetProps([]) | ||||||
|  |     super.viewWillTransition(to: size, with: coordinator) | ||||||
|  |   } | ||||||
|  |    | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "name": "react-native-vision-camera", |   "name": "react-native-vision-camera", | ||||||
|   "version": "3.8.2", |   "version": "3.9.2", | ||||||
|   "description": "A powerful, high-performance React Native Camera library.", |   "description": "A powerful, high-performance React Native Camera library.", | ||||||
|   "main": "lib/commonjs/index", |   "main": "lib/commonjs/index", | ||||||
|   "module": "lib/module/index", |   "module": "lib/module/index", | ||||||
| @@ -49,26 +49,33 @@ | |||||||
|     "postpack": "rm ./README.md" |     "postpack": "rm ./README.md" | ||||||
|   }, |   }, | ||||||
|   "keywords": [ |   "keywords": [ | ||||||
|     "react-native", |  | ||||||
|     "ios", |  | ||||||
|     "android", |  | ||||||
|     "camera", |  | ||||||
|     "vision", |  | ||||||
|     "native", |  | ||||||
|     "module", |  | ||||||
|     "react", |     "react", | ||||||
|  |     "native", | ||||||
|  |     "camera", | ||||||
|  |     "react-native", | ||||||
|  |     "react-native-camera", | ||||||
|  |     "vision", | ||||||
|  |     "javascript", | ||||||
|  |     "typescript", | ||||||
|  |     "android", | ||||||
|  |     "ios", | ||||||
|  |     "library", | ||||||
|  |     "instagram", | ||||||
|  |     "snapchat", | ||||||
|     "ai", |     "ai", | ||||||
|     "ar", |  | ||||||
|     "qr", |  | ||||||
|     "qr-code", |  | ||||||
|     "barcode", |  | ||||||
|     "scanner", |     "scanner", | ||||||
|  |     "qrcode", | ||||||
|  |     "barcode", | ||||||
|  |     "qr-code", | ||||||
|  |     "jsi", | ||||||
|  |     "worklet", | ||||||
|  |     "module", | ||||||
|     "frame", |     "frame", | ||||||
|     "processing", |     "processing", | ||||||
|     "realtime" |     "realtime" | ||||||
|   ], |   ], | ||||||
|   "repository": "https://github.com/mrousavy/react-native-vision-camera", |   "repository": "https://github.com/mrousavy/react-native-vision-camera", | ||||||
|   "author": "Marc Rousavy <marcrousavy@hotmail.com> (https://github.com/mrousavy)", |   "author": "Marc Rousavy <me@mrousavy.com> (https://github.com/mrousavy)", | ||||||
|   "license": "MIT", |   "license": "MIT", | ||||||
|   "bugs": { |   "bugs": { | ||||||
|     "url": "https://github.com/mrousavy/react-native-vision-camera/issues" |     "url": "https://github.com/mrousavy/react-native-vision-camera/issues" | ||||||
| @@ -159,5 +166,6 @@ | |||||||
|         } |         } | ||||||
|       ] |       ] | ||||||
|     ] |     ] | ||||||
|   } |   }, | ||||||
|  |   "packageManager": "yarn@1.22.19+sha1.4ba7fc5c6e704fce2066ecbfb0b0d8976fe62447" | ||||||
| } | } | ||||||
|   | |||||||
| @@ -5,5 +5,6 @@ if which clang-format >/dev/null; then | |||||||
|     clang-format -style=file:./cpp/.clang-format -i "$file" |     clang-format -style=file:./cpp/.clang-format -i "$file" | ||||||
|   done |   done | ||||||
| else | else | ||||||
|   echo "warning: clang-format not installed, install with 'brew install clang-format' (or manually from https://clang.llvm.org/docs/ClangFormat.html)" |   echo "error: clang-format not installed, install with 'brew install clang-format' (or manually from https://clang.llvm.org/docs/ClangFormat.html)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -3,5 +3,6 @@ | |||||||
| if which ktlint >/dev/null; then | if which ktlint >/dev/null; then | ||||||
|   cd android && ktlint --color --relative --editorconfig=./.editorconfig -F ./**/*.kt* |   cd android && ktlint --color --relative --editorconfig=./.editorconfig -F ./**/*.kt* | ||||||
| else | else | ||||||
|   echo "warning: KTLint not installed, install with 'brew install ktlint' (or manually from https://github.com/pinterest/ktlint)" |   echo "error: KTLint not installed, install with 'brew install ktlint' (or manually from https://github.com/pinterest/ktlint)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -3,5 +3,6 @@ | |||||||
| if which swiftformat >/dev/null; then | if which swiftformat >/dev/null; then | ||||||
|   cd ios && swiftformat --quiet . |   cd ios && swiftformat --quiet . | ||||||
| else | else | ||||||
|   echo "warning: SwiftFormat not installed, install with 'brew install swiftformat' (or manually from https://github.com/nicklockwood/SwiftFormat)" |   echo "error: SwiftFormat not installed, install with 'brew install swiftformat' (or manually from https://github.com/nicklockwood/SwiftFormat)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -3,5 +3,6 @@ | |||||||
| if which swiftlint >/dev/null; then | if which swiftlint >/dev/null; then | ||||||
|   cd ios && swiftlint --quiet --fix && swiftlint --quiet |   cd ios && swiftlint --quiet --fix && swiftlint --quiet | ||||||
| else | else | ||||||
|   echo "warning: SwiftLint not installed, install with 'brew install swiftlint' (or manually from https://github.com/realm/SwiftLint)" |   echo "error: SwiftLint not installed, install with 'brew install swiftlint' (or manually from https://github.com/realm/SwiftLint)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -473,7 +473,8 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> { | |||||||
|  |  | ||||||
|     // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition |     // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition | ||||||
|     if (device == null) { |     if (device == null) { | ||||||
|       throw new Error( |       throw new CameraRuntimeError( | ||||||
|  |         'device/no-device', | ||||||
|         'Camera: `device` is null! Select a valid Camera device. See: https://mrousavy.com/react-native-vision-camera/docs/guides/devices', |         'Camera: `device` is null! Select a valid Camera device. See: https://mrousavy.com/react-native-vision-camera/docs/guides/devices', | ||||||
|       ) |       ) | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -25,6 +25,7 @@ export type SessionError = | |||||||
|   | 'session/camera-cannot-be-opened' |   | 'session/camera-cannot-be-opened' | ||||||
|   | 'session/camera-has-been-disconnected' |   | 'session/camera-has-been-disconnected' | ||||||
|   | 'session/audio-in-use-by-other-app' |   | 'session/audio-in-use-by-other-app' | ||||||
|  |   | 'session/no-outputs' | ||||||
|   | 'session/audio-session-failed-to-activate' |   | 'session/audio-session-failed-to-activate' | ||||||
| export type CodeScannerError = | export type CodeScannerError = | ||||||
|   | 'code-scanner/not-compatible-with-outputs' |   | 'code-scanner/not-compatible-with-outputs' | ||||||
| @@ -40,7 +41,10 @@ export type CaptureError = | |||||||
|   | 'capture/recorder-error' |   | 'capture/recorder-error' | ||||||
|   | 'capture/video-not-enabled' |   | 'capture/video-not-enabled' | ||||||
|   | 'capture/photo-not-enabled' |   | 'capture/photo-not-enabled' | ||||||
|  |   | 'capture/frame-invalid' | ||||||
|   | 'capture/aborted' |   | 'capture/aborted' | ||||||
|  |   | 'capture/focus-canceled' | ||||||
|  |   | 'capture/timed-out' | ||||||
|   | 'capture/unknown' |   | 'capture/unknown' | ||||||
| export type SystemError = | export type SystemError = | ||||||
|   | 'system/camera-module-not-found' |   | 'system/camera-module-not-found' | ||||||
|   | |||||||
| @@ -183,10 +183,29 @@ export interface CameraProps extends ViewProps { | |||||||
|    * |    * | ||||||
|    * @platform iOS |    * @platform iOS | ||||||
|    * @default |    * @default | ||||||
|    * - true // if video={true} and frameProcessor={undefined} |    * - true // if frameProcessor={undefined} | ||||||
|    * - false // otherwise |    * - false // otherwise | ||||||
|    */ |    */ | ||||||
|   enableBufferCompression?: boolean |   enableBufferCompression?: boolean | ||||||
|  |   /** | ||||||
|  |    * Enables or disables GPU-sampled buffers for the video stream. This only takes effect when using a {@linkcode frameProcessor}. | ||||||
|  |    * | ||||||
|  |    * When recording a Video ({@linkcode video}) while a Frame Processor is running ({@linkcode frameProcessor}), | ||||||
|  |    * the {@linkcode Frame | Frames} will need to be forwarded to the Media Encoder. | ||||||
|  |    * | ||||||
|  |    * - When `enableGpuBuffers` is `false`, the Video Pipeline will use CPU buffers causing an additional copy | ||||||
|  |    * from the Frame Processor to the Media Encoder, which potentially results in increased latency. | ||||||
|  |    * - When `enableGpuBuffers` is `true`, the Video Pipeline will use shared GPU buffers which greatly increases | ||||||
|  |    * it's efficiency as an additional buffer copy is avoided. | ||||||
|  |    * (See [`USAGE_GPU_SAMPLED_IMAGE`](https://developer.android.com/reference/android/hardware/HardwareBuffer#USAGE_GPU_SAMPLED_IMAGE)) | ||||||
|  |    * | ||||||
|  |    * In general, it is recommended to set this to `true` if possible, as this can increase performance and efficiency of the Video Pipeline. | ||||||
|  |    * | ||||||
|  |    * @experimental This is an experimental feature flag, use at your own risk. Some devices (especially Samsungs) may crash when trying to use GPU buffers. | ||||||
|  |    * @platform Android (API 29+) | ||||||
|  |    * @default false | ||||||
|  |    */ | ||||||
|  |   enableGpuBuffers?: boolean | ||||||
|   /** |   /** | ||||||
|    * Enables or disables low-light boost on this camera device. |    * Enables or disables low-light boost on this camera device. | ||||||
|    * |    * | ||||||
| @@ -227,6 +246,7 @@ export interface CameraProps extends ViewProps { | |||||||
|    * * Dual Device fusion for greater detail ([`isDualCameraDualPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings/2873917-isdualcameradualphotodeliveryena)) |    * * Dual Device fusion for greater detail ([`isDualCameraDualPhotoDeliveryEnabled`](https://developer.apple.com/documentation/avfoundation/avcapturephotosettings/2873917-isdualcameradualphotodeliveryena)) | ||||||
|    * * Sets the maximum quality prioritization to `.quality` ([`maxPhotoQualityPrioritization`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3182995-maxphotoqualityprioritization)) |    * * Sets the maximum quality prioritization to `.quality` ([`maxPhotoQualityPrioritization`](https://developer.apple.com/documentation/avfoundation/avcapturephotooutput/3182995-maxphotoqualityprioritization)) | ||||||
|    * |    * | ||||||
|  |    * @platform iOS | ||||||
|    * @default false |    * @default false | ||||||
|    */ |    */ | ||||||
|   enableHighQualityPhotos?: boolean |   enableHighQualityPhotos?: boolean | ||||||
|   | |||||||
| @@ -12,6 +12,7 @@ export type CodeType = | |||||||
|   | 'ean-8' |   | 'ean-8' | ||||||
|   | 'itf' |   | 'itf' | ||||||
|   | 'upc-e' |   | 'upc-e' | ||||||
|  |   | 'upc-a' | ||||||
|   | 'qr' |   | 'qr' | ||||||
|   | 'pdf-417' |   | 'pdf-417' | ||||||
|   | 'aztec' |   | 'aztec' | ||||||
|   | |||||||
| @@ -44,6 +44,14 @@ export interface TakePhotoOptions { | |||||||
|    * @default true |    * @default true | ||||||
|    */ |    */ | ||||||
|   enableShutterSound?: boolean |   enableShutterSound?: boolean | ||||||
|  |   /** | ||||||
|  |    * Whether to run the pre-capture sequence to properly lock AF, AE and AWB values. | ||||||
|  |    * Enabling this results in greater photos, but might not work on some devices. | ||||||
|  |    * | ||||||
|  |    * @platform Android | ||||||
|  |    * @default false | ||||||
|  |    */ | ||||||
|  |   enablePrecapture?: boolean | ||||||
| } | } | ||||||
|  |  | ||||||
| /** | /** | ||||||
|   | |||||||
| @@ -1,9 +1,17 @@ | |||||||
| import { Dimensions } from 'react-native' | import { Dimensions } from 'react-native' | ||||||
| import { FormatFilter } from './getCameraFormat' | import { FormatFilter } from './getCameraFormat' | ||||||
|  |  | ||||||
| type TTemplates = { | type PredefinedTemplates = | ||||||
|   [key: string]: FormatFilter[] |   | 'Video' | ||||||
| } |   | 'Video60Fps' | ||||||
|  |   | 'VideoSlowMotion' | ||||||
|  |   | 'VideoStabilized' | ||||||
|  |   | 'Photo' | ||||||
|  |   | 'PhotoPortrait' | ||||||
|  |   | 'FrameProcessingYUV' | ||||||
|  |   | 'FrameProcessingRGB' | ||||||
|  |   | 'Snapchat' | ||||||
|  |   | 'Instagram' | ||||||
|  |  | ||||||
| const SnapchatResolution = { width: 1920, height: 1080 } | const SnapchatResolution = { width: 1920, height: 1080 } | ||||||
| const InstagramResolution = { width: 3840, height: 2160 } | const InstagramResolution = { width: 3840, height: 2160 } | ||||||
| @@ -16,7 +24,7 @@ const ScreenAspectRatio = Dimensions.get('window').height / Dimensions.get('wind | |||||||
|  * const format = useCameraFormat(device, Templates.Snapchat) |  * const format = useCameraFormat(device, Templates.Snapchat) | ||||||
|  * ``` |  * ``` | ||||||
|  */ |  */ | ||||||
| export const Templates: TTemplates = { | export const Templates: Record<PredefinedTemplates, FormatFilter[]> = { | ||||||
|   /** |   /** | ||||||
|    * Highest resolution video recordings (e.g. 4k) |    * Highest resolution video recordings (e.g. 4k) | ||||||
|    */ |    */ | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user