Compare commits
	
		
			98 Commits
		
	
	
		
			loewy/came
			...
			031aa9d43a
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 031aa9d43a | ||
| fcf5fe70f3 | |||
|  | 3a20c44a31 | ||
| 0329e7976d | |||
| 7c162fecb1 | |||
| b28a152471 | |||
| 65021b895a | |||
|  | b006b1e744 | ||
|  | 694d9cfa8c | ||
|  | 91767e71c8 | ||
|  | 9f2c7906e5 | ||
|  | 621bfe333c | ||
|  | 20f8fa2937 | ||
|  | b03f9ea423 | ||
|  | 98d90a6442 | ||
|  | 0a43d7a160 | ||
|  | a2ce4df663 | ||
|  | 89ecb35616 | ||
|  | d9a1287b68 | ||
|  | 23459b2635 | ||
|  | 83168044a6 | ||
|  | 60925fc816 | ||
|  | d731fe491d | ||
|  | ebe04b246c | ||
|  | 97941a919f | ||
| 952e4a93e1 | |||
|  | 489171f6f3 | ||
|  | 8e1f03907b | ||
|  | 3f1a7c9e32 | ||
|  | e8dd1e0b2e | ||
|  | bca9472ab8 | ||
|  | 79ebae56f3 | ||
|  | 86637ac112 | ||
|  | 8225ac1aff | ||
|  | 83b852acd0 | ||
|  | 5ab7b291db | ||
|  | eb57b3877c | ||
|  | 369cb4a043 | ||
|  | fabf019f66 | ||
|  | 4c159aff61 | ||
|  | 55992bb954 | ||
|  | 7ac6f4d008 | ||
|  | 9af6e61dc8 | ||
|  | 2c52fb01e4 | ||
|  | f6b7163660 | ||
|  | 2c976d8ccd | ||
|  | 147aff8683 | ||
|  | bda43d3984 | ||
|  | b105de0194 | ||
|  | 57c6431353 | ||
|  | aad7d2b8b3 | ||
|  | e6c12e2ed2 | ||
|  | 31754eb74c | ||
|  | 129e21f14e | ||
|  | 4168d8f752 | ||
|  | bcd12649e2 | ||
|  | c5646ca1e2 | ||
|  | 83c0184796 | ||
|  | 5df5ca9adf | ||
|  | 21042048ae | ||
|  | a7701c8c9c | ||
|  | 37398cc909 | ||
|  | 61b2f7dd4a | ||
|  | 14daaaaf9d | ||
|  | 1a0bd8f7c2 | ||
|  | 478688529b | ||
|  | 3699ccde94 | ||
|  | ad33dd91b1 | ||
|  | 0130085376 | ||
|  | ec7ce36d25 | ||
|  | b7e24c444e | ||
|  | a2a2e94865 | ||
|  | 1011c3f039 | ||
|  | 24f43efa35 | ||
|  | 0ed3aed48a | ||
|  | b3a88278de | ||
| 19bf300bbe | |||
|  | 919e6c9fe8 | ||
|  | fb1d82ad9a | ||
|  | fce6616964 | ||
|  | a4e241a431 | ||
|  | 3192f5e939 | ||
|  | b20d0fc5f7 | ||
|  | 395ee7af89 | ||
|  | 3b892c209e | ||
|  | dbb7f80dc9 | ||
|  | 5acc64e031 | ||
|  | cd5fdd4924 | ||
|  | d8c95c901f | ||
| 1312c5be53 | |||
|  | 97168c647c | ||
|  | 14554fa31a | ||
|  | e95264f782 | ||
| 0e05fc314f | |||
| 413be519d5 | |||
|  | 8c66d36d8f | ||
|  | 1a8e518180 | ||
| 009838db75 | 
							
								
								
									
										8
									
								
								.github/workflows/build-android.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.github/workflows/build-android.yml
									
									
									
									
										vendored
									
									
								
							| @@ -30,11 +30,11 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |  | ||||||
|       - name: Setup JDK 11 |       - name: Setup JDK 17 | ||||||
|         uses: actions/setup-java@v4 |         uses: actions/setup-java@v4 | ||||||
|         with: |         with: | ||||||
|           distribution: 'zulu' |           distribution: 'zulu' | ||||||
|           java-version: 11 |           java-version: 17 | ||||||
|           java-package: jdk |           java-package: jdk | ||||||
|  |  | ||||||
|       - name: Get yarn cache directory path |       - name: Get yarn cache directory path | ||||||
| @@ -74,11 +74,11 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v4 |       - uses: actions/checkout@v4 | ||||||
|  |  | ||||||
|       - name: Setup JDK 11 |       - name: Setup JDK 17 | ||||||
|         uses: actions/setup-java@v4 |         uses: actions/setup-java@v4 | ||||||
|         with: |         with: | ||||||
|           distribution: 'zulu' |           distribution: 'zulu' | ||||||
|           java-version: 11 |           java-version: 17 | ||||||
|           java-package: jdk |           java-package: jdk | ||||||
|  |  | ||||||
|       - name: Get yarn cache directory path |       - name: Get yarn cache directory path | ||||||
|   | |||||||
							
								
								
									
										4
									
								
								.github/workflows/build-ios.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/build-ios.yml
									
									
									
									
										vendored
									
									
								
							| @@ -54,7 +54,7 @@ jobs: | |||||||
|           working-directory: package/example/ios |           working-directory: package/example/ios | ||||||
|  |  | ||||||
|       - name: Restore Pods cache |       - name: Restore Pods cache | ||||||
|         uses: actions/cache@v3 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: package/example/ios/Pods |           path: package/example/ios/Pods | ||||||
|           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} |           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} | ||||||
| @@ -113,7 +113,7 @@ jobs: | |||||||
|           working-directory: package/example/ios |           working-directory: package/example/ios | ||||||
|  |  | ||||||
|       - name: Restore Pods cache |       - name: Restore Pods cache | ||||||
|         uses: actions/cache@v3 |         uses: actions/cache@v4 | ||||||
|         with: |         with: | ||||||
|           path: package/example/ios/Pods |           path: package/example/ios/Pods | ||||||
|           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} |           key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} | ||||||
|   | |||||||
							
								
								
									
										33
									
								
								.github/workflows/compress-images.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										33
									
								
								.github/workflows/compress-images.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,33 @@ | |||||||
|  | name: Compress Images (docs) | ||||||
|  | on: | ||||||
|  |   pull_request: | ||||||
|  |     # Run Image Actions when JPG, JPEG, PNG or WebP files are added or changed. | ||||||
|  |     # See https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions#onpushpull_requestpaths for reference. | ||||||
|  |     paths: | ||||||
|  |       - ".github/workflows/compress-images.yml" | ||||||
|  |       - "**.jpg" | ||||||
|  |       - "**.jpeg" | ||||||
|  |       - "**.png" | ||||||
|  |       - "**.webp" | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   compress-images: | ||||||
|  |     # Only run on Pull Requests within the same repository, and not from forks. | ||||||
|  |     if: github.event.pull_request.head.repo.full_name == github.repository | ||||||
|  |     name: 🗂 Compress images | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - name: Checkout Repo | ||||||
|  |         uses: actions/checkout@v4 | ||||||
|  |  | ||||||
|  |       - name: Compress Images | ||||||
|  |         uses: calibreapp/image-actions@main | ||||||
|  |         with: | ||||||
|  |           # The `GITHUB_TOKEN` is automatically generated by GitHub and scoped only to the repository that is currently running the action. By default, the action can’t update Pull Requests initiated from forked repositories. | ||||||
|  |           # See https://docs.github.com/en/actions/reference/authentication-in-a-workflow and https://help.github.com/en/articles/virtual-environments-for-github-actions#token-permissions | ||||||
|  |           githubToken: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |           ignorePaths: "e2e/**" | ||||||
|  |           jpegQuality: "80" | ||||||
|  |           jpegProgressive: false | ||||||
|  |           pngQuality: "80" | ||||||
|  |           webpQuality: "80" | ||||||
| @@ -47,6 +47,15 @@ cd ios && pod install | |||||||
|  |  | ||||||
| To see VisionCamera in action, check out [ShadowLens](https://mrousavy.com/projects/shadowlens)! | To see VisionCamera in action, check out [ShadowLens](https://mrousavy.com/projects/shadowlens)! | ||||||
|  |  | ||||||
|  | <div> | ||||||
|  |   <a href="https://apps.apple.com/app/shadowlens/id6471849004"> | ||||||
|  |     <img height="40" src="docs/static/img/appstore.svg" /> | ||||||
|  |   </a> | ||||||
|  |   <a href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens"> | ||||||
|  |     <img height="40" src="docs/static/img/googleplay.svg" /> | ||||||
|  |   </a> | ||||||
|  | </div> | ||||||
|  |  | ||||||
| ### Example | ### Example | ||||||
|  |  | ||||||
| ```tsx | ```tsx | ||||||
|   | |||||||
| @@ -130,4 +130,23 @@ The Code Scanner will call your [`onCodeScanned`](/docs/api/interfaces/CodeScann | |||||||
|  |  | ||||||
| <br /> | <br /> | ||||||
|  |  | ||||||
|  | ## UPC-A vs EAN-13 codes | ||||||
|  |  | ||||||
|  | UPC-A is a special case to handle if you need to cater for it. Android's SDK officially supports UPC-A but iOS does not, instead they handle the code as EAN-13. Since EAN-13 is a superset of UPC-A, with an extra 0 digit at the front. | ||||||
|  |  | ||||||
|  | This means, the `upc-a` types are reported under the `ean-13` umbrella type on iOS: | ||||||
|  |  | ||||||
|  | ```jsx | ||||||
|  | const codeScanner = useCodeScanner({ | ||||||
|  |   codeTypes: ['upc-a'], // <-- ✅ We configure for 'upc-a' types | ||||||
|  |   onCodeScanned: (codes) => { | ||||||
|  |     for (const code of codes) { | ||||||
|  |       console.log(code.type); // <-- ❌ On iOS, we receive 'ean-13' | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | }) | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You will need to keep this in mind and do the conversion from EAN-13 to UPC-A yourself. This can be done by removing the front `0` digit from the code to get a UPC-A code. | ||||||
|  |  | ||||||
| #### 🚀 Next section: [Frame Processors](frame-processors) | #### 🚀 Next section: [Frame Processors](frame-processors) | ||||||
|   | |||||||
| @@ -34,12 +34,13 @@ To understand a bit more about camera formats, you first need to understand a fe | |||||||
|   * 4k Photos, 1080p Videos, 240 FPS (ultra high FPS/slow motion) |   * 4k Photos, 1080p Videos, 240 FPS (ultra high FPS/slow motion) | ||||||
|   * 720p Photos, 720p Videos, 30 FPS (smaller buffers/e.g. faster face detection) |   * 720p Photos, 720p Videos, 30 FPS (smaller buffers/e.g. faster face detection) | ||||||
| * Each app has different requirements, so the format filtering is up to you. | * Each app has different requirements, so the format filtering is up to you. | ||||||
|  | * The `videoResolution` and `videoAspectRatio` options also affect the preview, as preview is also running in the video stream. | ||||||
|  |  | ||||||
| To get all available formats, simply use the `CameraDevice`'s [`formats` property](/docs/api/interfaces/CameraDevice#formats).  These are a [CameraFormat's](/docs/api/interfaces/CameraDeviceFormat) props: | To get all available formats, simply use the `CameraDevice`'s [`formats` property](/docs/api/interfaces/CameraDevice#formats).  These are a [CameraFormat's](/docs/api/interfaces/CameraDeviceFormat) props: | ||||||
|  |  | ||||||
| - [`photoHeight`](/docs/api/interfaces/CameraDeviceFormat#photoheight)/[`photoWidth`](/docs/api/interfaces/CameraDeviceFormat#photoWidth): The resolution that will be used for taking photos. Choose a format with your desired resolution. | - [`photoHeight`](/docs/api/interfaces/CameraDeviceFormat#photoheight)/[`photoWidth`](/docs/api/interfaces/CameraDeviceFormat#photoWidth): The resolution that will be used for taking photos. Choose a format with your desired resolution. | ||||||
| - [`videoHeight`](/docs/api/interfaces/CameraDeviceFormat#videoheight)/[`videoWidth`](/docs/api/interfaces/CameraDeviceFormat#videoWidth): The resolution that will be used for recording videos. Choose a format with your desired resolution. | - [`videoHeight`](/docs/api/interfaces/CameraDeviceFormat#videoheight)/[`videoWidth`](/docs/api/interfaces/CameraDeviceFormat#videoWidth): The resolution that will be used for recording videos and streaming into frame processors. This also affects the preview's aspect ratio. Choose a format with your desired resolution. | ||||||
| - [`minFps`](/docs/api/interfaces/CameraDeviceFormat#minfps)/[`maxFps`](/docs/api/interfaces/CameraDeviceFormat#maxfps): A range of possible values for the `fps` property. For example, if your format has `minFps: 1` and `maxFps: 60`, you can either use `fps={30}`, `fps={60}` or any other value in between for recording videos. | - [`minFps`](/docs/api/interfaces/CameraDeviceFormat#minfps)/[`maxFps`](/docs/api/interfaces/CameraDeviceFormat#maxfps): A range of possible values for the `fps` property. For example, if your format has `minFps: 1` and `maxFps: 60`, you can either use `fps={30}`, `fps={60}` or any other value in between for recording videos and streaming into frame processors. | ||||||
| - [`videoStabilizationModes`](/docs/api/interfaces/CameraDeviceFormat#videostabilizationmodes): All supported Video Stabilization Modes, digital and optical. If this specific format contains your desired [`VideoStabilizationMode`](/docs/api/#videostabilizationmode), you can pass it to your `<Camera>` via the [`videoStabilizationMode` property](/docs/api/interfaces/CameraProps#videoStabilizationMode). | - [`videoStabilizationModes`](/docs/api/interfaces/CameraDeviceFormat#videostabilizationmodes): All supported Video Stabilization Modes, digital and optical. If this specific format contains your desired [`VideoStabilizationMode`](/docs/api/#videostabilizationmode), you can pass it to your `<Camera>` via the [`videoStabilizationMode` property](/docs/api/interfaces/CameraProps#videoStabilizationMode). | ||||||
| - [`pixelFormats`](/docs/api/interfaces/CameraDeviceFormat#pixelformats): All supported Pixel Formats. If this specific format contains your desired [`PixelFormat`](/docs/api/#PixelFormat), you can pass it to your `<Camera>` via the [`pixelFormat` property](/docs/api/interfaces/CameraProps#pixelFormat). | - [`pixelFormats`](/docs/api/interfaces/CameraDeviceFormat#pixelformats): All supported Pixel Formats. If this specific format contains your desired [`PixelFormat`](/docs/api/#PixelFormat), you can pass it to your `<Camera>` via the [`pixelFormat` property](/docs/api/interfaces/CameraProps#pixelFormat). | ||||||
| - [`supportsVideoHdr`](/docs/api/interfaces/CameraDeviceFormat#supportsvideohdr): Whether this specific format supports true 10-bit HDR for video capture. If this is `true`, you can enable `videoHdr` on your `<Camera>`. | - [`supportsVideoHdr`](/docs/api/interfaces/CameraDeviceFormat#supportsvideohdr): Whether this specific format supports true 10-bit HDR for video capture. If this is `true`, you can enable `videoHdr` on your `<Camera>`. | ||||||
| @@ -61,6 +62,7 @@ You can either find a matching format manually by looping through your `CameraDe | |||||||
| ```ts | ```ts | ||||||
| const device = ... | const device = ... | ||||||
| const format = useCameraFormat(device, [ | const format = useCameraFormat(device, [ | ||||||
|  |   { videoAspectRatio: 16 / 9 }, | ||||||
|   { videoResolution: { width: 3048, height: 2160 } }, |   { videoResolution: { width: 3048, height: 2160 } }, | ||||||
|   { fps: 60 } |   { fps: 60 } | ||||||
| ]) | ]) | ||||||
| @@ -72,6 +74,7 @@ const format = useCameraFormat(device, [ | |||||||
| ```ts | ```ts | ||||||
| const device = ... | const device = ... | ||||||
| const format = getCameraFormat(device, [ | const format = getCameraFormat(device, [ | ||||||
|  |   { videoAspectRatio: 16 / 9 }, | ||||||
|   { videoResolution: { width: 3048, height: 2160 } }, |   { videoResolution: { width: 3048, height: 2160 } }, | ||||||
|   { fps: 60 } |   { fps: 60 } | ||||||
| ]) | ]) | ||||||
|   | |||||||
| @@ -89,7 +89,8 @@ Additionally, you can also directly access the Frame's pixel data using [`toArra | |||||||
| const frameProcessor = useFrameProcessor((frame) => { | const frameProcessor = useFrameProcessor((frame) => { | ||||||
|   'worklet' |   'worklet' | ||||||
|   if (frame.pixelFormat === 'rgb') { |   if (frame.pixelFormat === 'rgb') { | ||||||
|     const data = frame.toArrayBuffer() |     const buffer = frame.toArrayBuffer() | ||||||
|  |     const data = new Uint8Array(buffer) | ||||||
|     console.log(`Pixel at 0,0: RGB(${data[0]}, ${data[1]}, ${data[2]})`) |     console.log(`Pixel at 0,0: RGB(${data[0]}, ${data[1]}, ${data[2]})`) | ||||||
|   } |   } | ||||||
| }, []) | }, []) | ||||||
|   | |||||||
| @@ -33,6 +33,7 @@ cd ios && pod install | |||||||
| * [xulihang/**vision-camera-dynamsoft-barcode-reader**](https://github.com/xulihang/vision-camera-dynamsoft-barcode-reader): A plugin to read barcodes using Dynamsoft Barcode Reader. | * [xulihang/**vision-camera-dynamsoft-barcode-reader**](https://github.com/xulihang/vision-camera-dynamsoft-barcode-reader): A plugin to read barcodes using Dynamsoft Barcode Reader. | ||||||
| * [xulihang/**vision-camera-dynamsoft-label-recognizer**](https://github.com/xulihang/vision-camera-dynamsoft-label-recognizer): A plugin to recognize text on labels, MRZ passports, etc. using Dynamsoft Label Recognizer. | * [xulihang/**vision-camera-dynamsoft-label-recognizer**](https://github.com/xulihang/vision-camera-dynamsoft-label-recognizer): A plugin to recognize text on labels, MRZ passports, etc. using Dynamsoft Label Recognizer. | ||||||
| * [tony-xlh/**vision-camera-dynamsoft-document-normalizer**](https://github.com/tony-xlh/vision-camera-dynamsoft-document-normalizer): A plugin to scan documents using Dynamsoft Document Normalizer with features like document border detection and perspective transformation. | * [tony-xlh/**vision-camera-dynamsoft-document-normalizer**](https://github.com/tony-xlh/vision-camera-dynamsoft-document-normalizer): A plugin to scan documents using Dynamsoft Document Normalizer with features like document border detection and perspective transformation. | ||||||
|  | * [tony-xlh/**vision-camera-cropper**](https://github.com/tony-xlh/vision-camera-cropper): A plugin to crop frames and save frames to files or as base64. | ||||||
| * [aarongrider/**vision-camera-ocr**](https://github.com/aarongrider/vision-camera-ocr): A plugin to detect text in real time using MLKit Text Detector (OCR). | * [aarongrider/**vision-camera-ocr**](https://github.com/aarongrider/vision-camera-ocr): A plugin to detect text in real time using MLKit Text Detector (OCR). | ||||||
| * [yasintorun/**vision-camera-base64**](https://github.com/yasintorun/vision-camera-base64): A plugin to convert a Frame to a base64 string. | * [yasintorun/**vision-camera-base64**](https://github.com/yasintorun/vision-camera-base64): A plugin to convert a Frame to a base64 string. | ||||||
|  |  | ||||||
| @@ -40,7 +41,7 @@ cd ios && pod install | |||||||
|  |  | ||||||
|  |  | ||||||
| <!-- Add your Frame Processor Plugin here! --> | <!-- Add your Frame Processor Plugin here! --> | ||||||
|  | * [nonam4/**react-native-vision-camera-face-detector**](https://github.com/nonam4/react-native-vision-camera-face-detector): A V3 frame processor plugin to detect faces using MLKit Vision Face Detector. | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -50,7 +51,7 @@ cd ios && pod install | |||||||
| <br /> | <br /> | ||||||
| <p align="center"> | <p align="center"> | ||||||
| <b> | <b> | ||||||
| <a href="https://github.com/mrousavy/react-native-vision-camera/edit/main/docs/docs/guides/FRAME_PROCESSOR_PLUGIN_LIST.mdx">Click here</a> to add your Frame Processor Plugin to this list! | <a href="https://github.com/mrousavy/react-native-vision-camera/edit/main/docs/docs/guides/FRAME_PROCESSOR_PLUGINS.mdx">Click here</a> to add your Frame Processor Plugin to this list! | ||||||
| </b> | </b> | ||||||
| </p> | </p> | ||||||
| <br /> | <br /> | ||||||
|   | |||||||
| @@ -70,6 +70,13 @@ Enable Buffer Compression ([`enableBufferCompression`](/docs/api/interfaces/Came | |||||||
|  |  | ||||||
| Note: When not using a `frameProcessor`, buffer compression is automatically enabled. | Note: When not using a `frameProcessor`, buffer compression is automatically enabled. | ||||||
|  |  | ||||||
|  | ### GPU buffers | ||||||
|  |  | ||||||
|  | Enable GPU Buffer flags ([`enableGpuBuffers`](/docs/api/interfaces/CameraProps#enablegpubuffers)) to optimize the Video Pipeline for zero-copy buffer forwarding. | ||||||
|  | If this is enabled, the Video Pipeline can avoid an additional CPU -> GPU copy, resulting in better performance and more efficiency. | ||||||
|  |  | ||||||
|  | Note: This only takes effect when using a `frameProcessor`. | ||||||
|  |  | ||||||
| ### Video Stabilization | ### Video Stabilization | ||||||
|  |  | ||||||
| Video Stabilization requires additional overhead to start the algorithm, so disabling [`videoStabilizationMode`](/docs/api/interfaces/CameraProps#videostabilizationmode) can significantly speed up the Camera initialization time. | Video Stabilization requires additional overhead to start the algorithm, so disabling [`videoStabilizationMode`](/docs/api/interfaces/CameraProps#videostabilizationmode) can significantly speed up the Camera initialization time. | ||||||
|   | |||||||
| @@ -21,10 +21,10 @@ import useBaseUrl from '@docusaurus/useBaseUrl' | |||||||
| **Download now:** | **Download now:** | ||||||
|  |  | ||||||
| <div style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}> | <div style={{ display: 'flex', flexDirection: 'row', alignItems: 'center' }}> | ||||||
|   <a href="https://apps.apple.com/at/app/shadowlens/id6471849004"> |   <a href="https://apps.apple.com/app/shadowlens/id6471849004"> | ||||||
|     <img height={40} src={useBaseUrl("img/appstore.svg")} /> |     <img height={40} src={useBaseUrl("img/appstore.svg")} /> | ||||||
|   </a> |   </a> | ||||||
|   <a href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens"> |   <a href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens"> | ||||||
|     <img height={58} src={useBaseUrl("img/playstore.png")} /> |     <img height={40} src={useBaseUrl("img/googleplay.svg")} /> | ||||||
|   </a> |   </a> | ||||||
| </div> | </div> | ||||||
|   | |||||||
| @@ -112,7 +112,7 @@ If you're experiencing build issues or runtime issues in VisionCamera, make sure | |||||||
| 2. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information. | 2. If a camera device is not being returned by [`Camera.getAvailableCameraDevices()`](/docs/api/classes/Camera#getavailablecameradevices), make sure it is a Camera2 compatible device. See [this section in the Android docs](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#reprocessing) for more information. | ||||||
| 3. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs. There is useful information about the Frame Processor Runtime that will tell you if something goes wrong. | 3. If your Frame Processor is not running, make sure you check the native Android Studio/Logcat logs. There is useful information about the Frame Processor Runtime that will tell you if something goes wrong. | ||||||
| 4. If your Frame Processor is not running, make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI. | 4. If your Frame Processor is not running, make sure you are not using a remote JS debugger such as Google Chrome, since those don't work with JSI. | ||||||
| 5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr` or `format` on the `<Camera>` component except for the required ones: | 5. If you are experiencing black-screens, try removing all properties such as `fps`, `videoHdr`, `enableGpuBuffers` or `format` on the `<Camera>` component except for the required ones: | ||||||
|    ```tsx |    ```tsx | ||||||
|    <Camera device={device} isActive={true} style={{ width: 500, height: 500 }} /> |    <Camera device={device} isActive={true} style={{ width: 500, height: 500 }} /> | ||||||
|    ``` |    ``` | ||||||
|   | |||||||
| @@ -25,7 +25,7 @@ module.exports = { | |||||||
|     }, |     }, | ||||||
|     announcementBar: { |     announcementBar: { | ||||||
|       id: 'shadowlens', |       id: 'shadowlens', | ||||||
|       content: '<b>ShadowLens is out!!! 🥳🥳</b> Download the iOS app to see VisionCamera in action: <a target="_blank" rel="noopener noreferrer" href="https://apps.apple.com/at/app/shadowlens/id6471849004?l=en-GB">apps.apple.com/shadowlens</a>', |       content: '<b>ShadowLens is out!!! 🥳🥳</b> See VisionCamera in action: <a target="_blank" rel="noopener noreferrer" href="https://apps.apple.com/app/shadowlens/id6471849004">Download ShadowLens for iOS</a> or <a target="_blank" rel="noopener noreferrer" href="https://play.google.com/store/apps/details?id=com.mrousavy.shadowlens">Download ShadowLens for Android</a>', | ||||||
|       backgroundColor: '#e39600', |       backgroundColor: '#e39600', | ||||||
|       textColor: '#ffffff', |       textColor: '#ffffff', | ||||||
|       isCloseable: false, |       isCloseable: false, | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								docs/static/img/googleplay.svg
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										2
									
								
								docs/static/img/googleplay.svg
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							| After Width: | Height: | Size: 7.0 KiB | 
							
								
								
									
										
											BIN
										
									
								
								docs/static/img/playstore.png
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/static/img/playstore.png
									
									
									
									
										vendored
									
									
								
							
										
											Binary file not shown.
										
									
								
							| Before Width: | Height: | Size: 4.8 KiB | 
| @@ -46,6 +46,10 @@ def safeExtGet(prop, fallback) { | |||||||
|   rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback |   rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback | ||||||
| } | } | ||||||
|  |  | ||||||
|  | def safeExtGetBool(prop, fallback) { | ||||||
|  |   Boolean.parseBoolean("${safeExtGet(prop, fallback)}") | ||||||
|  | } | ||||||
|  |  | ||||||
| def reactNativeArchitectures() { | def reactNativeArchitectures() { | ||||||
|   def value = project.getProperties().get("reactNativeArchitectures") |   def value = project.getProperties().get("reactNativeArchitectures") | ||||||
|   return value ? value.split(",") : ["armeabi-v7a", "x86", "x86_64", "arm64-v8a"] |   return value ? value.split(",") : ["armeabi-v7a", "x86", "x86_64", "arm64-v8a"] | ||||||
| @@ -68,10 +72,10 @@ static def findNodeModules(baseDir) { | |||||||
|  |  | ||||||
| def nodeModules = findNodeModules(projectDir) | def nodeModules = findNodeModules(projectDir) | ||||||
|  |  | ||||||
| def hasWorklets = !safeExtGet("VisionCamera_disableFrameProcessors", false) && findProject(":react-native-worklets-core") != null | def hasWorklets = !safeExtGetBool('VisionCamera_disableFrameProcessors', false) && findProject(":react-native-worklets-core") != null | ||||||
| logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? "enabled" : "disabled"}!") | logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? "enabled" : "disabled"}!") | ||||||
|  |  | ||||||
| def enableCodeScanner = safeExtGet("VisionCamera_enableCodeScanner", false) | def enableCodeScanner = safeExtGetBool('VisionCamera_enableCodeScanner', false) | ||||||
|  |  | ||||||
| repositories { | repositories { | ||||||
|   google() |   google() | ||||||
| @@ -129,8 +133,8 @@ android { | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   compileOptions { |   compileOptions { | ||||||
|     sourceCompatibility JavaVersion.VERSION_1_8 |     sourceCompatibility JavaVersion.VERSION_17 | ||||||
|     targetCompatibility JavaVersion.VERSION_1_8 |     targetCompatibility JavaVersion.VERSION_17 | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   externalNativeBuild { |   externalNativeBuild { | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| <manifest xmlns:android="http://schemas.android.com/apk/res/android" | <manifest xmlns:android="http://schemas.android.com/apk/res/android"> | ||||||
|           package="com.mrousavy.camera"> |  | ||||||
|  |  | ||||||
| </manifest> | </manifest> | ||||||
|   | |||||||
| @@ -14,7 +14,7 @@ MutableJByteBuffer::MutableJByteBuffer(jni::alias_ref<jni::JByteBuffer> byteBuff | |||||||
|   _byteBuffer = jni::make_global(byteBuffer); |   _byteBuffer = jni::make_global(byteBuffer); | ||||||
| } | } | ||||||
|  |  | ||||||
| MutableJByteBuffer::~MutableJByteBuffer() noexcept { | MutableJByteBuffer::~MutableJByteBuffer() { | ||||||
|   // Hermes GC might destroy HostObjects on an arbitrary Thread which might not be |   // Hermes GC might destroy HostObjects on an arbitrary Thread which might not be | ||||||
|   // connected to the JNI environment. To make sure fbjni can properly destroy |   // connected to the JNI environment. To make sure fbjni can properly destroy | ||||||
|   // the Java method, we connect to a JNI environment first. |   // the Java method, we connect to a JNI environment first. | ||||||
|   | |||||||
| @@ -29,10 +29,11 @@ OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWi | |||||||
| } | } | ||||||
|  |  | ||||||
| OpenGLRenderer::~OpenGLRenderer() { | OpenGLRenderer::~OpenGLRenderer() { | ||||||
|  |   __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGLRenderer..."); | ||||||
|  |   destroy(); | ||||||
|   if (_outputSurface != nullptr) { |   if (_outputSurface != nullptr) { | ||||||
|     ANativeWindow_release(_outputSurface); |     ANativeWindow_release(_outputSurface); | ||||||
|   } |   } | ||||||
|   destroy(); |  | ||||||
| } | } | ||||||
|  |  | ||||||
| void OpenGLRenderer::destroy() { | void OpenGLRenderer::destroy() { | ||||||
|   | |||||||
| @@ -34,7 +34,7 @@ JSharedArray::JSharedArray(const jni::alias_ref<jhybridobject>& javaThis, const | |||||||
| #else | #else | ||||||
|   jsi::Runtime& runtime = *proxy->cthis()->getJSRuntime(); |   jsi::Runtime& runtime = *proxy->cthis()->getJSRuntime(); | ||||||
| #endif | #endif | ||||||
|   __android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping Java ByteBuffer with size %i...", byteBuffer->getDirectSize()); |   __android_log_print(ANDROID_LOG_INFO, TAG, "Wrapping Java ByteBuffer with size %zu...", byteBuffer->getDirectSize()); | ||||||
|   _byteBuffer = jni::make_global(byteBuffer); |   _byteBuffer = jni::make_global(byteBuffer); | ||||||
|   _size = _byteBuffer->getDirectSize(); |   _size = _byteBuffer->getDirectSize(); | ||||||
|  |  | ||||||
|   | |||||||
| @@ -15,6 +15,7 @@ TSelf JVisionCameraScheduler::initHybrid(jni::alias_ref<jhybridobject> jThis) { | |||||||
| } | } | ||||||
|  |  | ||||||
| void JVisionCameraScheduler::dispatchAsync(const std::function<void()>& job) { | void JVisionCameraScheduler::dispatchAsync(const std::function<void()>& job) { | ||||||
|  |   std::unique_lock<std::mutex> lock(_mutex); | ||||||
|   // 1. add job to queue |   // 1. add job to queue | ||||||
|   _jobs.push(job); |   _jobs.push(job); | ||||||
|   scheduleTrigger(); |   scheduleTrigger(); | ||||||
|   | |||||||
| @@ -5,56 +5,75 @@ import com.facebook.react.bridge.Arguments | |||||||
| import com.facebook.react.bridge.ReactContext | import com.facebook.react.bridge.ReactContext | ||||||
| import com.facebook.react.bridge.WritableMap | import com.facebook.react.bridge.WritableMap | ||||||
| import com.facebook.react.uimanager.events.RCTEventEmitter | import com.facebook.react.uimanager.events.RCTEventEmitter | ||||||
|  | import com.facebook.react.uimanager.UIManagerHelper | ||||||
|  | import com.facebook.react.uimanager.events.Event | ||||||
| import com.google.mlkit.vision.barcode.common.Barcode | import com.google.mlkit.vision.barcode.common.Barcode | ||||||
| import com.mrousavy.camera.core.CameraError | import com.mrousavy.camera.core.CameraError | ||||||
| import com.mrousavy.camera.core.CodeScannerFrame | import com.mrousavy.camera.core.CodeScannerFrame | ||||||
| import com.mrousavy.camera.core.UnknownCameraError | import com.mrousavy.camera.core.UnknownCameraError | ||||||
| import com.mrousavy.camera.core.code | import com.mrousavy.camera.core.code | ||||||
| import com.mrousavy.camera.types.CodeType | import com.mrousavy.camera.types.CodeType | ||||||
|  | import java.io.File | ||||||
|  | import com.mrousavy.camera.types.* | ||||||
|  |  | ||||||
| fun CameraView.invokeOnInitialized() { | fun CameraView.invokeOnInitialized() { | ||||||
|   Log.i(CameraView.TAG, "invokeOnInitialized()") |   Log.i(CameraView.TAG, "invokeOnInitialized()") | ||||||
|  |  | ||||||
|   val reactContext = context as ReactContext |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraInitialized", null) |   val event = CameraInitializedEvent(surfaceId, id) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnStarted() { | fun CameraView.invokeOnStarted() { | ||||||
|   Log.i(CameraView.TAG, "invokeOnStarted()") |   Log.i(CameraView.TAG, "invokeOnStarted()") | ||||||
|  |  | ||||||
|   val reactContext = context as ReactContext |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStarted", null) |   val event = CameraStartedEvent(surfaceId, id) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnStopped() { | fun CameraView.invokeOnStopped() { | ||||||
|   Log.i(CameraView.TAG, "invokeOnStopped()") |   Log.i(CameraView.TAG, "invokeOnStopped()") | ||||||
|  |  | ||||||
|  |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|  |   val event = CameraStoppedEvent(surfaceId, id) | ||||||
|  |   this.sendEvent(event) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | fun CameraView.invokeOnChunkReady(filepath: File, index: Int) { | ||||||
|  |   Log.e(CameraView.TAG, "invokeOnError(...):") | ||||||
|  |   val event = Arguments.createMap() | ||||||
|  |   event.putInt("index", index) | ||||||
|  |   event.putString("filepath", filepath.toString()) | ||||||
|   val reactContext = context as ReactContext |   val reactContext = context as ReactContext | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStopped", null) |   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnError(error: Throwable) { | fun CameraView.invokeOnError(error: Throwable) { | ||||||
|   Log.e(CameraView.TAG, "invokeOnError(...):") |   Log.e(CameraView.TAG, "invokeOnError(...):") | ||||||
|   error.printStackTrace() |   error.printStackTrace() | ||||||
|  |  | ||||||
|   val cameraError = when (error) { |   val cameraError = | ||||||
|  |     when (error) { | ||||||
|       is CameraError -> error |       is CameraError -> error | ||||||
|       else -> UnknownCameraError(error) |       else -> UnknownCameraError(error) | ||||||
|     } |     } | ||||||
|   val event = Arguments.createMap() |   val data = Arguments.createMap() | ||||||
|   event.putString("code", cameraError.code) |   data.putString("code", cameraError.code) | ||||||
|   event.putString("message", cameraError.message) |   data.putString("message", cameraError.message) | ||||||
|   cameraError.cause?.let { cause -> |   cameraError.cause?.let { cause -> | ||||||
|     event.putMap("cause", errorToMap(cause)) |     data.putMap("cause", errorToMap(cause)) | ||||||
|   } |   } | ||||||
|   val reactContext = context as ReactContext |  | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraError", event) |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|  |   val event = CameraErrorEvent(surfaceId, id, data) | ||||||
|  |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnViewReady() { | fun CameraView.invokeOnViewReady() { | ||||||
|   val event = Arguments.createMap() |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|   val reactContext = context as ReactContext |   val event = CameraViewReadyEvent(surfaceId, id) | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraViewReady", event) |   this.sendEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| fun CameraView.invokeOnCodeScanned(barcodes: List<Barcode>, scannerFrame: CodeScannerFrame) { | fun CameraView.invokeOnCodeScanned(barcodes: List<Barcode>, scannerFrame: CodeScannerFrame) { | ||||||
| @@ -87,14 +106,23 @@ fun CameraView.invokeOnCodeScanned(barcodes: List<Barcode>, scannerFrame: CodeSc | |||||||
|     codes.pushMap(code) |     codes.pushMap(code) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   val event = Arguments.createMap() |   val data = Arguments.createMap() | ||||||
|   event.putArray("codes", codes) |   data.putArray("codes", codes) | ||||||
|   val codeScannerFrame = Arguments.createMap() |   val codeScannerFrame = Arguments.createMap() | ||||||
|   codeScannerFrame.putInt("width", scannerFrame.width) |   codeScannerFrame.putInt("width", scannerFrame.width) | ||||||
|   codeScannerFrame.putInt("height", scannerFrame.height) |   codeScannerFrame.putInt("height", scannerFrame.height) | ||||||
|   event.putMap("frame", codeScannerFrame) |   data.putMap("frame", codeScannerFrame) | ||||||
|  |  | ||||||
|  |   val surfaceId = UIManagerHelper.getSurfaceId(this) | ||||||
|  |   val event = CameraCodeScannedEvent(surfaceId, id, data) | ||||||
|  |   this.sendEvent(event) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | private fun CameraView.sendEvent(event: Event<*>) { | ||||||
|   val reactContext = context as ReactContext |   val reactContext = context as ReactContext | ||||||
|   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraCodeScanned", event) |   val dispatcher = | ||||||
|  |     UIManagerHelper.getEventDispatcherForReactTag(reactContext, id) | ||||||
|  |   dispatcher?.dispatchEvent(event) | ||||||
| } | } | ||||||
|  |  | ||||||
| private fun errorToMap(error: Throwable): WritableMap { | private fun errorToMap(error: Throwable): WritableMap { | ||||||
|   | |||||||
| @@ -13,7 +13,7 @@ import com.mrousavy.camera.types.RecordVideoOptions | |||||||
| import com.mrousavy.camera.utils.makeErrorMap | import com.mrousavy.camera.utils.makeErrorMap | ||||||
| import java.util.* | import java.util.* | ||||||
|  |  | ||||||
| suspend fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallback: Callback) { | suspend fun CameraView.startRecording(options: RecordVideoOptions, filePath: String, onRecordCallback: Callback) { | ||||||
|   // check audio permission |   // check audio permission | ||||||
|   if (audio == true) { |   if (audio == true) { | ||||||
|     if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { |     if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) { | ||||||
| @@ -33,7 +33,7 @@ suspend fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallb | |||||||
|     val errorMap = makeErrorMap(error.code, error.message) |     val errorMap = makeErrorMap(error.code, error.message) | ||||||
|     onRecordCallback(null, errorMap) |     onRecordCallback(null, errorMap) | ||||||
|   } |   } | ||||||
|   cameraSession.startRecording(audio == true, options, callback, onError) |   cameraSession.startRecording(audio == true, options, filePath, callback, onError) | ||||||
| } | } | ||||||
|  |  | ||||||
| @SuppressLint("RestrictedApi") | @SuppressLint("RestrictedApi") | ||||||
|   | |||||||
| @@ -30,9 +30,12 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap { | |||||||
|  |  | ||||||
|   val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced" |   val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced" | ||||||
|   val flash = options["flash"] as? String ?: "off" |   val flash = options["flash"] as? String ?: "off" | ||||||
|   val enableAutoRedEyeReduction = options["enableAutoRedEyeReduction"] == true |  | ||||||
|   val enableAutoStabilization = options["enableAutoStabilization"] == true |   val enableAutoStabilization = options["enableAutoStabilization"] == true | ||||||
|   val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true |   val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true | ||||||
|  |   val enablePrecapture = options["enablePrecapture"] as? Boolean ?: false | ||||||
|  |  | ||||||
|  |   // TODO: Implement Red Eye Reduction | ||||||
|  |   options["enableAutoRedEyeReduction"] | ||||||
|  |  | ||||||
|   val flashMode = Flash.fromUnionValue(flash) |   val flashMode = Flash.fromUnionValue(flash) | ||||||
|   val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization) |   val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization) | ||||||
| @@ -41,8 +44,8 @@ suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap { | |||||||
|     qualityPrioritizationMode, |     qualityPrioritizationMode, | ||||||
|     flashMode, |     flashMode, | ||||||
|     enableShutterSound, |     enableShutterSound, | ||||||
|     enableAutoRedEyeReduction, |  | ||||||
|     enableAutoStabilization, |     enableAutoStabilization, | ||||||
|  |     enablePrecapture, | ||||||
|     orientation |     orientation | ||||||
|   ) |   ) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -4,9 +4,9 @@ import android.annotation.SuppressLint | |||||||
| import android.content.Context | import android.content.Context | ||||||
| import android.hardware.camera2.CameraManager | import android.hardware.camera2.CameraManager | ||||||
| import android.util.Log | import android.util.Log | ||||||
|  | import android.view.Gravity | ||||||
| import android.view.ScaleGestureDetector | import android.view.ScaleGestureDetector | ||||||
| import android.widget.FrameLayout | import android.widget.FrameLayout | ||||||
| import com.facebook.react.bridge.ReadableMap |  | ||||||
| import com.google.mlkit.vision.barcode.common.Barcode | import com.google.mlkit.vision.barcode.common.Barcode | ||||||
| import com.mrousavy.camera.core.CameraConfiguration | import com.mrousavy.camera.core.CameraConfiguration | ||||||
| import com.mrousavy.camera.core.CameraQueues | import com.mrousavy.camera.core.CameraQueues | ||||||
| @@ -25,6 +25,7 @@ import com.mrousavy.camera.types.Torch | |||||||
| import com.mrousavy.camera.types.VideoStabilizationMode | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
| import kotlinx.coroutines.CoroutineScope | import kotlinx.coroutines.CoroutineScope | ||||||
| import kotlinx.coroutines.launch | import kotlinx.coroutines.launch | ||||||
|  | import java.io.File | ||||||
|  |  | ||||||
| // | // | ||||||
| // TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity. | // TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity. | ||||||
| @@ -47,23 +48,23 @@ class CameraView(context: Context) : | |||||||
|   // props that require reconfiguring |   // props that require reconfiguring | ||||||
|   var cameraId: String? = null |   var cameraId: String? = null | ||||||
|   var enableDepthData = false |   var enableDepthData = false | ||||||
|   var enableHighQualityPhotos: Boolean? = null |  | ||||||
|   var enablePortraitEffectsMatteDelivery = false |   var enablePortraitEffectsMatteDelivery = false | ||||||
|  |  | ||||||
|   // use-cases |   // use-cases | ||||||
|   var photo: Boolean? = null |   var photo = false | ||||||
|   var video: Boolean? = null |   var video = false | ||||||
|   var audio: Boolean? = null |   var audio = false | ||||||
|   var enableFrameProcessor = false |   var enableFrameProcessor = false | ||||||
|   var pixelFormat: PixelFormat = PixelFormat.NATIVE |   var pixelFormat: PixelFormat = PixelFormat.NATIVE | ||||||
|  |  | ||||||
|   // props that require format reconfiguring |   // props that require format reconfiguring | ||||||
|   var format: ReadableMap? = null |   var format: CameraDeviceFormat? = null | ||||||
|   var fps: Int? = null |   var fps: Int? = null | ||||||
|   var videoStabilizationMode: VideoStabilizationMode? = null |   var videoStabilizationMode: VideoStabilizationMode? = null | ||||||
|   var videoHdr = false |   var videoHdr = false | ||||||
|   var photoHdr = false |   var photoHdr = false | ||||||
|   var lowLightBoost: Boolean? = null // nullable bool |   var lowLightBoost = false | ||||||
|  |   var enableGpuBuffers = false | ||||||
|  |  | ||||||
|   // other props |   // other props | ||||||
|   var isActive = false |   var isActive = false | ||||||
| @@ -71,7 +72,11 @@ class CameraView(context: Context) : | |||||||
|   var zoom: Float = 1f // in "factor" |   var zoom: Float = 1f // in "factor" | ||||||
|   var exposure: Double = 1.0 |   var exposure: Double = 1.0 | ||||||
|   var orientation: Orientation = Orientation.PORTRAIT |   var orientation: Orientation = Orientation.PORTRAIT | ||||||
|   var enableZoomGesture: Boolean = false |       set(value) { | ||||||
|  |         field = value | ||||||
|  |         previewView.orientation = value | ||||||
|  |       } | ||||||
|  |   var enableZoomGesture = false | ||||||
|     set(value) { |     set(value) { | ||||||
|       field = value |       field = value | ||||||
|       updateZoomGesture() |       updateZoomGesture() | ||||||
| @@ -81,7 +86,7 @@ class CameraView(context: Context) : | |||||||
|       previewView.resizeMode = value |       previewView.resizeMode = value | ||||||
|       field = value |       field = value | ||||||
|     } |     } | ||||||
|   var enableFpsGraph: Boolean = false |   var enableFpsGraph = false | ||||||
|     set(value) { |     set(value) { | ||||||
|       field = value |       field = value | ||||||
|       updateFpsGraph() |       updateFpsGraph() | ||||||
| @@ -109,21 +114,26 @@ class CameraView(context: Context) : | |||||||
|     clipToOutline = true |     clipToOutline = true | ||||||
|     cameraSession = CameraSession(context, cameraManager, this) |     cameraSession = CameraSession(context, cameraManager, this) | ||||||
|     previewView = cameraSession.createPreviewView(context) |     previewView = cameraSession.createPreviewView(context) | ||||||
|  |     previewView.layoutParams = LayoutParams( | ||||||
|  |       LayoutParams.MATCH_PARENT, | ||||||
|  |       LayoutParams.MATCH_PARENT, | ||||||
|  |       Gravity.CENTER | ||||||
|  |     ) | ||||||
|     addView(previewView) |     addView(previewView) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun onAttachedToWindow() { |   override fun onAttachedToWindow() { | ||||||
|  |     super.onAttachedToWindow() | ||||||
|     if (!isMounted) { |     if (!isMounted) { | ||||||
|       isMounted = true |       isMounted = true | ||||||
|       invokeOnViewReady() |       invokeOnViewReady() | ||||||
|     } |     } | ||||||
|     update() |     update() | ||||||
|     super.onAttachedToWindow() |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun onDetachedFromWindow() { |   override fun onDetachedFromWindow() { | ||||||
|     update() |  | ||||||
|     super.onDetachedFromWindow() |     super.onDetachedFromWindow() | ||||||
|  |     update() | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   fun destroy() { |   fun destroy() { | ||||||
| @@ -148,19 +158,20 @@ class CameraView(context: Context) : | |||||||
|         config.cameraId = cameraId |         config.cameraId = cameraId | ||||||
|  |  | ||||||
|         // Photo |         // Photo | ||||||
|         if (photo == true) { |         if (photo) { | ||||||
|           config.photo = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Photo(photoHdr)) |           config.photo = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Photo(photoHdr)) | ||||||
|         } else { |         } else { | ||||||
|           config.photo = CameraConfiguration.Output.Disabled.create() |           config.photo = CameraConfiguration.Output.Disabled.create() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // Video/Frame Processor |         // Video/Frame Processor | ||||||
|         if (video == true || enableFrameProcessor) { |         if (video || enableFrameProcessor) { | ||||||
|           config.video = CameraConfiguration.Output.Enabled.create( |           config.video = CameraConfiguration.Output.Enabled.create( | ||||||
|             CameraConfiguration.Video( |             CameraConfiguration.Video( | ||||||
|               videoHdr, |               videoHdr, | ||||||
|               pixelFormat, |               pixelFormat, | ||||||
|               enableFrameProcessor |               enableFrameProcessor, | ||||||
|  |               enableGpuBuffers | ||||||
|             ) |             ) | ||||||
|           ) |           ) | ||||||
|         } else { |         } else { | ||||||
| @@ -168,7 +179,7 @@ class CameraView(context: Context) : | |||||||
|         } |         } | ||||||
|  |  | ||||||
|         // Audio |         // Audio | ||||||
|         if (audio == true) { |         if (audio) { | ||||||
|           config.audio = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Audio(Unit)) |           config.audio = CameraConfiguration.Output.Enabled.create(CameraConfiguration.Audio(Unit)) | ||||||
|         } else { |         } else { | ||||||
|           config.audio = CameraConfiguration.Output.Disabled.create() |           config.audio = CameraConfiguration.Output.Disabled.create() | ||||||
| @@ -188,12 +199,7 @@ class CameraView(context: Context) : | |||||||
|         config.orientation = orientation |         config.orientation = orientation | ||||||
|  |  | ||||||
|         // Format |         // Format | ||||||
|         val format = format |         config.format = format | ||||||
|         if (format != null) { |  | ||||||
|           config.format = CameraDeviceFormat.fromJSValue(format) |  | ||||||
|         } else { |  | ||||||
|           config.format = null |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         // Side-Props |         // Side-Props | ||||||
|         config.fps = fps |         config.fps = fps | ||||||
| @@ -265,6 +271,10 @@ class CameraView(context: Context) : | |||||||
|     invokeOnStopped() |     invokeOnStopped() | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |   override fun onVideoChunkReady(filepath: File, index: Int) { | ||||||
|  |     invokeOnChunkReady(filepath, index) | ||||||
|  |   } | ||||||
|  |  | ||||||
|   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) { |   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) { | ||||||
|     invokeOnCodeScanned(codes, scannerFrame) |     invokeOnCodeScanned(codes, scannerFrame) | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -5,10 +5,12 @@ import com.facebook.react.common.MapBuilder | |||||||
| import com.facebook.react.uimanager.ThemedReactContext | import com.facebook.react.uimanager.ThemedReactContext | ||||||
| import com.facebook.react.uimanager.ViewGroupManager | import com.facebook.react.uimanager.ViewGroupManager | ||||||
| import com.facebook.react.uimanager.annotations.ReactProp | import com.facebook.react.uimanager.annotations.ReactProp | ||||||
|  | import com.mrousavy.camera.types.CameraDeviceFormat | ||||||
| import com.mrousavy.camera.types.CodeScannerOptions | import com.mrousavy.camera.types.CodeScannerOptions | ||||||
| import com.mrousavy.camera.types.Orientation | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.PixelFormat | import com.mrousavy.camera.types.PixelFormat | ||||||
| import com.mrousavy.camera.types.ResizeMode | import com.mrousavy.camera.types.ResizeMode | ||||||
|  | import android.util.Log | ||||||
| import com.mrousavy.camera.types.Torch | import com.mrousavy.camera.types.Torch | ||||||
| import com.mrousavy.camera.types.VideoStabilizationMode | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
|  |  | ||||||
| @@ -29,6 +31,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|       .put("cameraStopped", MapBuilder.of("registrationName", "onStopped")) |       .put("cameraStopped", MapBuilder.of("registrationName", "onStopped")) | ||||||
|       .put("cameraError", MapBuilder.of("registrationName", "onError")) |       .put("cameraError", MapBuilder.of("registrationName", "onError")) | ||||||
|       .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned")) |       .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned")) | ||||||
|  |       .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady")) | ||||||
|       .build() |       .build() | ||||||
|  |  | ||||||
|   override fun getName(): String = TAG |   override fun getName(): String = TAG | ||||||
| @@ -44,17 +47,17 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "photo") |   @ReactProp(name = "photo") | ||||||
|   fun setPhoto(view: CameraView, photo: Boolean?) { |   fun setPhoto(view: CameraView, photo: Boolean) { | ||||||
|     view.photo = photo |     view.photo = photo | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "video") |   @ReactProp(name = "video") | ||||||
|   fun setVideo(view: CameraView, video: Boolean?) { |   fun setVideo(view: CameraView, video: Boolean) { | ||||||
|     view.video = video |     view.video = video | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "audio") |   @ReactProp(name = "audio") | ||||||
|   fun setAudio(view: CameraView, audio: Boolean?) { |   fun setAudio(view: CameraView, audio: Boolean) { | ||||||
|     view.audio = audio |     view.audio = audio | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -65,8 +68,12 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|  |  | ||||||
|   @ReactProp(name = "pixelFormat") |   @ReactProp(name = "pixelFormat") | ||||||
|   fun setPixelFormat(view: CameraView, pixelFormat: String?) { |   fun setPixelFormat(view: CameraView, pixelFormat: String?) { | ||||||
|  |     if (pixelFormat != null) { | ||||||
|       val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) |       val newPixelFormat = PixelFormat.fromUnionValue(pixelFormat) | ||||||
|       view.pixelFormat = newPixelFormat |       view.pixelFormat = newPixelFormat | ||||||
|  |     } else { | ||||||
|  |       view.pixelFormat = PixelFormat.NATIVE | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "enableDepthData") |   @ReactProp(name = "enableDepthData") | ||||||
| @@ -84,15 +91,19 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|     view.enableFpsGraph = enableFpsGraph |     view.enableFpsGraph = enableFpsGraph | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "videoStabilizationMode") |   @ReactProp(name = "enableGpuBuffers") | ||||||
|   fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { |   fun setEnableGpuBuffers(view: CameraView, enableGpuBuffers: Boolean) { | ||||||
|     val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) |     view.enableGpuBuffers = enableGpuBuffers | ||||||
|     view.videoStabilizationMode = newMode |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "enableHighQualityPhotos") |   @ReactProp(name = "videoStabilizationMode") | ||||||
|   fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) { |   fun setVideoStabilizationMode(view: CameraView, videoStabilizationMode: String?) { | ||||||
|     view.enableHighQualityPhotos = enableHighQualityPhotos |     if (videoStabilizationMode != null) { | ||||||
|  |       val newMode = VideoStabilizationMode.fromUnionValue(videoStabilizationMode) | ||||||
|  |       view.videoStabilizationMode = newMode | ||||||
|  |     } else { | ||||||
|  |       view.videoStabilizationMode = null | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "enablePortraitEffectsMatteDelivery") |   @ReactProp(name = "enablePortraitEffectsMatteDelivery") | ||||||
| @@ -102,13 +113,22 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|  |  | ||||||
|   @ReactProp(name = "format") |   @ReactProp(name = "format") | ||||||
|   fun setFormat(view: CameraView, format: ReadableMap?) { |   fun setFormat(view: CameraView, format: ReadableMap?) { | ||||||
|     view.format = format |     if (format != null) { | ||||||
|  |       val newFormat = CameraDeviceFormat.fromJSValue(format) | ||||||
|  |       view.format = newFormat | ||||||
|  |     } else { | ||||||
|  |       view.format = null | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "resizeMode") |   @ReactProp(name = "resizeMode") | ||||||
|   fun setResizeMode(view: CameraView, resizeMode: String) { |   fun setResizeMode(view: CameraView, resizeMode: String?) { | ||||||
|  |     if (resizeMode != null) { | ||||||
|       val newMode = ResizeMode.fromUnionValue(resizeMode) |       val newMode = ResizeMode.fromUnionValue(resizeMode) | ||||||
|       view.resizeMode = newMode |       view.resizeMode = newMode | ||||||
|  |     } else { | ||||||
|  |       view.resizeMode = ResizeMode.COVER | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   // TODO: Change when TurboModules release. |   // TODO: Change when TurboModules release. | ||||||
| @@ -119,30 +139,34 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|     view.fps = if (fps > 0) fps else null |     view.fps = if (fps > 0) fps else null | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "photoHdr", defaultBoolean = false) |   @ReactProp(name = "photoHdr") | ||||||
|   fun setPhotoHdr(view: CameraView, photoHdr: Boolean) { |   fun setPhotoHdr(view: CameraView, photoHdr: Boolean) { | ||||||
|     view.photoHdr = photoHdr |     view.photoHdr = photoHdr | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "videoHdr", defaultBoolean = false) |   @ReactProp(name = "videoHdr") | ||||||
|   fun setVideoHdr(view: CameraView, videoHdr: Boolean) { |   fun setVideoHdr(view: CameraView, videoHdr: Boolean) { | ||||||
|     view.videoHdr = videoHdr |     view.videoHdr = videoHdr | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "lowLightBoost") |   @ReactProp(name = "lowLightBoost") | ||||||
|   fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean?) { |   fun setLowLightBoost(view: CameraView, lowLightBoost: Boolean) { | ||||||
|     view.lowLightBoost = lowLightBoost |     view.lowLightBoost = lowLightBoost | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "isActive", defaultBoolean = false) |   @ReactProp(name = "isActive") | ||||||
|   fun setIsActive(view: CameraView, isActive: Boolean) { |   fun setIsActive(view: CameraView, isActive: Boolean) { | ||||||
|     view.isActive = isActive |     view.isActive = isActive | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "torch") |   @ReactProp(name = "torch") | ||||||
|   fun setTorch(view: CameraView, torch: String) { |   fun setTorch(view: CameraView, torch: String?) { | ||||||
|  |     if (torch != null) { | ||||||
|       val newMode = Torch.fromUnionValue(torch) |       val newMode = Torch.fromUnionValue(torch) | ||||||
|       view.torch = newMode |       view.torch = newMode | ||||||
|  |     } else { | ||||||
|  |       view.torch = Torch.OFF | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "zoom") |   @ReactProp(name = "zoom") | ||||||
| @@ -157,14 +181,23 @@ class CameraViewManager : ViewGroupManager<CameraView>() { | |||||||
|  |  | ||||||
|   @ReactProp(name = "orientation") |   @ReactProp(name = "orientation") | ||||||
|   fun setOrientation(view: CameraView, orientation: String?) { |   fun setOrientation(view: CameraView, orientation: String?) { | ||||||
|  |     if (orientation != null) { | ||||||
|       val newMode = Orientation.fromUnionValue(orientation) |       val newMode = Orientation.fromUnionValue(orientation) | ||||||
|  |       Log.i(TAG, "Orientation set to: $newMode") | ||||||
|       view.orientation = newMode |       view.orientation = newMode | ||||||
|  |     } else { | ||||||
|  |       view.orientation = Orientation.PORTRAIT | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @ReactProp(name = "codeScannerOptions") |   @ReactProp(name = "codeScannerOptions") | ||||||
|   fun setCodeScanner(view: CameraView, codeScannerOptions: ReadableMap) { |   fun setCodeScanner(view: CameraView, codeScannerOptions: ReadableMap?) { | ||||||
|     val newCodeScannerOptions = CodeScannerOptions(codeScannerOptions) |     if (codeScannerOptions != null) { | ||||||
|  |       val newCodeScannerOptions = CodeScannerOptions.fromJSValue(codeScannerOptions) | ||||||
|       view.codeScannerOptions = newCodeScannerOptions |       view.codeScannerOptions = newCodeScannerOptions | ||||||
|  |     } else { | ||||||
|  |       view.codeScannerOptions = null | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   companion object { |   companion object { | ||||||
|   | |||||||
| @@ -95,12 +95,12 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase | |||||||
|  |  | ||||||
|   // TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that |   // TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that | ||||||
|   @ReactMethod |   @ReactMethod | ||||||
|   fun startRecording(viewTag: Int, jsOptions: ReadableMap, onRecordCallback: Callback) { |   fun startRecording(viewTag: Int, jsOptions: ReadableMap, filePath: String, onRecordCallback: Callback) { | ||||||
|     coroutineScope.launch { |     coroutineScope.launch { | ||||||
|       val view = findCameraView(viewTag) |       val view = findCameraView(viewTag) | ||||||
|       try { |       try { | ||||||
|         val options = RecordVideoOptions(jsOptions) |         val options = RecordVideoOptions(jsOptions) | ||||||
|         view.startRecording(options, onRecordCallback) |         view.startRecording(options, filePath, onRecordCallback) | ||||||
|       } catch (error: CameraError) { |       } catch (error: CameraError) { | ||||||
|         val map = makeErrorMap("${error.domain}/${error.id}", error.message, error) |         val map = makeErrorMap("${error.domain}/${error.id}", error.message, error) | ||||||
|         onRecordCallback(null, map) |         onRecordCallback(null, map) | ||||||
|   | |||||||
| @@ -44,7 +44,7 @@ data class CameraConfiguration( | |||||||
|   // Output<T> types, those need to be comparable |   // Output<T> types, those need to be comparable | ||||||
|   data class CodeScanner(val codeTypes: List<CodeType>) |   data class CodeScanner(val codeTypes: List<CodeType>) | ||||||
|   data class Photo(val enableHdr: Boolean) |   data class Photo(val enableHdr: Boolean) | ||||||
|   data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean) |   data class Video(val enableHdr: Boolean, val pixelFormat: PixelFormat, val enableFrameProcessor: Boolean, val enableGpuBuffers: Boolean) | ||||||
|   data class Audio(val nothing: Unit) |   data class Audio(val nothing: Unit) | ||||||
|   data class Preview(val surface: Surface) |   data class Preview(val surface: Surface) | ||||||
|  |  | ||||||
| @@ -67,7 +67,7 @@ data class CameraConfiguration( | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   data class Difference( |   data class Difference( | ||||||
|     // Input Camera (cameraId, isActive) |     // Input Camera (cameraId) | ||||||
|     val deviceChanged: Boolean, |     val deviceChanged: Boolean, | ||||||
|     // Outputs & Session (Photo, Video, CodeScanner, HDR, Format) |     // Outputs & Session (Photo, Video, CodeScanner, HDR, Format) | ||||||
|     val outputsChanged: Boolean, |     val outputsChanged: Boolean, | ||||||
| @@ -75,14 +75,17 @@ data class CameraConfiguration( | |||||||
|     val sidePropsChanged: Boolean, |     val sidePropsChanged: Boolean, | ||||||
|     // (isActive) changed |     // (isActive) changed | ||||||
|     val isActiveChanged: Boolean |     val isActiveChanged: Boolean | ||||||
|   ) |   ) { | ||||||
|  |     val hasChanges: Boolean | ||||||
|  |       get() = deviceChanged || outputsChanged || sidePropsChanged || isActiveChanged | ||||||
|  |   } | ||||||
|  |  | ||||||
|   companion object { |   companion object { | ||||||
|     fun copyOf(other: CameraConfiguration?): CameraConfiguration = other?.copy() ?: CameraConfiguration() |     fun copyOf(other: CameraConfiguration?): CameraConfiguration = other?.copy() ?: CameraConfiguration() | ||||||
|  |  | ||||||
|     fun difference(left: CameraConfiguration?, right: CameraConfiguration): Difference { |     fun difference(left: CameraConfiguration?, right: CameraConfiguration): Difference { | ||||||
|       // input device |       // input device | ||||||
|       val deviceChanged = left?.cameraId != right.cameraId || left?.isActive != right.isActive |       val deviceChanged = left?.cameraId != right.cameraId | ||||||
|  |  | ||||||
|       // outputs |       // outputs | ||||||
|       val outputsChanged = deviceChanged || |       val outputsChanged = deviceChanged || | ||||||
| @@ -101,7 +104,7 @@ data class CameraConfiguration( | |||||||
|         left.videoStabilizationMode != right.videoStabilizationMode || |         left.videoStabilizationMode != right.videoStabilizationMode || | ||||||
|         left.exposure != right.exposure |         left.exposure != right.exposure | ||||||
|  |  | ||||||
|       val isActiveChanged = left?.isActive != right.isActive |       val isActiveChanged = sidePropsChanged || left?.isActive != right.isActive | ||||||
|  |  | ||||||
|       return Difference( |       return Difference( | ||||||
|         deviceChanged, |         deviceChanged, | ||||||
|   | |||||||
| @@ -1,17 +1,24 @@ | |||||||
| package com.mrousavy.camera.core | package com.mrousavy.camera.core | ||||||
|  |  | ||||||
|  | import android.content.res.Resources | ||||||
| import android.graphics.ImageFormat | import android.graphics.ImageFormat | ||||||
| import android.hardware.camera2.CameraCharacteristics | import android.hardware.camera2.CameraCharacteristics | ||||||
|  | import android.hardware.camera2.CameraExtensionCharacteristics | ||||||
| import android.hardware.camera2.CameraManager | import android.hardware.camera2.CameraManager | ||||||
| import android.hardware.camera2.CameraMetadata | import android.hardware.camera2.CameraMetadata | ||||||
| import android.os.Build | import android.os.Build | ||||||
|  | import android.util.Log | ||||||
| import android.util.Range | import android.util.Range | ||||||
| import android.util.Size | import android.util.Size | ||||||
|  | import android.util.SizeF | ||||||
|  | import android.view.SurfaceHolder | ||||||
| import com.facebook.react.bridge.Arguments | import com.facebook.react.bridge.Arguments | ||||||
| import com.facebook.react.bridge.ReadableArray | import com.facebook.react.bridge.ReadableArray | ||||||
| import com.facebook.react.bridge.ReadableMap | import com.facebook.react.bridge.ReadableMap | ||||||
|  | import com.mrousavy.camera.extensions.bigger | ||||||
| import com.mrousavy.camera.extensions.getPhotoSizes | import com.mrousavy.camera.extensions.getPhotoSizes | ||||||
| import com.mrousavy.camera.extensions.getVideoSizes | import com.mrousavy.camera.extensions.getVideoSizes | ||||||
|  | import com.mrousavy.camera.extensions.smaller | ||||||
| import com.mrousavy.camera.extensions.toJSValue | import com.mrousavy.camera.extensions.toJSValue | ||||||
| import com.mrousavy.camera.types.AutoFocusSystem | import com.mrousavy.camera.types.AutoFocusSystem | ||||||
| import com.mrousavy.camera.types.DeviceType | import com.mrousavy.camera.types.DeviceType | ||||||
| @@ -20,65 +27,127 @@ import com.mrousavy.camera.types.LensFacing | |||||||
| import com.mrousavy.camera.types.Orientation | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.PixelFormat | import com.mrousavy.camera.types.PixelFormat | ||||||
| import com.mrousavy.camera.types.VideoStabilizationMode | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
|  | import com.mrousavy.camera.utils.CamcorderProfileUtils | ||||||
| import kotlin.math.atan2 | import kotlin.math.atan2 | ||||||
| import kotlin.math.sqrt | import kotlin.math.sqrt | ||||||
|  |  | ||||||
| class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String) { | class CameraDeviceDetails(private val cameraManager: CameraManager, val cameraId: String) { | ||||||
|   val characteristics = cameraManager.getCameraCharacteristics(cameraId) |   companion object { | ||||||
|   val hardwareLevel = HardwareLevel.fromCameraCharacteristics(characteristics) |     private const val TAG = "CameraDeviceDetails" | ||||||
|   val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0) |  | ||||||
|   val extensions = getSupportedExtensions() |     fun getMaximumPreviewSize(): Size { | ||||||
|  |       // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap | ||||||
|  |       // According to the Android Developer documentation, PREVIEW streams can have a resolution | ||||||
|  |       // of up to the phone's display's resolution, with a maximum of 1920x1080. | ||||||
|  |       val display1080p = Size(1920, 1080) | ||||||
|  |       val displaySize = Size( | ||||||
|  |         Resources.getSystem().displayMetrics.widthPixels, | ||||||
|  |         Resources.getSystem().displayMetrics.heightPixels | ||||||
|  |       ) | ||||||
|  |       val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller | ||||||
|  |  | ||||||
|  |       return if (isHighResScreen) display1080p else displaySize | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   val characteristics by lazy { cameraManager.getCameraCharacteristics(cameraId) } | ||||||
|  |   val hardwareLevel by lazy { HardwareLevel.fromCameraCharacteristics(characteristics) } | ||||||
|  |   val capabilities by lazy { characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0) } | ||||||
|  |   val extensions by lazy { getSupportedExtensions() } | ||||||
|  |  | ||||||
|   // device characteristics |   // device characteristics | ||||||
|   val isMultiCam = capabilities.contains(11) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA |   val isMultiCam by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) } | ||||||
|   val supportsDepthCapture = capabilities.contains(8) // TODO: CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT |   val supportsDepthCapture by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) } | ||||||
|   val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) |   val supportsRawCapture by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) } | ||||||
|   val supportsLowLightBoost = extensions.contains(4) // TODO: CameraExtensionCharacteristics.EXTENSION_NIGHT |   val supportsLowLightBoost by lazy { | ||||||
|   val lensFacing = LensFacing.fromCameraCharacteristics(characteristics) |     extensions.contains(CameraExtensionCharacteristics.EXTENSION_NIGHT) && | ||||||
|   val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false |       modes.contains(CameraCharacteristics.CONTROL_MODE_USE_SCENE_MODE) | ||||||
|   val focalLengths = |   } | ||||||
|     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) |   val lensFacing by lazy { LensFacing.fromCameraCharacteristics(characteristics) } | ||||||
|       // 35mm is the film standard sensor size |   val hasFlash by lazy { characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false } | ||||||
|       ?: floatArrayOf(35f) |   val focalLengths by lazy { | ||||||
|   val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!! |     // 35mm is the film standard sensor size | ||||||
|   val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! |     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f) | ||||||
|   val minFocusDistance = getMinFocusDistanceCm() |   } | ||||||
|   val name = ( |   val sensorSize by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE) ?: SizeF(0f, 0f) } | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { |   val activeSize | ||||||
|       characteristics.get(CameraCharacteristics.INFO_VERSION) |     get() = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! | ||||||
|     } else { |   val sensorOrientation by lazy { | ||||||
|       null |     val degrees = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION) ?: 0 | ||||||
|  |     return@lazy Orientation.fromRotationDegrees(degrees) | ||||||
|  |   } | ||||||
|  |   val minFocusDistance by lazy { getMinFocusDistanceCm() } | ||||||
|  |   val name by lazy { | ||||||
|  |     val info = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION) else null | ||||||
|  |     return@lazy info ?: "$lensFacing ($cameraId)" | ||||||
|   } |   } | ||||||
|     ) ?: "$lensFacing ($cameraId)" |  | ||||||
|  |  | ||||||
|   // "formats" (all possible configurations for this device) |   // "formats" (all possible configurations for this device) | ||||||
|   val zoomRange = ( |   val maxDigitalZoom by lazy { characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f } | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { |   val zoomRange by lazy { | ||||||
|  |     val range = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { | ||||||
|       characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) |       characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) | ||||||
|     } else { |     } else { | ||||||
|       null |       null | ||||||
|     } |     } | ||||||
|     ) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f) |     return@lazy range ?: Range(1f, maxDigitalZoom) | ||||||
|   val physicalDevices = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && characteristics.physicalCameraIds.isNotEmpty()) { |   } | ||||||
|  |   val physicalDevices by lazy { | ||||||
|  |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && characteristics.physicalCameraIds.isNotEmpty()) { | ||||||
|       characteristics.physicalCameraIds |       characteristics.physicalCameraIds | ||||||
|     } else { |     } else { | ||||||
|       setOf(cameraId) |       setOf(cameraId) | ||||||
|     } |     } | ||||||
|   val minZoom = zoomRange.lower.toDouble() |   } | ||||||
|   val maxZoom = zoomRange.upper.toDouble() |   val minZoom by lazy { zoomRange.lower.toDouble() } | ||||||
|  |   val maxZoom by lazy { zoomRange.upper.toDouble() } | ||||||
|  |  | ||||||
|   val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! |   val cameraConfig by lazy { characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! } | ||||||
|   val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) |   val isoRange by lazy { characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0) } | ||||||
|   val exposureRange = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE) ?: Range(0, 0) |   val exposureRange by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE) ?: Range(0, 0) } | ||||||
|   val digitalStabilizationModes = |   val digitalStabilizationModes by lazy { | ||||||
|     characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0) |     characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0) | ||||||
|   val opticalStabilizationModes = |   } | ||||||
|  |   val opticalStabilizationModes by lazy { | ||||||
|     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0) |     characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0) | ||||||
|   val supportsPhotoHdr = extensions.contains(3) // TODO: CameraExtensionCharacteristics.EXTENSION_HDR |   } | ||||||
|   val supportsVideoHdr = getHasVideoHdr() |   val supportsPhotoHdr by lazy { extensions.contains(CameraExtensionCharacteristics.EXTENSION_HDR) } | ||||||
|   val autoFocusSystem = getAutoFocusSystemMode() |   val supportsVideoHdr by lazy { getHasVideoHdr() } | ||||||
|  |   val autoFocusSystem by lazy { getAutoFocusSystemMode() } | ||||||
|  |  | ||||||
|  |   val supportsYuvProcessing by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING) } | ||||||
|  |   val supportsPrivateProcessing by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING) } | ||||||
|  |   val supportsZsl by lazy { supportsYuvProcessing || supportsPrivateProcessing } | ||||||
|  |  | ||||||
|  |   val isBackwardsCompatible by lazy { capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) } | ||||||
|  |   val supportsSnapshotCapture by lazy { supportsSnapshotCapture() } | ||||||
|  |  | ||||||
|  |   val supportsFocusRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF) ?: 0) > 0 } | ||||||
|  |   val supportsExposureRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0) > 0 } | ||||||
|  |   val supportsWhiteBalanceRegions by lazy { (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB) ?: 0) > 0 } | ||||||
|  |  | ||||||
|  |   val modes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |   val afModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |   val aeModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |   val awbModes by lazy { characteristics.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES)?.toList() ?: emptyList() } | ||||||
|  |  | ||||||
|  |   val availableAberrationModes by lazy { | ||||||
|  |     characteristics.get(CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES) | ||||||
|  |       ?: intArrayOf() | ||||||
|  |   } | ||||||
|  |   val availableHotPixelModes by lazy { characteristics.get(CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES) ?: intArrayOf() } | ||||||
|  |   val availableEdgeModes by lazy { characteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES) ?: intArrayOf() } | ||||||
|  |   val availableDistortionCorrectionModes by lazy { getAvailableDistortionCorrectionModesOrEmptyArray() } | ||||||
|  |   val availableShadingModes by lazy { characteristics.get(CameraCharacteristics.SHADING_AVAILABLE_MODES) ?: intArrayOf() } | ||||||
|  |   val availableToneMapModes by lazy { characteristics.get(CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES) ?: intArrayOf() } | ||||||
|  |   val availableNoiseReductionModes by lazy { | ||||||
|  |     characteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES) | ||||||
|  |       ?: intArrayOf() | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // TODO: Also add 10-bit YUV here? | ||||||
|   val videoFormat = ImageFormat.YUV_420_888 |   val videoFormat = ImageFormat.YUV_420_888 | ||||||
|  |   val photoFormat = ImageFormat.JPEG | ||||||
|  |  | ||||||
|   // get extensions (HDR, Night Mode, ..) |   // get extensions (HDR, Night Mode, ..) | ||||||
|   private fun getSupportedExtensions(): List<Int> = |   private fun getSupportedExtensions(): List<Int> = | ||||||
| @@ -89,6 +158,13 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|       emptyList() |       emptyList() | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |   private fun getAvailableDistortionCorrectionModesOrEmptyArray(): IntArray = | ||||||
|  |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { | ||||||
|  |       characteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES) ?: intArrayOf() | ||||||
|  |     } else { | ||||||
|  |       intArrayOf() | ||||||
|  |     } | ||||||
|  |  | ||||||
|   private fun getHasVideoHdr(): Boolean { |   private fun getHasVideoHdr(): Boolean { | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { |     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) { | ||||||
|       if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) { |       if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) { | ||||||
| @@ -102,10 +178,19 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|   private fun getMinFocusDistanceCm(): Double { |   private fun getMinFocusDistanceCm(): Double { | ||||||
|     val distance = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) |     val distance = characteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) | ||||||
|     if (distance == null || distance == 0f) return 0.0 |     if (distance == null || distance == 0f) return 0.0 | ||||||
|  |     if (distance.isNaN() || distance.isInfinite()) return 0.0 | ||||||
|     // distance is in "diopters", meaning 1/meter. Convert to meters, then centi-meters |     // distance is in "diopters", meaning 1/meter. Convert to meters, then centi-meters | ||||||
|     return 1.0 / distance * 100.0 |     return 1.0 / distance * 100.0 | ||||||
|   } |   } | ||||||
|  |  | ||||||
|  |   @Suppress("RedundantIf") | ||||||
|  |   private fun supportsSnapshotCapture(): Boolean { | ||||||
|  |     // As per CameraDevice.TEMPLATE_VIDEO_SNAPSHOT in documentation: | ||||||
|  |     if (hardwareLevel == HardwareLevel.LEGACY) return false | ||||||
|  |     if (supportsDepthCapture && !isBackwardsCompatible) return false | ||||||
|  |     return true | ||||||
|  |   } | ||||||
|  |  | ||||||
|   private fun createStabilizationModes(): ReadableArray { |   private fun createStabilizationModes(): ReadableArray { | ||||||
|     val array = Arguments.createArray() |     val array = Arguments.createArray() | ||||||
|     digitalStabilizationModes.forEach { videoStabilizationMode -> |     digitalStabilizationModes.forEach { videoStabilizationMode -> | ||||||
| @@ -146,6 +231,9 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun getFieldOfView(focalLength: Float): Double { |   private fun getFieldOfView(focalLength: Float): Double { | ||||||
|  |     if ((sensorSize.width == 0f) || (sensorSize.height == 0f)) { | ||||||
|  |       return 0.0 | ||||||
|  |     } | ||||||
|     val sensorDiagonal = sqrt((sensorSize.width * sensorSize.width + sensorSize.height * sensorSize.height).toDouble()) |     val sensorDiagonal = sqrt((sensorSize.width * sensorSize.width + sensorSize.height * sensorSize.height).toDouble()) | ||||||
|     val fovRadians = 2.0 * atan2(sensorDiagonal, (2.0 * focalLength)) |     val fovRadians = 2.0 * atan2(sensorDiagonal, (2.0 * focalLength)) | ||||||
|     return Math.toDegrees(fovRadians) |     return Math.toDegrees(fovRadians) | ||||||
| @@ -156,18 +244,31 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|     return getFieldOfView(smallestFocalLength) |     return getFieldOfView(smallestFocalLength) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun getVideoSizes(): List<Size> = characteristics.getVideoSizes(cameraId, videoFormat) |   fun getVideoSizes(format: Int): List<Size> = characteristics.getVideoSizes(cameraId, format) | ||||||
|   private fun getPhotoSizes(): List<Size> = characteristics.getPhotoSizes(ImageFormat.JPEG) |   fun getPhotoSizes(): List<Size> = characteristics.getPhotoSizes(photoFormat) | ||||||
|  |   fun getPreviewSizes(): List<Size> { | ||||||
|  |     val maximumPreviewSize = getMaximumPreviewSize() | ||||||
|  |     return cameraConfig.getOutputSizes(SurfaceHolder::class.java) | ||||||
|  |       .filter { it.bigger <= maximumPreviewSize.bigger && it.smaller <= maximumPreviewSize.smaller } | ||||||
|  |   } | ||||||
|  |  | ||||||
|   private fun getFormats(): ReadableArray { |   private fun getFormats(): ReadableArray { | ||||||
|     val array = Arguments.createArray() |     val array = Arguments.createArray() | ||||||
|  |  | ||||||
|     val videoSizes = getVideoSizes() |     val videoSizes = getVideoSizes(videoFormat) | ||||||
|     val photoSizes = getPhotoSizes() |     val photoSizes = getPhotoSizes() | ||||||
|  |  | ||||||
|     videoSizes.forEach { videoSize -> |     videoSizes.forEach { videoSize -> | ||||||
|       val frameDuration = cameraConfig.getOutputMinFrameDuration(videoFormat, videoSize) |       val frameDuration = cameraConfig.getOutputMinFrameDuration(videoFormat, videoSize) | ||||||
|       val maxFps = (1.0 / (frameDuration.toDouble() / 1_000_000_000)).toInt() |       var maxFps = (1.0 / (frameDuration.toDouble() / 1_000_000_000)).toInt() | ||||||
|  |       val maxEncoderFps = CamcorderProfileUtils.getMaximumFps(cameraId, videoSize) | ||||||
|  |       if (maxEncoderFps != null && maxEncoderFps < maxFps) { | ||||||
|  |         Log.i( | ||||||
|  |           TAG, | ||||||
|  |           "Camera could do $maxFps FPS at $videoSize, but Media Encoder can only do $maxEncoderFps FPS. Clamping to $maxEncoderFps FPS..." | ||||||
|  |         ) | ||||||
|  |         maxFps = maxEncoderFps | ||||||
|  |       } | ||||||
|  |  | ||||||
|       photoSizes.forEach { photoSize -> |       photoSizes.forEach { photoSize -> | ||||||
|         val map = buildFormatMap(photoSize, videoSize, Range(1, maxFps)) |         val map = buildFormatMap(photoSize, videoSize, Range(1, maxFps)) | ||||||
| @@ -175,8 +276,6 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|       } |       } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // TODO: Add high-speed video ranges (high-fps / slow-motion) |  | ||||||
|  |  | ||||||
|     return array |     return array | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -223,14 +322,14 @@ class CameraDeviceDetails(val cameraManager: CameraManager, val cameraId: String | |||||||
|     map.putBoolean("isMultiCam", isMultiCam) |     map.putBoolean("isMultiCam", isMultiCam) | ||||||
|     map.putBoolean("supportsRawCapture", supportsRawCapture) |     map.putBoolean("supportsRawCapture", supportsRawCapture) | ||||||
|     map.putBoolean("supportsLowLightBoost", supportsLowLightBoost) |     map.putBoolean("supportsLowLightBoost", supportsLowLightBoost) | ||||||
|     map.putBoolean("supportsFocus", true) // I believe every device here supports focussing |     map.putBoolean("supportsFocus", supportsFocusRegions) | ||||||
|     map.putDouble("minZoom", minZoom) |     map.putDouble("minZoom", minZoom) | ||||||
|     map.putDouble("maxZoom", maxZoom) |     map.putDouble("maxZoom", maxZoom) | ||||||
|     map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android |     map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android | ||||||
|     map.putDouble("minExposure", exposureRange.lower.toDouble()) |     map.putDouble("minExposure", exposureRange.lower.toDouble()) | ||||||
|     map.putDouble("maxExposure", exposureRange.upper.toDouble()) |     map.putDouble("maxExposure", exposureRange.upper.toDouble()) | ||||||
|     map.putString("hardwareLevel", hardwareLevel.unionValue) |     map.putString("hardwareLevel", hardwareLevel.unionValue) | ||||||
|     map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue) |     map.putString("sensorOrientation", sensorOrientation.unionValue) | ||||||
|     map.putArray("formats", getFormats()) |     map.putArray("formats", getFormats()) | ||||||
|     return map |     return map | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -62,6 +62,8 @@ class FlashUnavailableError : | |||||||
|     "flash-unavailable", |     "flash-unavailable", | ||||||
|     "The Camera Device does not have a flash unit! Make sure you select a device where `device.hasFlash`/`device.hasTorch` is true." |     "The Camera Device does not have a flash unit! Make sure you select a device where `device.hasFlash`/`device.hasTorch` is true." | ||||||
|   ) |   ) | ||||||
|  | class FocusNotSupportedError : | ||||||
|  |   CameraError("device", "focus-not-supported", "The currently selected camera device does not support focusing!") | ||||||
|  |  | ||||||
| class CameraNotReadyError : | class CameraNotReadyError : | ||||||
|   CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!") |   CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!") | ||||||
| @@ -71,6 +73,8 @@ class CameraSessionCannotBeConfiguredError(cameraId: String) : | |||||||
|   CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera #$cameraId!") |   CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera #$cameraId!") | ||||||
| class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : | class CameraDisconnectedError(cameraId: String, error: CameraDeviceError) : | ||||||
|   CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error") |   CameraError("session", "camera-has-been-disconnected", "The given Camera device (id: $cameraId) has been disconnected! Error: $error") | ||||||
|  | class NoOutputsError : | ||||||
|  |   CameraError("session", "no-outputs", "Cannot create a CameraCaptureSession without any outputs! (PREVIEW, PHOTO, VIDEO, ...)") | ||||||
|  |  | ||||||
| class PropRequiresFormatToBeNonNullError(propName: String) : | class PropRequiresFormatToBeNonNullError(propName: String) : | ||||||
|   CameraError("format", "format-required", "The prop \"$propName\" requires a format to be set, but format was null!") |   CameraError("format", "format-required", "The prop \"$propName\" requires a format to be set, but format was null!") | ||||||
| @@ -100,6 +104,8 @@ class PhotoNotEnabledError : | |||||||
|   CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.") |   CameraError("capture", "photo-not-enabled", "Photo capture is disabled! Pass `photo={true}` to enable photo capture.") | ||||||
| class CaptureAbortedError(wasImageCaptured: Boolean) : | class CaptureAbortedError(wasImageCaptured: Boolean) : | ||||||
|   CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured") |   CameraError("capture", "aborted", "The image capture was aborted! Was Image captured: $wasImageCaptured") | ||||||
|  | class FocusCanceledError : CameraError("capture", "focus-canceled", "The focus operation was canceled.") | ||||||
|  | class CaptureTimedOutError : CameraError("capture", "timed-out", "The image capture was aborted because it timed out.") | ||||||
| class UnknownCaptureError(wasImageCaptured: Boolean) : | class UnknownCaptureError(wasImageCaptured: Boolean) : | ||||||
|   CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured") |   CameraError("capture", "unknown", "An unknown error occurred while trying to capture an Image! Was Image captured: $wasImageCaptured") | ||||||
| class RecorderError(name: String, extra: Int) : | class RecorderError(name: String, extra: Int) : | ||||||
| @@ -113,6 +119,16 @@ class RecordingInProgressError : | |||||||
|     "recording-in-progress", |     "recording-in-progress", | ||||||
|     "There is already an active video recording in progress! Did you call startRecording() twice?" |     "There is already an active video recording in progress! Did you call startRecording() twice?" | ||||||
|   ) |   ) | ||||||
|  | class FrameInvalidError : | ||||||
|  |   CameraError( | ||||||
|  |     "capture", | ||||||
|  |     "frame-invalid", | ||||||
|  |     "Trying to access an already closed Frame! " + | ||||||
|  |       "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n" + | ||||||
|  |       "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n" + | ||||||
|  |       "- If you want to do async processing, use `runAsync(...)` instead.\n" + | ||||||
|  |       "- If you want to use runOnJS, increment it's ref-count: `frame.incrementRefCount()`" | ||||||
|  |   ) | ||||||
|  |  | ||||||
| class CodeTypeNotSupportedError(codeType: String) : | class CodeTypeNotSupportedError(codeType: String) : | ||||||
|   CameraError( |   CameraError( | ||||||
|   | |||||||
| @@ -5,59 +5,43 @@ import android.content.Context | |||||||
| import android.content.pm.PackageManager | import android.content.pm.PackageManager | ||||||
| import android.graphics.ImageFormat | import android.graphics.ImageFormat | ||||||
| import android.graphics.Point | import android.graphics.Point | ||||||
| import android.hardware.camera2.CameraCaptureSession |  | ||||||
| import android.hardware.camera2.CameraCharacteristics | import android.hardware.camera2.CameraCharacteristics | ||||||
| import android.hardware.camera2.CameraDevice |  | ||||||
| import android.hardware.camera2.CameraManager | import android.hardware.camera2.CameraManager | ||||||
| import android.hardware.camera2.CameraMetadata |  | ||||||
| import android.hardware.camera2.CaptureRequest |  | ||||||
| import android.hardware.camera2.CaptureResult | import android.hardware.camera2.CaptureResult | ||||||
| import android.hardware.camera2.TotalCaptureResult | import android.hardware.camera2.TotalCaptureResult | ||||||
| import android.hardware.camera2.params.MeteringRectangle |  | ||||||
| import android.media.Image | import android.media.Image | ||||||
| import android.media.ImageReader | import android.media.ImageReader | ||||||
| import android.os.Build |  | ||||||
| import android.util.Log | import android.util.Log | ||||||
| import android.util.Range |  | ||||||
| import android.util.Size | import android.util.Size | ||||||
| import android.view.Surface | import android.view.Surface | ||||||
| import android.view.SurfaceHolder | import android.view.SurfaceHolder | ||||||
| import androidx.core.content.ContextCompat | import androidx.core.content.ContextCompat | ||||||
| import com.google.mlkit.vision.barcode.common.Barcode | import com.google.mlkit.vision.barcode.common.Barcode | ||||||
|  | import com.mrousavy.camera.core.capture.RepeatingCaptureRequest | ||||||
| import com.mrousavy.camera.core.outputs.BarcodeScannerOutput | import com.mrousavy.camera.core.outputs.BarcodeScannerOutput | ||||||
| import com.mrousavy.camera.core.outputs.PhotoOutput | import com.mrousavy.camera.core.outputs.PhotoOutput | ||||||
| import com.mrousavy.camera.core.outputs.SurfaceOutput | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
| import com.mrousavy.camera.core.outputs.VideoPipelineOutput | import com.mrousavy.camera.core.outputs.VideoPipelineOutput | ||||||
| import com.mrousavy.camera.extensions.capture |  | ||||||
| import com.mrousavy.camera.extensions.closestToOrMax | import com.mrousavy.camera.extensions.closestToOrMax | ||||||
| import com.mrousavy.camera.extensions.createCaptureSession |  | ||||||
| import com.mrousavy.camera.extensions.createPhotoCaptureRequest |  | ||||||
| import com.mrousavy.camera.extensions.getPhotoSizes |  | ||||||
| import com.mrousavy.camera.extensions.getPreviewTargetSize |  | ||||||
| import com.mrousavy.camera.extensions.getVideoSizes |  | ||||||
| import com.mrousavy.camera.extensions.openCamera |  | ||||||
| import com.mrousavy.camera.extensions.setZoom |  | ||||||
| import com.mrousavy.camera.frameprocessor.Frame | import com.mrousavy.camera.frameprocessor.Frame | ||||||
| import com.mrousavy.camera.frameprocessor.FrameProcessor |  | ||||||
| import com.mrousavy.camera.types.Flash | import com.mrousavy.camera.types.Flash | ||||||
|  | import com.mrousavy.camera.types.LensFacing | ||||||
| import com.mrousavy.camera.types.Orientation | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.QualityPrioritization | import com.mrousavy.camera.types.QualityPrioritization | ||||||
| import com.mrousavy.camera.types.RecordVideoOptions | import com.mrousavy.camera.types.RecordVideoOptions | ||||||
| import com.mrousavy.camera.types.Torch |  | ||||||
| import com.mrousavy.camera.types.VideoStabilizationMode |  | ||||||
| import com.mrousavy.camera.utils.ImageFormatUtils | import com.mrousavy.camera.utils.ImageFormatUtils | ||||||
| import java.io.Closeable | import java.io.Closeable | ||||||
| import java.lang.IllegalStateException | import kotlin.coroutines.cancellation.CancellationException | ||||||
| import java.util.concurrent.CancellationException |  | ||||||
| import kotlinx.coroutines.CoroutineScope | import kotlinx.coroutines.CoroutineScope | ||||||
| import kotlinx.coroutines.launch | import kotlinx.coroutines.launch | ||||||
| import kotlinx.coroutines.runBlocking | import kotlinx.coroutines.runBlocking | ||||||
| import kotlinx.coroutines.sync.Mutex | import kotlinx.coroutines.sync.Mutex | ||||||
| import kotlinx.coroutines.sync.withLock | import kotlinx.coroutines.sync.withLock | ||||||
|  | import java.io.File | ||||||
|  |  | ||||||
| class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) : | class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) : | ||||||
|   CameraManager.AvailabilityCallback(), |   Closeable, | ||||||
|   Closeable { |   PersistentCameraCaptureSession.Callback { | ||||||
|   companion object { |   companion object { | ||||||
|     private const val TAG = "CameraSession" |     private const val TAG = "CameraSession" | ||||||
|   } |   } | ||||||
| @@ -66,14 +50,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|   private var configuration: CameraConfiguration? = null |   private var configuration: CameraConfiguration? = null | ||||||
|  |  | ||||||
|   // Camera State |   // Camera State | ||||||
|   private var cameraDevice: CameraDevice? = null |   private val captureSession = PersistentCameraCaptureSession(cameraManager, this) | ||||||
|     set(value) { |  | ||||||
|       field = value |  | ||||||
|       cameraDeviceDetails = if (value != null) CameraDeviceDetails(cameraManager, value.id) else null |  | ||||||
|     } |  | ||||||
|   private var cameraDeviceDetails: CameraDeviceDetails? = null |  | ||||||
|   private var captureSession: CameraCaptureSession? = null |  | ||||||
|   private var previewRequest: CaptureRequest.Builder? = null |  | ||||||
|   private var photoOutput: PhotoOutput? = null |   private var photoOutput: PhotoOutput? = null | ||||||
|   private var videoOutput: VideoPipelineOutput? = null |   private var videoOutput: VideoPipelineOutput? = null | ||||||
|   private var codeScannerOutput: BarcodeScannerOutput? = null |   private var codeScannerOutput: BarcodeScannerOutput? = null | ||||||
| @@ -101,11 +78,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       field = value |       field = value | ||||||
|       updateVideoOutputs() |       updateVideoOutputs() | ||||||
|     } |     } | ||||||
|   var frameProcessor: FrameProcessor? = null |  | ||||||
|     set(value) { |  | ||||||
|       field = value |  | ||||||
|       updateVideoOutputs() |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|   val orientation: Orientation |   val orientation: Orientation | ||||||
|     get() { |     get() { | ||||||
| @@ -115,14 +87,9 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       return Orientation.fromRotationDegrees(sensorRotation) |       return Orientation.fromRotationDegrees(sensorRotation) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   init { |  | ||||||
|     cameraManager.registerAvailabilityCallback(this, CameraQueues.cameraQueue.handler) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   override fun close() { |   override fun close() { | ||||||
|     Log.i(TAG, "Closing CameraSession...") |     Log.i(TAG, "Closing CameraSession...") | ||||||
|     isDestroyed = true |     isDestroyed = true | ||||||
|     cameraManager.unregisterAvailabilityCallback(this) |  | ||||||
|     runBlocking { |     runBlocking { | ||||||
|       mutex.withLock { |       mutex.withLock { | ||||||
|         destroy() |         destroy() | ||||||
| @@ -132,18 +99,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     Log.i(TAG, "CameraSession closed!") |     Log.i(TAG, "CameraSession closed!") | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun onCameraAvailable(cameraId: String) { |  | ||||||
|     super.onCameraAvailable(cameraId) |  | ||||||
|     if (this.configuration?.cameraId == cameraId && cameraDevice == null && configuration?.isActive == true) { |  | ||||||
|       Log.i(TAG, "Camera #$cameraId is now available again, trying to re-open it now...") |  | ||||||
|       coroutineScope.launch { |  | ||||||
|         configure { |  | ||||||
|           // re-open CameraDevice if needed |  | ||||||
|         } |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   suspend fun configure(lambda: (configuration: CameraConfiguration) -> Unit) { |   suspend fun configure(lambda: (configuration: CameraConfiguration) -> Unit) { | ||||||
|     Log.i(TAG, "configure { ... }: Waiting for lock...") |     Log.i(TAG, "configure { ... }: Waiting for lock...") | ||||||
|  |  | ||||||
| @@ -152,6 +107,12 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       val config = CameraConfiguration.copyOf(this.configuration) |       val config = CameraConfiguration.copyOf(this.configuration) | ||||||
|       lambda(config) |       lambda(config) | ||||||
|       val diff = CameraConfiguration.difference(this.configuration, config) |       val diff = CameraConfiguration.difference(this.configuration, config) | ||||||
|  |       this.configuration = config | ||||||
|  |  | ||||||
|  |       if (!diff.hasChanges) { | ||||||
|  |         Log.i(TAG, "Nothing changed, aborting configure { ... }") | ||||||
|  |         return@withLock | ||||||
|  |       } | ||||||
|  |  | ||||||
|       if (isDestroyed) { |       if (isDestroyed) { | ||||||
|         Log.i(TAG, "CameraSession is already destroyed. Skipping configure { ... }") |         Log.i(TAG, "CameraSession is already destroyed. Skipping configure { ... }") | ||||||
| @@ -161,29 +122,11 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       Log.i(TAG, "configure { ... }: Updating CameraSession Configuration... $diff") |       Log.i(TAG, "configure { ... }: Updating CameraSession Configuration... $diff") | ||||||
|  |  | ||||||
|       try { |       try { | ||||||
|         val needsRebuild = cameraDevice == null || captureSession == null |         captureSession.withConfiguration { | ||||||
|         if (needsRebuild) { |  | ||||||
|           Log.i(TAG, "Need to rebuild CameraDevice and CameraCaptureSession...") |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|         // Since cameraDevice and captureSession are OS resources, we have three possible paths here: |  | ||||||
|         if (needsRebuild) { |  | ||||||
|           if (config.isActive) { |  | ||||||
|             // A: The Camera has been torn down by the OS and we want it to be active - rebuild everything |  | ||||||
|             Log.i(TAG, "Need to rebuild CameraDevice and CameraCaptureSession...") |  | ||||||
|             configureCameraDevice(config) |  | ||||||
|             configureOutputs(config) |  | ||||||
|             configureCaptureRequest(config) |  | ||||||
|           } else { |  | ||||||
|             // B: The Camera has been torn down by the OS but it's currently in the background - ignore this |  | ||||||
|             Log.i(TAG, "CameraDevice and CameraCaptureSession is torn down but Camera is not active, skipping update...") |  | ||||||
|           } |  | ||||||
|         } else { |  | ||||||
|           // C: The Camera has not been torn down and we just want to update some props - update incrementally |  | ||||||
|           // Build up session or update any props |           // Build up session or update any props | ||||||
|           if (diff.deviceChanged) { |           if (diff.deviceChanged) { | ||||||
|             // 1. cameraId changed, open device |             // 1. cameraId changed, open device | ||||||
|             configureCameraDevice(config) |             configureInput(config) | ||||||
|           } |           } | ||||||
|           if (diff.outputsChanged) { |           if (diff.outputsChanged) { | ||||||
|             // 2. outputs changed, build new session |             // 2. outputs changed, build new session | ||||||
| @@ -193,10 +136,18 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|             // 3. zoom etc changed, update repeating request |             // 3. zoom etc changed, update repeating request | ||||||
|             configureCaptureRequest(config) |             configureCaptureRequest(config) | ||||||
|           } |           } | ||||||
|  |           if (diff.isActiveChanged) { | ||||||
|  |             // 4. Either start or stop the session | ||||||
|  |             val isActive = config.isActive && config.preview.isEnabled | ||||||
|  |             captureSession.setIsActive(isActive) | ||||||
|  |           } | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         Log.i(TAG, "Successfully updated CameraSession Configuration! isActive: ${config.isActive}") |         Log.i( | ||||||
|         this.configuration = config |           TAG, | ||||||
|  |           "configure { ... }: Completed CameraSession Configuration! (isActive: ${config.isActive}, isRunning: ${captureSession.isRunning})" | ||||||
|  |         ) | ||||||
|  |         isRunning = captureSession.isRunning | ||||||
|  |  | ||||||
|         // Notify about Camera initialization |         // Notify about Camera initialization | ||||||
|         if (diff.deviceChanged) { |         if (diff.deviceChanged) { | ||||||
| @@ -211,8 +162,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|  |  | ||||||
|   private fun destroy() { |   private fun destroy() { | ||||||
|     Log.i(TAG, "Destroying session..") |     Log.i(TAG, "Destroying session..") | ||||||
|     cameraDevice?.close() |     captureSession.close() | ||||||
|     cameraDevice = null |  | ||||||
|  |  | ||||||
|     photoOutput?.close() |     photoOutput?.close() | ||||||
|     photoOutput = null |     photoOutput = null | ||||||
| @@ -268,66 +218,20 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     Log.i(TAG, "Preview Output destroyed!") |     Log.i(TAG, "Preview Output destroyed!") | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |   private fun configureInput(configuration: CameraConfiguration) { | ||||||
|    * Set up the `CameraDevice` (`cameraId`) |     Log.i(TAG, "Configuring inputs for CameraSession...") | ||||||
|    */ |  | ||||||
|   private suspend fun configureCameraDevice(configuration: CameraConfiguration) { |  | ||||||
|     if (!configuration.isActive) { |  | ||||||
|       // If isActive=false, we don't care if the device is opened or closed. |  | ||||||
|       // Android OS can close the CameraDevice if it needs it, otherwise we keep it warm. |  | ||||||
|       Log.i(TAG, "isActive is false, skipping CameraDevice configuration.") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     if (cameraDevice != null) { |  | ||||||
|       // Close existing device |  | ||||||
|       Log.i(TAG, "Closing previous Camera #${cameraDevice?.id}...") |  | ||||||
|       cameraDevice?.close() |  | ||||||
|       cameraDevice = null |  | ||||||
|     } |  | ||||||
|     isRunning = false |  | ||||||
|  |  | ||||||
|     // Check Camera Permission |  | ||||||
|     val cameraPermission = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) |  | ||||||
|     if (cameraPermission != PackageManager.PERMISSION_GRANTED) throw CameraPermissionError() |  | ||||||
|  |  | ||||||
|     // Open new device |  | ||||||
|     val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() |     val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() | ||||||
|     Log.i(TAG, "Configuring Camera #$cameraId...") |     val status = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) | ||||||
|     cameraDevice = cameraManager.openCamera(cameraId, { device, error -> |     if (status != PackageManager.PERMISSION_GRANTED) throw CameraPermissionError() | ||||||
|       if (cameraDevice != device) { |  | ||||||
|         // a previous device has been disconnected, but we already have a new one. |  | ||||||
|         // this is just normal behavior |  | ||||||
|         return@openCamera |  | ||||||
|       } |  | ||||||
|  |  | ||||||
|       this.cameraDevice = null |  | ||||||
|     isRunning = false |     isRunning = false | ||||||
|  |     captureSession.setInput(cameraId) | ||||||
|       if (error != null) { |  | ||||||
|         Log.e(TAG, "Camera #${device.id} has been unexpectedly disconnected!", error) |  | ||||||
|         callback.onError(error) |  | ||||||
|       } else { |  | ||||||
|         Log.i(TAG, "Camera #${device.id} has been gracefully disconnected!") |  | ||||||
|       } |  | ||||||
|     }, CameraQueues.cameraQueue) |  | ||||||
|  |  | ||||||
|     Log.i(TAG, "Successfully configured Camera #$cameraId!") |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |   /** | ||||||
|    * Set up the `CaptureSession` with all outputs (preview, photo, video, codeScanner) and their HDR/Format settings. |    * Set up the `CaptureSession` with all outputs (preview, photo, video, codeScanner) and their HDR/Format settings. | ||||||
|    */ |    */ | ||||||
|   private suspend fun configureOutputs(configuration: CameraConfiguration) { |   private suspend fun configureOutputs(configuration: CameraConfiguration) { | ||||||
|     if (!configuration.isActive) { |     val cameraId = configuration.cameraId ?: throw NoCameraDeviceError() | ||||||
|       Log.i(TAG, "isActive is false, skipping CameraCaptureSession configuration.") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|     val cameraDevice = cameraDevice |  | ||||||
|     if (cameraDevice == null) { |  | ||||||
|       Log.i(TAG, "CameraSession hasn't configured a CameraDevice, skipping session configuration...") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Destroy previous outputs |     // Destroy previous outputs | ||||||
|     Log.i(TAG, "Destroying previous outputs...") |     Log.i(TAG, "Destroying previous outputs...") | ||||||
| @@ -339,20 +243,20 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     codeScannerOutput = null |     codeScannerOutput = null | ||||||
|     isRunning = false |     isRunning = false | ||||||
|  |  | ||||||
|     val characteristics = cameraManager.getCameraCharacteristics(cameraDevice.id) |     val deviceDetails = CameraDeviceDetails(cameraManager, cameraId) | ||||||
|     val format = configuration.format |     val format = configuration.format | ||||||
|  |  | ||||||
|     Log.i(TAG, "Creating outputs for Camera #${cameraDevice.id}...") |     Log.i(TAG, "Creating outputs for Camera #$cameraId...") | ||||||
|  |  | ||||||
|     val isSelfie = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT |     val isSelfie = deviceDetails.lensFacing == LensFacing.FRONT | ||||||
|  |  | ||||||
|     val outputs = mutableListOf<SurfaceOutput>() |     val outputs = mutableListOf<SurfaceOutput>() | ||||||
|  |  | ||||||
|     // Photo Output |     // Photo Output | ||||||
|     val photo = configuration.photo as? CameraConfiguration.Output.Enabled<CameraConfiguration.Photo> |     val photo = configuration.photo as? CameraConfiguration.Output.Enabled<CameraConfiguration.Photo> | ||||||
|     if (photo != null) { |     if (photo != null) { | ||||||
|       val imageFormat = ImageFormat.JPEG |       val imageFormat = deviceDetails.photoFormat | ||||||
|       val sizes = characteristics.getPhotoSizes(imageFormat) |       val sizes = deviceDetails.getPhotoSizes() | ||||||
|       val size = sizes.closestToOrMax(format?.photoSize) |       val size = sizes.closestToOrMax(format?.photoSize) | ||||||
|       val maxImages = 10 |       val maxImages = 10 | ||||||
|  |  | ||||||
| @@ -372,7 +276,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     val video = configuration.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> |     val video = configuration.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> | ||||||
|     if (video != null) { |     if (video != null) { | ||||||
|       val imageFormat = video.config.pixelFormat.toImageFormat() |       val imageFormat = video.config.pixelFormat.toImageFormat() | ||||||
|       val sizes = characteristics.getVideoSizes(cameraDevice.id, imageFormat) |       val sizes = deviceDetails.getVideoSizes(imageFormat) | ||||||
|       val size = sizes.closestToOrMax(format?.videoSize) |       val size = sizes.closestToOrMax(format?.videoSize) | ||||||
|  |  | ||||||
|       Log.i(TAG, "Adding ${size.width}x${size.height} Video Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") |       Log.i(TAG, "Adding ${size.width}x${size.height} Video Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") | ||||||
| @@ -382,6 +286,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|         video.config.pixelFormat, |         video.config.pixelFormat, | ||||||
|         isSelfie, |         isSelfie, | ||||||
|         video.config.enableFrameProcessor, |         video.config.enableFrameProcessor, | ||||||
|  |         video.config.enableGpuBuffers, | ||||||
|         callback |         callback | ||||||
|       ) |       ) | ||||||
|       val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr) |       val output = VideoPipelineOutput(videoPipeline, video.config.enableHdr) | ||||||
| @@ -394,7 +299,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     if (preview != null) { |     if (preview != null) { | ||||||
|       // Compute Preview Size based on chosen video size |       // Compute Preview Size based on chosen video size | ||||||
|       val videoSize = videoOutput?.size ?: format?.videoSize |       val videoSize = videoOutput?.size ?: format?.videoSize | ||||||
|       val size = characteristics.getPreviewTargetSize(videoSize) |       val sizes = deviceDetails.getPreviewSizes() | ||||||
|  |       val size = sizes.closestToOrMax(videoSize) | ||||||
|  |  | ||||||
|       val enableHdr = video?.config?.enableHdr ?: false |       val enableHdr = video?.config?.enableHdr ?: false | ||||||
|  |  | ||||||
| @@ -406,8 +312,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|         enableHdr |         enableHdr | ||||||
|       ) |       ) | ||||||
|       outputs.add(output) |       outputs.add(output) | ||||||
|       // Size is usually landscape, so we flip it here |  | ||||||
|       previewView?.size = Size(size.height, size.width) |       previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // CodeScanner Output |     // CodeScanner Output | ||||||
| @@ -420,7 +326,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|       } |       } | ||||||
|  |  | ||||||
|       val imageFormat = ImageFormat.YUV_420_888 |       val imageFormat = ImageFormat.YUV_420_888 | ||||||
|       val sizes = characteristics.getVideoSizes(cameraDevice.id, imageFormat) |       val sizes = deviceDetails.getVideoSizes(imageFormat) | ||||||
|       val size = sizes.closestToOrMax(Size(1280, 720)) |       val size = sizes.closestToOrMax(Size(1280, 720)) | ||||||
|  |  | ||||||
|       Log.i(TAG, "Adding ${size.width}x${size.height} CodeScanner Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") |       Log.i(TAG, "Adding ${size.width}x${size.height} CodeScanner Output in ${ImageFormatUtils.imageFormatToString(imageFormat)}...") | ||||||
| @@ -431,175 +337,63 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     // Create session |     // Create session | ||||||
|     captureSession = cameraDevice.createCaptureSession(cameraManager, outputs, { session -> |     captureSession.setOutputs(outputs) | ||||||
|       if (this.captureSession != session) { |  | ||||||
|         // a previous session has been closed, but we already have a new one. |  | ||||||
|         // this is just normal behavior |  | ||||||
|         return@createCaptureSession |  | ||||||
|       } |  | ||||||
|  |  | ||||||
|       // onClosed |     Log.i(TAG, "Successfully configured Session with ${outputs.size} outputs for Camera #$cameraId!") | ||||||
|       this.captureSession = null |  | ||||||
|       isRunning = false |  | ||||||
|  |  | ||||||
|       Log.i(TAG, "Camera Session $session has been closed.") |  | ||||||
|     }, CameraQueues.cameraQueue) |  | ||||||
|  |  | ||||||
|     Log.i(TAG, "Successfully configured Session with ${outputs.size} outputs for Camera #${cameraDevice.id}!") |  | ||||||
|  |  | ||||||
|     // Update Frame Processor and RecordingSession for newly changed output |     // Update Frame Processor and RecordingSession for newly changed output | ||||||
|     updateVideoOutputs() |     updateVideoOutputs() | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun createRepeatingRequest(device: CameraDevice, targets: List<Surface>, config: CameraConfiguration): CaptureRequest { |  | ||||||
|     val deviceDetails = cameraDeviceDetails ?: CameraDeviceDetails(cameraManager, device.id) |  | ||||||
|  |  | ||||||
|     val template = if (config.video.isEnabled) CameraDevice.TEMPLATE_RECORD else CameraDevice.TEMPLATE_PREVIEW |  | ||||||
|     val captureRequest = device.createCaptureRequest(template) |  | ||||||
|  |  | ||||||
|     targets.forEach { t -> captureRequest.addTarget(t) } |  | ||||||
|  |  | ||||||
|     val format = config.format |  | ||||||
|  |  | ||||||
|     // Set FPS |  | ||||||
|     val fps = config.fps |  | ||||||
|     if (fps != null) { |  | ||||||
|       if (format == null) throw PropRequiresFormatToBeNonNullError("fps") |  | ||||||
|       if (format.maxFps < fps) throw InvalidFpsError(fps) |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps)) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set Video Stabilization |  | ||||||
|     if (config.videoStabilizationMode != VideoStabilizationMode.OFF) { |  | ||||||
|       if (format == null) throw PropRequiresFormatToBeNonNullError("videoStabilizationMode") |  | ||||||
|       if (!format.videoStabilizationModes.contains( |  | ||||||
|           config.videoStabilizationMode |  | ||||||
|         ) |  | ||||||
|       ) { |  | ||||||
|         throw InvalidVideoStabilizationMode(config.videoStabilizationMode) |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|     when (config.videoStabilizationMode) { |  | ||||||
|       VideoStabilizationMode.OFF -> { |  | ||||||
|         // do nothing |  | ||||||
|       } |  | ||||||
|       VideoStabilizationMode.STANDARD -> { |  | ||||||
|         val mode = if (Build.VERSION.SDK_INT >= |  | ||||||
|           Build.VERSION_CODES.TIRAMISU |  | ||||||
|         ) { |  | ||||||
|           CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION |  | ||||||
|         } else { |  | ||||||
|           CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON |  | ||||||
|         } |  | ||||||
|         captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode) |  | ||||||
|       } |  | ||||||
|       VideoStabilizationMode.CINEMATIC, VideoStabilizationMode.CINEMATIC_EXTENDED -> { |  | ||||||
|         captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set HDR |  | ||||||
|     val video = config.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> |  | ||||||
|     val videoHdr = video?.config?.enableHdr |  | ||||||
|     if (videoHdr == true) { |  | ||||||
|       if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr") |  | ||||||
|       if (!format.supportsVideoHdr) throw InvalidVideoHdrError() |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) |  | ||||||
|     } else if (config.enableLowLightBoost) { |  | ||||||
|       if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError() |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set Exposure Bias |  | ||||||
|     val exposure = config.exposure?.toInt() |  | ||||||
|     if (exposure != null) { |  | ||||||
|       val clamped = deviceDetails.exposureRange.clamp(exposure) |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, clamped) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Set Zoom |  | ||||||
|     // TODO: Cache camera characteristics? Check perf. |  | ||||||
|     val cameraCharacteristics = cameraManager.getCameraCharacteristics(device.id) |  | ||||||
|     captureRequest.setZoom(config.zoom, cameraCharacteristics) |  | ||||||
|  |  | ||||||
|     // Set Torch |  | ||||||
|     if (config.torch == Torch.ON) { |  | ||||||
|       if (!deviceDetails.hasFlash) throw FlashUnavailableError() |  | ||||||
|       captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Start repeating request if the Camera is active |  | ||||||
|     return captureRequest.build() |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   private fun configureCaptureRequest(config: CameraConfiguration) { |   private fun configureCaptureRequest(config: CameraConfiguration) { | ||||||
|     val captureSession = captureSession |     val video = config.video as? CameraConfiguration.Output.Enabled<CameraConfiguration.Video> | ||||||
|  |     val enableVideo = video != null | ||||||
|  |     val enableVideoHdr = video?.config?.enableHdr == true | ||||||
|  |  | ||||||
|     if (!config.isActive) { |     captureSession.setRepeatingRequest( | ||||||
|       isRunning = false |       RepeatingCaptureRequest( | ||||||
|       try { |         enableVideo, | ||||||
|         captureSession?.stopRepeating() |         config.torch, | ||||||
|       } catch (e: IllegalStateException) { |         config.fps, | ||||||
|         // ignore - captureSession is already closed. |         config.videoStabilizationMode, | ||||||
|       } |         enableVideoHdr, | ||||||
|       return |         config.enableLowLightBoost, | ||||||
|     } |         config.exposure, | ||||||
|     if (captureSession == null) { |         config.zoom, | ||||||
|       Log.i(TAG, "CameraSession hasn't configured the capture session, skipping CaptureRequest...") |         config.format | ||||||
|       return |       ) | ||||||
|     } |     ) | ||||||
|  |  | ||||||
|     val preview = config.preview as? CameraConfiguration.Output.Enabled<CameraConfiguration.Preview> |  | ||||||
|     val previewSurface = preview?.config?.surface |  | ||||||
|     val targets = listOfNotNull(previewSurface, videoOutput?.surface, codeScannerOutput?.surface) |  | ||||||
|     if (targets.isEmpty()) { |  | ||||||
|       Log.i(TAG, "CameraSession has no repeating outputs (Preview, Video, CodeScanner), skipping CaptureRequest...") |  | ||||||
|       return |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     val request = createRepeatingRequest(captureSession.device, targets, config) |  | ||||||
|     captureSession.setRepeatingRequest(request, null, null) |  | ||||||
|     isRunning = true |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   suspend fun takePhoto( |   suspend fun takePhoto( | ||||||
|     qualityPrioritization: QualityPrioritization, |     qualityPrioritization: QualityPrioritization, | ||||||
|     flashMode: Flash, |     flash: Flash, | ||||||
|     enableShutterSound: Boolean, |     enableShutterSound: Boolean, | ||||||
|     enableRedEyeReduction: Boolean, |  | ||||||
|     enableAutoStabilization: Boolean, |     enableAutoStabilization: Boolean, | ||||||
|  |     enablePrecapture: Boolean, | ||||||
|     outputOrientation: Orientation |     outputOrientation: Orientation | ||||||
|   ): CapturedPhoto { |   ): CapturedPhoto { | ||||||
|     val captureSession = captureSession ?: throw CameraNotReadyError() |  | ||||||
|     val photoOutput = photoOutput ?: throw PhotoNotEnabledError() |     val photoOutput = photoOutput ?: throw PhotoNotEnabledError() | ||||||
|  |  | ||||||
|     Log.i(TAG, "Photo capture 0/3 - preparing capture request (${photoOutput.size.width}x${photoOutput.size.height})...") |     Log.i(TAG, "Photo capture 1/3 - capturing ${photoOutput.size.width}x${photoOutput.size.height} image...") | ||||||
|  |     val result = captureSession.capture( | ||||||
|     val zoom = configuration?.zoom ?: 1f |  | ||||||
|  |  | ||||||
|     val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) |  | ||||||
|     val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics) |  | ||||||
|     val captureRequest = captureSession.device.createPhotoCaptureRequest( |  | ||||||
|       cameraManager, |  | ||||||
|       photoOutput.surface, |  | ||||||
|       zoom, |  | ||||||
|       qualityPrioritization, |       qualityPrioritization, | ||||||
|       flashMode, |       flash, | ||||||
|       enableRedEyeReduction, |  | ||||||
|       enableAutoStabilization, |       enableAutoStabilization, | ||||||
|       photoOutput.enableHdr, |       photoOutput.enableHdr, | ||||||
|       orientation |       outputOrientation, | ||||||
|  |       enableShutterSound, | ||||||
|  |       enablePrecapture | ||||||
|     ) |     ) | ||||||
|     Log.i(TAG, "Photo capture 1/3 - starting capture...") |  | ||||||
|     val result = captureSession.capture(captureRequest, enableShutterSound) |  | ||||||
|     val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! |  | ||||||
|     Log.i(TAG, "Photo capture 2/3 complete - received metadata with timestamp $timestamp") |  | ||||||
|     try { |     try { | ||||||
|  |       val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!! | ||||||
|  |       Log.i(TAG, "Photo capture 2/3 - waiting for image with timestamp $timestamp now...") | ||||||
|       val image = photoOutputSynchronizer.await(timestamp) |       val image = photoOutputSynchronizer.await(timestamp) | ||||||
|  |  | ||||||
|       val isMirrored = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT |       Log.i(TAG, "Photo capture 3/3 - received ${image.width} x ${image.height} image, preparing result...") | ||||||
|  |       val deviceDetails = captureSession.getActiveDeviceDetails() | ||||||
|       Log.i(TAG, "Photo capture 3/3 complete - received ${image.width} x ${image.height} image.") |       val isMirrored = deviceDetails?.lensFacing == LensFacing.FRONT | ||||||
|       return CapturedPhoto(image, result, orientation, isMirrored, image.format) |       return CapturedPhoto(image, result, orientation, isMirrored, image.format) | ||||||
|     } catch (e: CancellationException) { |     } catch (e: CancellationException) { | ||||||
|       throw CaptureAbortedError(false) |       throw CaptureAbortedError(false) | ||||||
| @@ -620,27 +414,30 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|   suspend fun startRecording( |   suspend fun startRecording( | ||||||
|     enableAudio: Boolean, |     enableAudio: Boolean, | ||||||
|     options: RecordVideoOptions, |     options: RecordVideoOptions, | ||||||
|  |     filePath: String, | ||||||
|     callback: (video: RecordingSession.Video) -> Unit, |     callback: (video: RecordingSession.Video) -> Unit, | ||||||
|     onError: (error: CameraError) -> Unit |     onError: (error: CameraError) -> Unit | ||||||
|   ) { |   ) { | ||||||
|     mutex.withLock { |     mutex.withLock { | ||||||
|       if (recording != null) throw RecordingInProgressError() |       if (recording != null) throw RecordingInProgressError() | ||||||
|       val videoOutput = videoOutput ?: throw VideoNotEnabledError() |       val videoOutput = videoOutput ?: throw VideoNotEnabledError() | ||||||
|       val cameraDevice = cameraDevice ?: throw CameraNotReadyError() |       val cameraId = configuration?.cameraId ?: throw NoCameraDeviceError() | ||||||
|  |  | ||||||
|       val fps = configuration?.fps ?: 30 |       val fps = configuration?.fps ?: 30 | ||||||
|  |  | ||||||
|       val recording = RecordingSession( |       val recording = RecordingSession( | ||||||
|         context, |         context, | ||||||
|         cameraDevice.id, |         cameraId, | ||||||
|         videoOutput.size, |         videoOutput.size, | ||||||
|         enableAudio, |         enableAudio, | ||||||
|         fps, |         fps, | ||||||
|         videoOutput.enableHdr, |         videoOutput.enableHdr, | ||||||
|         orientation, |         orientation, | ||||||
|         options, |         options, | ||||||
|  |         filePath, | ||||||
|         callback, |         callback, | ||||||
|         onError |         onError, | ||||||
|  |         this.callback, | ||||||
|       ) |       ) | ||||||
|       recording.start() |       recording.start() | ||||||
|       this.recording = recording |       this.recording = recording | ||||||
| @@ -670,40 +467,16 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   suspend fun focus(x: Int, y: Int): Unit = throw NotImplementedError("focus() is not yet implemented!") |   override fun onError(error: Throwable) { | ||||||
|  |     callback.onError(error) | ||||||
|   private suspend fun focus(point: Point) { |  | ||||||
|     mutex.withLock { |  | ||||||
|       // TODO: Fix this method |  | ||||||
|       val captureSession = captureSession ?: throw CameraNotReadyError() |  | ||||||
|       val request = previewRequest ?: throw CameraNotReadyError() |  | ||||||
|  |  | ||||||
|       val weight = MeteringRectangle.METERING_WEIGHT_MAX - 1 |  | ||||||
|       val focusAreaTouch = MeteringRectangle(point, Size(150, 150), weight) |  | ||||||
|  |  | ||||||
|       // Quickly pause preview |  | ||||||
|       captureSession.stopRepeating() |  | ||||||
|  |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL) |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF) |  | ||||||
|       captureSession.capture(request.build(), null, null) |  | ||||||
|  |  | ||||||
|       // Add AF trigger with focus region |  | ||||||
|       val characteristics = cameraManager.getCameraCharacteristics(captureSession.device.id) |  | ||||||
|       val maxSupportedFocusRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) ?: 0 |  | ||||||
|       if (maxSupportedFocusRegions >= 1) { |  | ||||||
|         request.set(CaptureRequest.CONTROL_AF_REGIONS, arrayOf(focusAreaTouch)) |  | ||||||
|   } |   } | ||||||
|       request.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO) |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO) |  | ||||||
|       request.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START) |  | ||||||
|  |  | ||||||
|       captureSession.capture(request.build(), false) |   suspend fun focus(x: Int, y: Int) { | ||||||
|  |     val previewView = previewView ?: throw CameraNotReadyError() | ||||||
|  |     val deviceDetails = captureSession.getActiveDeviceDetails() ?: throw CameraNotReadyError() | ||||||
|  |  | ||||||
|       // Resume preview |     val cameraPoint = previewView.convertLayerPointToCameraCoordinates(Point(x, y), deviceDetails) | ||||||
|       request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE) |     captureSession.focus(cameraPoint) | ||||||
|       captureSession.setRepeatingRequest(request.build(), null, null) |  | ||||||
|     } |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   data class CapturedPhoto( |   data class CapturedPhoto( | ||||||
| @@ -724,6 +497,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam | |||||||
|     fun onInitialized() |     fun onInitialized() | ||||||
|     fun onStarted() |     fun onStarted() | ||||||
|     fun onStopped() |     fun onStopped() | ||||||
|  |     fun onVideoChunkReady(filepath: File, index: Int) | ||||||
|     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) |     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -13,12 +13,13 @@ import com.mrousavy.camera.types.RecordVideoOptions | |||||||
| import java.io.File | import java.io.File | ||||||
| import java.nio.ByteBuffer | import java.nio.ByteBuffer | ||||||
|  |  | ||||||
| class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) : | class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) : | ||||||
|   MediaCodec.Callback() { |   MediaCodec.Callback() { | ||||||
|   companion object { |   companion object { | ||||||
|     private const val TAG = "ChunkedRecorder" |     private const val TAG = "ChunkedRecorder" | ||||||
|  |  | ||||||
|     fun fromParams( |     fun fromParams( | ||||||
|  |       callbacks: CameraSession.Callback, | ||||||
|       size: Size, |       size: Size, | ||||||
|       enableAudio: Boolean, |       enableAudio: Boolean, | ||||||
|       fps: Int? = null, |       fps: Int? = null, | ||||||
| @@ -26,10 +27,11 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|       bitRate: Int, |       bitRate: Int, | ||||||
|       options: RecordVideoOptions, |       options: RecordVideoOptions, | ||||||
|       outputDirectory: File, |       outputDirectory: File, | ||||||
|       iFrameInterval: Int = 3 |       iFrameInterval: Int = 5 | ||||||
|     ): ChunkedRecordingManager { |     ): ChunkedRecordingManager { | ||||||
|       val mimeType = options.videoCodec.toMimeType() |       val mimeType = options.videoCodec.toMimeType() | ||||||
|       val orientationDegrees = cameraOrientation.toDegrees() |       val cameraOrientationDegrees = cameraOrientation.toDegrees() | ||||||
|  |       val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees(); | ||||||
|       val (width, height) = if (cameraOrientation.isLandscape()) { |       val (width, height) = if (cameraOrientation.isLandscape()) { | ||||||
|         size.height to size.width |         size.height to size.width | ||||||
|       } else { |       } else { | ||||||
| @@ -53,11 +55,13 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval) |       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval) | ||||||
|       format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate) |       format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate) | ||||||
|  |  | ||||||
|       Log.i(TAG, "Video Format: $format, orientation $cameraOrientation") |       Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees") | ||||||
|       // Create a MediaCodec encoder, and configure it with our format.  Get a Surface |       // Create a MediaCodec encoder, and configure it with our format.  Get a Surface | ||||||
|       // we can use for input and wrap it with a class that handles the EGL work. |       // we can use for input and wrap it with a class that handles the EGL work. | ||||||
|       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE) |       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE) | ||||||
|       return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval) |       return ChunkedRecordingManager( | ||||||
|  |         codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks | ||||||
|  |       ) | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -79,7 +83,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   // Muxer specific |   // Muxer specific | ||||||
|   private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) { |   private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat, val callbacks: CameraSession.Callback,) { | ||||||
|     val videoTrack: Int = muxer.addTrack(encodedFormat) |     val videoTrack: Int = muxer.addTrack(encodedFormat) | ||||||
|     val startTimeUs: Long = startTimeUs |     val startTimeUs: Long = startTimeUs | ||||||
|  |  | ||||||
| @@ -91,6 +95,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|     fun finish() { |     fun finish() { | ||||||
|       muxer.stop() |       muxer.stop() | ||||||
|       muxer.release() |       muxer.release() | ||||||
|  |       callbacks.onVideoChunkReady(filepath, chunkIndex) | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -109,7 +114,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu | |||||||
|     ) |     ) | ||||||
|     muxer.setOrientationHint(orientationHint) |     muxer.setOrientationHint(orientationHint) | ||||||
|     muxerContext = MuxerContext( |     muxerContext = MuxerContext( | ||||||
|       muxer, bufferInfo.presentationTimeUs, this.encodedFormat!! |         muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks | ||||||
|     ) |     ) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -39,7 +39,7 @@ class CodeScannerPipeline( | |||||||
|     var isBusy = false |     var isBusy = false | ||||||
|     imageReader = ImageReader.newInstance(size.width, size.height, format, MAX_IMAGES) |     imageReader = ImageReader.newInstance(size.width, size.height, format, MAX_IMAGES) | ||||||
|     imageReader.setOnImageAvailableListener({ reader -> |     imageReader.setOnImageAvailableListener({ reader -> | ||||||
|       val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener |       val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener | ||||||
|  |  | ||||||
|       if (isBusy) { |       if (isBusy) { | ||||||
|         // We're currently executing on a previous Frame, so we skip this one. |         // We're currently executing on a previous Frame, so we skip this one. | ||||||
|   | |||||||
| @@ -0,0 +1,378 @@ | |||||||
|  | package com.mrousavy.camera.core | ||||||
|  |  | ||||||
|  | import android.graphics.Point | ||||||
|  | import android.hardware.camera2.CameraAccessException | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CameraManager | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.hardware.camera2.TotalCaptureResult | ||||||
|  | import android.util.Log | ||||||
|  | import com.mrousavy.camera.core.capture.PhotoCaptureRequest | ||||||
|  | import com.mrousavy.camera.core.capture.RepeatingCaptureRequest | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.extensions.PrecaptureOptions | ||||||
|  | import com.mrousavy.camera.extensions.PrecaptureTrigger | ||||||
|  | import com.mrousavy.camera.extensions.capture | ||||||
|  | import com.mrousavy.camera.extensions.createCaptureSession | ||||||
|  | import com.mrousavy.camera.extensions.isValid | ||||||
|  | import com.mrousavy.camera.extensions.openCamera | ||||||
|  | import com.mrousavy.camera.extensions.precapture | ||||||
|  | import com.mrousavy.camera.extensions.tryAbortCaptures | ||||||
|  | import com.mrousavy.camera.extensions.tryStopRepeating | ||||||
|  | import com.mrousavy.camera.types.Flash | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
|  | import com.mrousavy.camera.types.QualityPrioritization | ||||||
|  | import java.io.Closeable | ||||||
|  | import kotlinx.coroutines.CoroutineScope | ||||||
|  | import kotlinx.coroutines.Job | ||||||
|  | import kotlinx.coroutines.coroutineScope | ||||||
|  | import kotlinx.coroutines.delay | ||||||
|  | import kotlinx.coroutines.isActive | ||||||
|  | import kotlinx.coroutines.launch | ||||||
|  | import kotlinx.coroutines.sync.Mutex | ||||||
|  | import kotlinx.coroutines.sync.withLock | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * A [CameraCaptureSession] wrapper that safely handles interruptions and remains open whenever available. | ||||||
|  |  * | ||||||
|  |  * This class aims to be similar to Apple's `AVCaptureSession`. | ||||||
|  |  */ | ||||||
|  | class PersistentCameraCaptureSession(private val cameraManager: CameraManager, private val callback: Callback) : Closeable { | ||||||
|  |   companion object { | ||||||
|  |     private const val TAG = "PersistentCameraCaptureSession" | ||||||
|  |     private const val FOCUS_RESET_TIMEOUT = 3000L | ||||||
|  |     private const val PRECAPTURE_LOCK_TIMEOUT = 5000L | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   // Inputs/Dependencies | ||||||
|  |   private var cameraId: String? = null | ||||||
|  |   private var outputs: List<SurfaceOutput> = emptyList() | ||||||
|  |   private var repeatingRequest: RepeatingCaptureRequest? = null | ||||||
|  |   private var isActive = false | ||||||
|  |  | ||||||
|  |   // State/Dependants | ||||||
|  |   private var device: CameraDevice? = null // depends on [cameraId] | ||||||
|  |   private var session: CameraCaptureSession? = null // depends on [device, surfaceOutputs] | ||||||
|  |   private var cameraDeviceDetails: CameraDeviceDetails? = null // depends on [device] | ||||||
|  |  | ||||||
|  |   private val mutex = Mutex() | ||||||
|  |   private var didDestroyFromOutside = false | ||||||
|  |   private var focusJob: Job? = null | ||||||
|  |   private val coroutineScope = CoroutineScope(CameraQueues.cameraQueue.coroutineDispatcher) | ||||||
|  |  | ||||||
|  |   val isRunning: Boolean | ||||||
|  |     get() = isActive && session != null && device != null && !didDestroyFromOutside | ||||||
|  |  | ||||||
|  |   override fun close() { | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     session?.tryAbortCaptures() | ||||||
|  |     device?.close() | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun assertLocked(method: String) { | ||||||
|  |     if (!mutex.isLocked) { | ||||||
|  |       throw SessionIsNotLockedError("Failed to call $method, session is not locked! Call beginConfiguration() first.") | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun withConfiguration(block: suspend () -> Unit) { | ||||||
|  |     // Cancel any ongoing focus jobs | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     focusJob = null | ||||||
|  |  | ||||||
|  |     mutex.withLock { | ||||||
|  |       block() | ||||||
|  |       configure() | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setInput(cameraId: String) { | ||||||
|  |     Log.d(TAG, "--> setInput($cameraId)") | ||||||
|  |     assertLocked("setInput") | ||||||
|  |     if (this.cameraId != cameraId || device?.id != cameraId) { | ||||||
|  |       this.cameraId = cameraId | ||||||
|  |  | ||||||
|  |       // Abort any captures in the session so we get the onCaptureFailed handler for any outstanding photos | ||||||
|  |       session?.tryAbortCaptures() | ||||||
|  |       session = null | ||||||
|  |       // Closing the device will also close the session above - even faster than manually closing it. | ||||||
|  |       device?.close() | ||||||
|  |       device = null | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setOutputs(outputs: List<SurfaceOutput>) { | ||||||
|  |     Log.d(TAG, "--> setOutputs($outputs)") | ||||||
|  |     assertLocked("setOutputs") | ||||||
|  |     if (this.outputs != outputs) { | ||||||
|  |       this.outputs = outputs | ||||||
|  |  | ||||||
|  |       if (outputs.isNotEmpty()) { | ||||||
|  |         // Outputs have changed to something else, we don't wanna destroy the session directly | ||||||
|  |         // so the outputs can be kept warm. The session that gets created next will take over the outputs. | ||||||
|  |         session?.tryAbortCaptures() | ||||||
|  |       } else { | ||||||
|  |         // Just stop it, we don't have any outputs | ||||||
|  |         session?.close() | ||||||
|  |       } | ||||||
|  |       session = null | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setRepeatingRequest(request: RepeatingCaptureRequest) { | ||||||
|  |     assertLocked("setRepeatingRequest") | ||||||
|  |     Log.d(TAG, "--> setRepeatingRequest(...)") | ||||||
|  |     if (this.repeatingRequest != request) { | ||||||
|  |       this.repeatingRequest = request | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun setIsActive(isActive: Boolean) { | ||||||
|  |     assertLocked("setIsActive") | ||||||
|  |     Log.d(TAG, "--> setIsActive($isActive)") | ||||||
|  |     if (this.isActive != isActive) { | ||||||
|  |       this.isActive = isActive | ||||||
|  |     } | ||||||
|  |     if (isActive && didDestroyFromOutside) { | ||||||
|  |       didDestroyFromOutside = false | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun capture( | ||||||
|  |     qualityPrioritization: QualityPrioritization, | ||||||
|  |     flash: Flash, | ||||||
|  |     enableAutoStabilization: Boolean, | ||||||
|  |     enablePhotoHdr: Boolean, | ||||||
|  |     orientation: Orientation, | ||||||
|  |     enableShutterSound: Boolean, | ||||||
|  |     enablePrecapture: Boolean | ||||||
|  |   ): TotalCaptureResult { | ||||||
|  |     // Cancel any ongoing focus jobs | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     focusJob = null | ||||||
|  |  | ||||||
|  |     mutex.withLock { | ||||||
|  |       Log.i(TAG, "Capturing photo...") | ||||||
|  |       val session = session ?: throw CameraNotReadyError() | ||||||
|  |       val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() | ||||||
|  |       val photoRequest = PhotoCaptureRequest( | ||||||
|  |         repeatingRequest, | ||||||
|  |         qualityPrioritization, | ||||||
|  |         enableAutoStabilization, | ||||||
|  |         enablePhotoHdr, | ||||||
|  |         orientation | ||||||
|  |       ) | ||||||
|  |       val device = session.device | ||||||
|  |       val deviceDetails = getOrCreateCameraDeviceDetails(device) | ||||||
|  |  | ||||||
|  |       // Submit a single high-res capture to photo output as well as all preview outputs | ||||||
|  |       val outputs = outputs | ||||||
|  |       val repeatingOutputs = outputs.filter { it.isRepeating } | ||||||
|  |  | ||||||
|  |       val skipPrecapture = !enablePrecapture || qualityPrioritization == QualityPrioritization.SPEED | ||||||
|  |       if (skipPrecapture && flash == Flash.OFF) { | ||||||
|  |         // 0. We want to take a picture as fast as possible, so skip any precapture sequence and just capture one Frame. | ||||||
|  |         Log.i(TAG, "Using fast capture path without pre-capture sequence...") | ||||||
|  |         val singleRequest = photoRequest.createCaptureRequest(device, deviceDetails, outputs) | ||||||
|  |         return session.capture(singleRequest.build(), enableShutterSound) | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       Log.i(TAG, "Locking AF/AE/AWB...") | ||||||
|  |  | ||||||
|  |       // 1. Run precapture sequence | ||||||
|  |       var needsFlash: Boolean | ||||||
|  |       try { | ||||||
|  |         val precaptureRequest = repeatingRequest.createCaptureRequest(device, deviceDetails, repeatingOutputs) | ||||||
|  |         val skipIfPassivelyFocused = flash == Flash.OFF | ||||||
|  |         val options = PrecaptureOptions( | ||||||
|  |           listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE, PrecaptureTrigger.AWB), | ||||||
|  |           flash, | ||||||
|  |           emptyList(), | ||||||
|  |           skipIfPassivelyFocused, | ||||||
|  |           PRECAPTURE_LOCK_TIMEOUT | ||||||
|  |         ) | ||||||
|  |         val result = session.precapture(precaptureRequest, deviceDetails, options) | ||||||
|  |         needsFlash = result.needsFlash | ||||||
|  |       } catch (e: CaptureTimedOutError) { | ||||||
|  |         // the precapture just timed out after 5 seconds, take picture anyways without focus. | ||||||
|  |         needsFlash = false | ||||||
|  |       } catch (e: FocusCanceledError) { | ||||||
|  |         throw CaptureAbortedError(false) | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       try { | ||||||
|  |         // 2. Once precapture AF/AE/AWB successfully locked, capture the actual photo | ||||||
|  |         val singleRequest = photoRequest.createCaptureRequest(device, deviceDetails, outputs) | ||||||
|  |         if (needsFlash) { | ||||||
|  |           singleRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) | ||||||
|  |           singleRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE) | ||||||
|  |         } | ||||||
|  |         return session.capture(singleRequest.build(), enableShutterSound) | ||||||
|  |       } finally { | ||||||
|  |         // 3. After taking a photo we set the repeating request back to idle to remove the AE/AF/AWB locks again | ||||||
|  |         val idleRequest = repeatingRequest.createCaptureRequest(device, deviceDetails, repeatingOutputs) | ||||||
|  |         session.setRepeatingRequest(idleRequest.build(), null, null) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun focus(point: Point) { | ||||||
|  |     // Cancel any previous focus jobs | ||||||
|  |     focusJob?.cancel() | ||||||
|  |     focusJob = null | ||||||
|  |  | ||||||
|  |     mutex.withLock { | ||||||
|  |       Log.i(TAG, "Focusing to $point...") | ||||||
|  |       val session = session ?: throw CameraNotReadyError() | ||||||
|  |       val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() | ||||||
|  |       val device = session.device | ||||||
|  |       val deviceDetails = getOrCreateCameraDeviceDetails(device) | ||||||
|  |       if (!deviceDetails.supportsFocusRegions) { | ||||||
|  |         throw FocusNotSupportedError() | ||||||
|  |       } | ||||||
|  |       val outputs = outputs.filter { it.isRepeating } | ||||||
|  |  | ||||||
|  |       // 1. Run a precapture sequence for AF, AE and AWB. | ||||||
|  |       focusJob = coroutineScope.launch { | ||||||
|  |         val request = repeatingRequest.createCaptureRequest(device, deviceDetails, outputs) | ||||||
|  |         val options = | ||||||
|  |           PrecaptureOptions(listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE), Flash.OFF, listOf(point), false, FOCUS_RESET_TIMEOUT) | ||||||
|  |         session.precapture(request, deviceDetails, options) | ||||||
|  |       } | ||||||
|  |       focusJob?.join() | ||||||
|  |  | ||||||
|  |       // 2. Reset AF/AE/AWB again after 3 seconds timeout | ||||||
|  |       focusJob = coroutineScope.launch { | ||||||
|  |         delay(FOCUS_RESET_TIMEOUT) | ||||||
|  |         if (!this.isActive) { | ||||||
|  |           // this job got canceled from the outside | ||||||
|  |           return@launch | ||||||
|  |         } | ||||||
|  |         if (!isRunning || this@PersistentCameraCaptureSession.session != session) { | ||||||
|  |           // the view/session has already been destroyed in the meantime | ||||||
|  |           return@launch | ||||||
|  |         } | ||||||
|  |         Log.i(TAG, "Resetting focus to auto-focus...") | ||||||
|  |         repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request -> | ||||||
|  |           session.setRepeatingRequest(request.build(), null, null) | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun getActiveDeviceDetails(): CameraDeviceDetails? { | ||||||
|  |     val device = device ?: return null | ||||||
|  |     return getOrCreateCameraDeviceDetails(device) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private suspend fun configure() { | ||||||
|  |     if (didDestroyFromOutside && !isActive) { | ||||||
|  |       Log.d(TAG, "CameraCaptureSession has been destroyed by Android, skipping configuration until isActive is set to `true` again.") | ||||||
|  |       return | ||||||
|  |     } | ||||||
|  |     Log.d(TAG, "Configure() with isActive: $isActive, ID: $cameraId, device: $device, session: $session") | ||||||
|  |     val cameraId = cameraId ?: throw NoCameraDeviceError() | ||||||
|  |     val repeatingRequest = repeatingRequest ?: throw CameraNotReadyError() | ||||||
|  |     val outputs = outputs | ||||||
|  |  | ||||||
|  |     try { | ||||||
|  |       didDestroyFromOutside = false | ||||||
|  |  | ||||||
|  |       val device = getOrCreateDevice(cameraId) | ||||||
|  |       if (didDestroyFromOutside) return | ||||||
|  |  | ||||||
|  |       if (outputs.isEmpty()) return | ||||||
|  |       val session = getOrCreateSession(device, outputs) | ||||||
|  |       if (didDestroyFromOutside) return | ||||||
|  |  | ||||||
|  |       if (isActive) { | ||||||
|  |         Log.d(TAG, "Updating repeating request...") | ||||||
|  |         val details = getOrCreateCameraDeviceDetails(device) | ||||||
|  |         val repeatingOutputs = outputs.filter { it.isRepeating } | ||||||
|  |         val builder = repeatingRequest.createCaptureRequest(device, details, repeatingOutputs) | ||||||
|  |         session.setRepeatingRequest(builder.build(), null, null) | ||||||
|  |       } else { | ||||||
|  |         Log.d(TAG, "Stopping repeating request...") | ||||||
|  |         session.tryStopRepeating() | ||||||
|  |       } | ||||||
|  |       Log.d(TAG, "Configure() done! isActive: $isActive, ID: $cameraId, device: $device, session: $session") | ||||||
|  |     } catch (e: CameraAccessException) { | ||||||
|  |       if (didDestroyFromOutside) { | ||||||
|  |         // Camera device has been destroyed in the meantime, that's fine. | ||||||
|  |         Log.d(TAG, "Configure() canceled, session has been destroyed in the meantime!") | ||||||
|  |       } else { | ||||||
|  |         // Camera should still be active, so not sure what went wrong. Rethrow | ||||||
|  |         throw e | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private suspend fun getOrCreateDevice(cameraId: String): CameraDevice { | ||||||
|  |     val currentDevice = device | ||||||
|  |     if (currentDevice?.id == cameraId && currentDevice.isValid) { | ||||||
|  |       return currentDevice | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     this.session?.tryAbortCaptures() | ||||||
|  |     this.device?.close() | ||||||
|  |     this.device = null | ||||||
|  |     this.session = null | ||||||
|  |  | ||||||
|  |     Log.i(TAG, "Creating new device...") | ||||||
|  |     val newDevice = cameraManager.openCamera(cameraId, { device, error -> | ||||||
|  |       Log.i(TAG, "Camera $device closed!") | ||||||
|  |       if (this.device == device) { | ||||||
|  |         this.didDestroyFromOutside = true | ||||||
|  |         this.session?.tryAbortCaptures() | ||||||
|  |         this.session = null | ||||||
|  |         this.device = null | ||||||
|  |         this.isActive = false | ||||||
|  |       } | ||||||
|  |       if (error != null) { | ||||||
|  |         callback.onError(error) | ||||||
|  |       } | ||||||
|  |     }, CameraQueues.videoQueue) | ||||||
|  |     this.device = newDevice | ||||||
|  |     return newDevice | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private suspend fun getOrCreateSession(device: CameraDevice, outputs: List<SurfaceOutput>): CameraCaptureSession { | ||||||
|  |     val currentSession = session | ||||||
|  |     if (currentSession?.device == device) { | ||||||
|  |       return currentSession | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if (outputs.isEmpty()) throw NoOutputsError() | ||||||
|  |  | ||||||
|  |     Log.i(TAG, "Creating new session...") | ||||||
|  |     val newSession = device.createCaptureSession(cameraManager, outputs, { session -> | ||||||
|  |       Log.i(TAG, "Session $session closed!") | ||||||
|  |       if (this.session == session) { | ||||||
|  |         this.didDestroyFromOutside = true | ||||||
|  |         this.session?.tryAbortCaptures() | ||||||
|  |         this.session = null | ||||||
|  |         this.isActive = false | ||||||
|  |       } | ||||||
|  |     }, CameraQueues.videoQueue) | ||||||
|  |     session = newSession | ||||||
|  |     return newSession | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun getOrCreateCameraDeviceDetails(device: CameraDevice): CameraDeviceDetails { | ||||||
|  |     val currentDetails = cameraDeviceDetails | ||||||
|  |     if (currentDetails?.cameraId == device.id) { | ||||||
|  |       return currentDetails | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     val newDetails = CameraDeviceDetails(cameraManager, device.id) | ||||||
|  |     cameraDeviceDetails = newDetails | ||||||
|  |     return newDetails | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   interface Callback { | ||||||
|  |     fun onError(error: Throwable) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   class SessionIsNotLockedError(message: String) : Error(message) | ||||||
|  | } | ||||||
| @@ -2,51 +2,123 @@ package com.mrousavy.camera.core | |||||||
|  |  | ||||||
| import android.annotation.SuppressLint | import android.annotation.SuppressLint | ||||||
| import android.content.Context | import android.content.Context | ||||||
|  | import android.graphics.Point | ||||||
| import android.util.Log | import android.util.Log | ||||||
| import android.util.Size | import android.util.Size | ||||||
| import android.view.Gravity |  | ||||||
| import android.view.SurfaceHolder | import android.view.SurfaceHolder | ||||||
| import android.view.SurfaceView | import android.view.SurfaceView | ||||||
| import android.widget.FrameLayout |  | ||||||
| import com.facebook.react.bridge.UiThreadUtil | import com.facebook.react.bridge.UiThreadUtil | ||||||
| import com.mrousavy.camera.extensions.getMaximumPreviewSize | import com.mrousavy.camera.extensions.resize | ||||||
|  | import com.mrousavy.camera.extensions.rotatedBy | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
| import com.mrousavy.camera.types.ResizeMode | import com.mrousavy.camera.types.ResizeMode | ||||||
| import kotlin.math.roundToInt | import kotlin.math.roundToInt | ||||||
|  | import kotlinx.coroutines.Dispatchers | ||||||
|  | import kotlinx.coroutines.withContext | ||||||
|  |  | ||||||
| @SuppressLint("ViewConstructor") | @SuppressLint("ViewConstructor") | ||||||
| class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceView(context) { | class PreviewView(context: Context, callback: SurfaceHolder.Callback) : | ||||||
|   var size: Size = getMaximumPreviewSize() |   SurfaceView(context), | ||||||
|  |   SurfaceHolder.Callback { | ||||||
|  |   var size: Size = CameraDeviceDetails.getMaximumPreviewSize() | ||||||
|     set(value) { |     set(value) { | ||||||
|  |       if (field != value) { | ||||||
|  |         Log.i(TAG, "Surface Size changed: $field -> $value") | ||||||
|         field = value |         field = value | ||||||
|       UiThreadUtil.runOnUiThread { |         updateLayout() | ||||||
|         Log.i(TAG, "Setting PreviewView Surface Size to $width x $height...") |  | ||||||
|         holder.setFixedSize(value.height, value.width) |  | ||||||
|         requestLayout() |  | ||||||
|         invalidate() |  | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|   var resizeMode: ResizeMode = ResizeMode.COVER |   var resizeMode: ResizeMode = ResizeMode.COVER | ||||||
|     set(value) { |     set(value) { | ||||||
|  |       if (field != value) { | ||||||
|  |         Log.i(TAG, "Resize Mode changed: $field -> $value") | ||||||
|         field = value |         field = value | ||||||
|  |         updateLayout() | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   var orientation: Orientation = Orientation.PORTRAIT | ||||||
|  |       set(value) { | ||||||
|  |         if (field != value) { | ||||||
|  |           Log.i(TAG, "View Orientation changed: $field -> $value") | ||||||
|  |           field = value | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |   private var inputOrientation: Orientation = Orientation.LANDSCAPE_LEFT | ||||||
|  |     set(value) { | ||||||
|  |       if (field != value) { | ||||||
|  |         Log.i(TAG, "Input Orientation changed: $field -> $value") | ||||||
|  |         field = value | ||||||
|  |         updateLayout() | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   private val viewSize: Size | ||||||
|  |     get() { | ||||||
|  |       val displayMetrics = context.resources.displayMetrics | ||||||
|  |       val dpX = width / displayMetrics.density | ||||||
|  |       val dpY = height / displayMetrics.density | ||||||
|  |       return Size(dpX.toInt(), dpY.toInt()) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |   init { | ||||||
|  |     Log.i(TAG, "Creating PreviewView...") | ||||||
|  |     holder.setKeepScreenOn(true) | ||||||
|  |     holder.addCallback(this) | ||||||
|  |     holder.addCallback(callback) | ||||||
|  |     holder.setFixedSize(size.width, size.height) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun surfaceCreated(holder: SurfaceHolder) = Unit | ||||||
|  |   override fun surfaceDestroyed(holder: SurfaceHolder) = Unit | ||||||
|  |   override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { | ||||||
|  |     size = Size(width, height) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   suspend fun setSurfaceSize(width: Int, height: Int, cameraSensorOrientation: Orientation) { | ||||||
|  |     withContext(Dispatchers.Main) { | ||||||
|  |       inputOrientation = cameraSensorOrientation | ||||||
|  |       holder.resize(width, height) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point { | ||||||
|  |     val sensorOrientation = cameraDeviceDetails.sensorOrientation | ||||||
|  |     val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height()) | ||||||
|  |     val viewOrientation = Orientation.PORTRAIT | ||||||
|  |  | ||||||
|  |     val rotated = point.rotatedBy(viewSize, cameraSize, viewOrientation, sensorOrientation) | ||||||
|  |     Log.i(TAG, "Converted layer point $point to camera point $rotated! ($sensorOrientation, $cameraSize -> $viewSize)") | ||||||
|  |     return rotated | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private fun updateLayout() { | ||||||
|     UiThreadUtil.runOnUiThread { |     UiThreadUtil.runOnUiThread { | ||||||
|       requestLayout() |       requestLayout() | ||||||
|       invalidate() |       invalidate() | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   init { |   override fun requestLayout() { | ||||||
|     Log.i(TAG, "Creating PreviewView...") |     super.requestLayout() | ||||||
|     layoutParams = FrameLayout.LayoutParams( |     // Manually trigger measure & layout, as RN on Android skips those. | ||||||
|       FrameLayout.LayoutParams.MATCH_PARENT, |     // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427 | ||||||
|       FrameLayout.LayoutParams.MATCH_PARENT, |     post { | ||||||
|       Gravity.CENTER |       measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY)) | ||||||
|     ) |       layout(left, top, right, bottom) | ||||||
|     holder.addCallback(callback) |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size { |   private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size { | ||||||
|  |     var contentSize = contentSize | ||||||
|  |     // Swap dimensions if orientation is landscape | ||||||
|  |     if (orientation.isLandscape()) { | ||||||
|  |       contentSize = Size(contentSize.height, contentSize.width) | ||||||
|  |     } | ||||||
|     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height |     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height | ||||||
|     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height |     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height | ||||||
|  |     if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) { | ||||||
|  |       // One of the aspect ratios is 0 or NaN, maybe the view hasn't been laid out yet. | ||||||
|  |       return contentSize | ||||||
|  |     } | ||||||
|  |  | ||||||
|     val widthOverHeight = when (resizeMode) { |     val widthOverHeight = when (resizeMode) { | ||||||
|       ResizeMode.COVER -> contentAspectRatio > containerAspectRatio |       ResizeMode.COVER -> contentAspectRatio > containerAspectRatio | ||||||
| @@ -68,10 +140,11 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) : SurfaceV | |||||||
|   override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) { |   override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) { | ||||||
|     super.onMeasure(widthMeasureSpec, heightMeasureSpec) |     super.onMeasure(widthMeasureSpec, heightMeasureSpec) | ||||||
|  |  | ||||||
|     val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)) |     val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)) | ||||||
|     val fittedSize = getSize(size, viewSize, resizeMode) |     val surfaceSize = size.rotatedBy(inputOrientation) | ||||||
|  |     val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode) | ||||||
|  |  | ||||||
|     Log.i(TAG, "PreviewView is $viewSize, rendering $size content. Resizing to: $fittedSize ($resizeMode)") |     Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)") | ||||||
|     setMeasuredDimension(fittedSize.width, fittedSize.height) |     setMeasuredDimension(fittedSize.width, fittedSize.height) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -23,8 +23,10 @@ class RecordingSession( | |||||||
|   private val hdr: Boolean = false, |   private val hdr: Boolean = false, | ||||||
|   private val cameraOrientation: Orientation, |   private val cameraOrientation: Orientation, | ||||||
|   private val options: RecordVideoOptions, |   private val options: RecordVideoOptions, | ||||||
|  |   private val filePath: String, | ||||||
|   private val callback: (video: Video) -> Unit, |   private val callback: (video: Video) -> Unit, | ||||||
|   private val onError: (error: CameraError) -> Unit |   private val onError: (error: CameraError) -> Unit, | ||||||
|  |   private val allCallbacks: CameraSession.Callback, | ||||||
| ) { | ) { | ||||||
|   companion object { |   companion object { | ||||||
|     private const val TAG = "RecordingSession" |     private const val TAG = "RecordingSession" | ||||||
| @@ -36,15 +38,11 @@ class RecordingSession( | |||||||
|  |  | ||||||
|   data class Video(val path: String, val durationMs: Long, val size: Size) |   data class Video(val path: String, val durationMs: Long, val size: Size) | ||||||
|  |  | ||||||
|   private val outputPath = run { |   private val outputPath: File = File(filePath) | ||||||
|     val videoDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES) |  | ||||||
|     val sdf = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US) |  | ||||||
|     val videoFileName = "VID_${sdf.format(Date())}" |  | ||||||
|     File(videoDir!!, videoFileName) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   private val bitRate = getBitRate() |   private val bitRate = getBitRate() | ||||||
|   private val recorder = ChunkedRecordingManager.fromParams( |   private val recorder = ChunkedRecordingManager.fromParams( | ||||||
|  |     allCallbacks, | ||||||
|     size, |     size, | ||||||
|     enableAudio, |     enableAudio, | ||||||
|     fps, |     fps, | ||||||
|   | |||||||
| @@ -33,6 +33,7 @@ class VideoPipeline( | |||||||
|   val format: PixelFormat = PixelFormat.NATIVE, |   val format: PixelFormat = PixelFormat.NATIVE, | ||||||
|   private val isMirrored: Boolean = false, |   private val isMirrored: Boolean = false, | ||||||
|   private val enableFrameProcessor: Boolean = false, |   private val enableFrameProcessor: Boolean = false, | ||||||
|  |   enableGpuBuffers: Boolean = false, | ||||||
|   private val callback: CameraSession.Callback |   private val callback: CameraSession.Callback | ||||||
| ) : SurfaceTexture.OnFrameAvailableListener, | ) : SurfaceTexture.OnFrameAvailableListener, | ||||||
|   Closeable { |   Closeable { | ||||||
| @@ -79,17 +80,25 @@ class VideoPipeline( | |||||||
|       val format = getImageReaderFormat() |       val format = getImageReaderFormat() | ||||||
|       Log.i(TAG, "Using ImageReader round-trip (format: #$format)") |       Log.i(TAG, "Using ImageReader round-trip (format: #$format)") | ||||||
|  |  | ||||||
|       if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { |       // Create ImageReader | ||||||
|         Log.i(TAG, "Using API 29 for GPU ImageReader...") |       if (enableGpuBuffers && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { | ||||||
|         val usageFlags = getRecommendedHardwareBufferFlags() |         val usageFlags = getRecommendedHardwareBufferFlags() | ||||||
|         Log.i(TAG, "Using ImageReader flags: $usageFlags") |         Log.i(TAG, "Creating ImageReader with GPU-optimized usage flags: $usageFlags") | ||||||
|         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags) |         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags) | ||||||
|  |       } else { | ||||||
|  |         Log.i(TAG, "Creating ImageReader with default usage flags...") | ||||||
|  |         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES) | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |       // Create ImageWriter | ||||||
|  |       if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { | ||||||
|  |         Log.i(TAG, "Creating ImageWriter with format #$format...") | ||||||
|         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format) |         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format) | ||||||
|       } else { |       } else { | ||||||
|         Log.i(TAG, "Using legacy API for CPU ImageReader...") |         Log.i(TAG, "Creating ImageWriter with default format...") | ||||||
|         imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES) |  | ||||||
|         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES) |         imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES) | ||||||
|       } |       } | ||||||
|  |  | ||||||
|       imageReader!!.setOnImageAvailableListener({ reader -> |       imageReader!!.setOnImageAvailableListener({ reader -> | ||||||
|         // Log.i(TAG, "ImageReader::onImageAvailable!")s |         // Log.i(TAG, "ImageReader::onImageAvailable!")s | ||||||
|         val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener |         val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener | ||||||
| @@ -107,7 +116,7 @@ class VideoPipeline( | |||||||
|           } |           } | ||||||
|         } catch (e: Throwable) { |         } catch (e: Throwable) { | ||||||
|           Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e) |           Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e) | ||||||
|           throw e |           callback.onError(e) | ||||||
|         } finally { |         } finally { | ||||||
|           frame.decrementRefCount() |           frame.decrementRefCount() | ||||||
|         } |         } | ||||||
| @@ -125,8 +134,11 @@ class VideoPipeline( | |||||||
|       isActive = false |       isActive = false | ||||||
|       imageWriter?.close() |       imageWriter?.close() | ||||||
|       imageReader?.close() |       imageReader?.close() | ||||||
|  |       removeRecordingSessionOutputSurface() | ||||||
|       recordingSession = null |       recordingSession = null | ||||||
|  |       surfaceTexture.setOnFrameAvailableListener(null, null) | ||||||
|       surfaceTexture.release() |       surfaceTexture.release() | ||||||
|  |       surface.release() | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| @@ -173,7 +185,7 @@ class VideoPipeline( | |||||||
|     synchronized(this) { |     synchronized(this) { | ||||||
|       if (recordingSession != null) { |       if (recordingSession != null) { | ||||||
|         // Configure OpenGL pipeline to stream Frames into the Recording Session's surface |         // Configure OpenGL pipeline to stream Frames into the Recording Session's surface | ||||||
|         Log.i(TAG, "Setting $width x $height RecordingSession Output...") |         Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...") | ||||||
|         setRecordingSessionOutputSurface(recordingSession.surface) |         setRecordingSessionOutputSurface(recordingSession.surface) | ||||||
|         this.recordingSession = recordingSession |         this.recordingSession = recordingSession | ||||||
|       } else { |       } else { | ||||||
| @@ -228,7 +240,11 @@ class VideoPipeline( | |||||||
|   @RequiresApi(Build.VERSION_CODES.Q) |   @RequiresApi(Build.VERSION_CODES.Q) | ||||||
|   private fun supportsHardwareBufferFlags(flags: Long): Boolean { |   private fun supportsHardwareBufferFlags(flags: Long): Boolean { | ||||||
|     val hardwareBufferFormat = format.toHardwareBufferFormat() |     val hardwareBufferFormat = format.toHardwareBufferFormat() | ||||||
|  |     try { | ||||||
|       return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags) |       return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags) | ||||||
|  |     } catch (_: Throwable) { | ||||||
|  |       return false | ||||||
|  |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   private external fun getInputTextureId(): Int |   private external fun getInputTextureId(): Int | ||||||
|   | |||||||
| @@ -0,0 +1,88 @@ | |||||||
|  | package com.mrousavy.camera.core.capture | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.FlashUnavailableError | ||||||
|  | import com.mrousavy.camera.core.InvalidVideoHdrError | ||||||
|  | import com.mrousavy.camera.core.LowLightBoostNotSupportedError | ||||||
|  | import com.mrousavy.camera.core.PropRequiresFormatToBeNonNullError | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.extensions.setZoom | ||||||
|  | import com.mrousavy.camera.types.CameraDeviceFormat | ||||||
|  | import com.mrousavy.camera.types.Torch | ||||||
|  |  | ||||||
|  | abstract class CameraCaptureRequest( | ||||||
|  |   private val torch: Torch = Torch.OFF, | ||||||
|  |   private val enableVideoHdr: Boolean = false, | ||||||
|  |   val enableLowLightBoost: Boolean = false, | ||||||
|  |   val exposureBias: Double? = null, | ||||||
|  |   val zoom: Float = 1.0f, | ||||||
|  |   val format: CameraDeviceFormat? = null | ||||||
|  | ) { | ||||||
|  |   enum class Template { | ||||||
|  |     RECORD, | ||||||
|  |     PHOTO, | ||||||
|  |     PHOTO_ZSL, | ||||||
|  |     PHOTO_SNAPSHOT, | ||||||
|  |     PREVIEW; | ||||||
|  |  | ||||||
|  |     fun toRequestTemplate(): Int = | ||||||
|  |       when (this) { | ||||||
|  |         RECORD -> CameraDevice.TEMPLATE_RECORD | ||||||
|  |         PHOTO -> CameraDevice.TEMPLATE_STILL_CAPTURE | ||||||
|  |         PHOTO_ZSL -> CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG | ||||||
|  |         PHOTO_SNAPSHOT -> CameraDevice.TEMPLATE_VIDEO_SNAPSHOT | ||||||
|  |         PREVIEW -> CameraDevice.TEMPLATE_PREVIEW | ||||||
|  |       } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   abstract fun createCaptureRequest( | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder | ||||||
|  |  | ||||||
|  |   protected open fun createCaptureRequest( | ||||||
|  |     template: Template, | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val builder = device.createCaptureRequest(template.toRequestTemplate()) | ||||||
|  |  | ||||||
|  |     // Add all repeating output surfaces | ||||||
|  |     outputs.forEach { output -> | ||||||
|  |       builder.addTarget(output.surface) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set HDR | ||||||
|  |     if (enableVideoHdr) { | ||||||
|  |       if (format == null) throw PropRequiresFormatToBeNonNullError("videoHdr") | ||||||
|  |       if (!format.supportsVideoHdr) throw InvalidVideoHdrError() | ||||||
|  |       builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE) | ||||||
|  |     } else if (enableLowLightBoost) { | ||||||
|  |       if (!deviceDetails.supportsLowLightBoost) throw LowLightBoostNotSupportedError() | ||||||
|  |       builder.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_NIGHT) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set Exposure Bias | ||||||
|  |     if (exposureBias != null) { | ||||||
|  |       val clamped = deviceDetails.exposureRange.clamp(exposureBias.toInt()) | ||||||
|  |       builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, clamped) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set Zoom | ||||||
|  |     builder.setZoom(zoom, deviceDetails) | ||||||
|  |  | ||||||
|  |     // Set Torch | ||||||
|  |     if (torch == Torch.ON) { | ||||||
|  |       if (!deviceDetails.hasFlash) throw FlashUnavailableError() | ||||||
|  |       builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return builder | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,149 @@ | |||||||
|  | package com.mrousavy.camera.core.capture | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCharacteristics | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.os.Build | ||||||
|  | import android.util.Log | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
|  | import com.mrousavy.camera.types.QualityPrioritization | ||||||
|  | import com.mrousavy.camera.types.Torch | ||||||
|  |  | ||||||
|  | class PhotoCaptureRequest( | ||||||
|  |   repeatingRequest: RepeatingCaptureRequest, | ||||||
|  |   private val qualityPrioritization: QualityPrioritization, | ||||||
|  |   private val enableAutoStabilization: Boolean, | ||||||
|  |   enablePhotoHdr: Boolean, | ||||||
|  |   private val outputOrientation: Orientation | ||||||
|  | ) : CameraCaptureRequest( | ||||||
|  |   Torch.OFF, | ||||||
|  |   enablePhotoHdr, | ||||||
|  |   repeatingRequest.enableLowLightBoost, | ||||||
|  |   repeatingRequest.exposureBias, | ||||||
|  |   repeatingRequest.zoom, | ||||||
|  |   repeatingRequest.format | ||||||
|  | ) { | ||||||
|  |   companion object { | ||||||
|  |     private const val TAG = "PhotoCaptureRequest" | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun createCaptureRequest( | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val template = when (qualityPrioritization) { | ||||||
|  |       QualityPrioritization.QUALITY -> Template.PHOTO | ||||||
|  |       QualityPrioritization.BALANCED -> { | ||||||
|  |         if (deviceDetails.supportsZsl) { | ||||||
|  |           Template.PHOTO_ZSL | ||||||
|  |         } else { | ||||||
|  |           Template.PHOTO | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       QualityPrioritization.SPEED -> { | ||||||
|  |         if (deviceDetails.supportsSnapshotCapture) { | ||||||
|  |           Template.PHOTO_SNAPSHOT | ||||||
|  |         } else if (deviceDetails.supportsZsl) { | ||||||
|  |           Template.PHOTO_ZSL | ||||||
|  |         } else { | ||||||
|  |           Template.PHOTO | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     Log.i(TAG, "Using CaptureRequest Template $template...") | ||||||
|  |     return this.createCaptureRequest(template, device, deviceDetails, outputs) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   override fun createCaptureRequest( | ||||||
|  |     template: Template, | ||||||
|  |     device: CameraDevice, | ||||||
|  |     deviceDetails: CameraDeviceDetails, | ||||||
|  |     outputs: List<SurfaceOutput> | ||||||
|  |   ): CaptureRequest.Builder { | ||||||
|  |     val builder = super.createCaptureRequest(template, device, deviceDetails, outputs) | ||||||
|  |  | ||||||
|  |     // Set various speed vs quality optimization flags | ||||||
|  |     when (qualityPrioritization) { | ||||||
|  |       QualityPrioritization.SPEED -> { | ||||||
|  |         if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.FULL)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_FAST) | ||||||
|  |           if (deviceDetails.availableEdgeModes.contains(CaptureRequest.EDGE_MODE_FAST)) { | ||||||
|  |             builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_FAST) | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableAberrationModes.contains(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableHotPixelModes.contains(CaptureRequest.HOT_PIXEL_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableDistortionCorrectionModes.contains(CaptureRequest.DISTORTION_CORRECTION_MODE_FAST) && | ||||||
|  |           Build.VERSION.SDK_INT >= Build.VERSION_CODES.P | ||||||
|  |         ) { | ||||||
|  |           builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableNoiseReductionModes.contains(CaptureRequest.NOISE_REDUCTION_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableShadingModes.contains(CaptureRequest.SHADING_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_FAST)) { | ||||||
|  |           builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST) | ||||||
|  |         } | ||||||
|  |         builder.set(CaptureRequest.JPEG_QUALITY, 85) | ||||||
|  |       } | ||||||
|  |       QualityPrioritization.BALANCED -> { | ||||||
|  |         builder.set(CaptureRequest.JPEG_QUALITY, 92) | ||||||
|  |       } | ||||||
|  |       QualityPrioritization.QUALITY -> { | ||||||
|  |         if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.FULL)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_MODE, CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY) | ||||||
|  |           if (deviceDetails.availableEdgeModes.contains(CaptureRequest.EDGE_MODE_HIGH_QUALITY)) { | ||||||
|  |             builder.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_HIGH_QUALITY) | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableAberrationModes.contains(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableHotPixelModes.contains(CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.HOT_PIXEL_MODE, CaptureRequest.HOT_PIXEL_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableDistortionCorrectionModes.contains(CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) && | ||||||
|  |           Build.VERSION.SDK_INT >= Build.VERSION_CODES.P | ||||||
|  |         ) { | ||||||
|  |           builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, CaptureRequest.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableNoiseReductionModes.contains(CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.NOISE_REDUCTION_MODE, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableShadingModes.contains(CaptureRequest.SHADING_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.SHADING_MODE, CaptureRequest.SHADING_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         if (deviceDetails.availableToneMapModes.contains(CaptureRequest.TONEMAP_MODE_HIGH_QUALITY)) { | ||||||
|  |           builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_HIGH_QUALITY) | ||||||
|  |         } | ||||||
|  |         builder.set(CaptureRequest.JPEG_QUALITY, 100) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Set JPEG Orientation | ||||||
|  |     val targetOrientation = outputOrientation.toSensorRelativeOrientation(deviceDetails) | ||||||
|  |     builder.set(CaptureRequest.JPEG_ORIENTATION, targetOrientation.toDegrees()) | ||||||
|  |  | ||||||
|  |     // Set stabilization for this Frame | ||||||
|  |     if (enableAutoStabilization) { | ||||||
|  |       if (deviceDetails.opticalStabilizationModes.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON)) { | ||||||
|  |         builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) | ||||||
|  |       } else if (deviceDetails.digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) { | ||||||
|  |         builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     return builder | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,113 @@ | |||||||
|  | package com.mrousavy.camera.core.capture | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCharacteristics | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.os.Build | ||||||
|  | import android.util.Range | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.InvalidFpsError | ||||||
|  | import com.mrousavy.camera.core.InvalidVideoStabilizationMode | ||||||
|  | import com.mrousavy.camera.core.PropRequiresFormatToBeNonNullError | ||||||
|  | import com.mrousavy.camera.core.outputs.SurfaceOutput | ||||||
|  | import com.mrousavy.camera.types.CameraDeviceFormat | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  | import com.mrousavy.camera.types.Torch | ||||||
|  | import com.mrousavy.camera.types.VideoStabilizationMode | ||||||
|  |  | ||||||
/**
 * A [CameraCaptureRequest] that is meant to be set as the session's repeating request
 * (preview and/or video recording), as opposed to a one-shot photo capture.
 *
 * On top of the base class configuration it applies continuous AF/AE/AWB modes,
 * an optional fixed FPS range and video stabilization.
 */
class RepeatingCaptureRequest(
  private val enableVideoPipeline: Boolean,
  torch: Torch = Torch.OFF,
  private val fps: Int? = null,
  private val videoStabilizationMode: VideoStabilizationMode = VideoStabilizationMode.OFF,
  enableVideoHdr: Boolean = false,
  enableLowLightBoost: Boolean = false,
  exposureBias: Double? = null,
  zoom: Float = 1.0f,
  format: CameraDeviceFormat? = null
) : CameraCaptureRequest(torch, enableVideoHdr, enableLowLightBoost, exposureBias, zoom, format) {
  override fun createCaptureRequest(
    device: CameraDevice,
    deviceDetails: CameraDeviceDetails,
    outputs: List<SurfaceOutput>
  ): CaptureRequest.Builder {
    // Use the RECORD template when a video pipeline is attached, PREVIEW otherwise.
    val template = if (enableVideoPipeline) Template.RECORD else Template.PREVIEW
    return this.createCaptureRequest(template, device, deviceDetails, outputs)
  }

  // Prefer PREVIEW_STABILIZATION (API 33+) when the device advertises it,
  // otherwise fall back to the classic CONTROL_VIDEO_STABILIZATION_MODE_ON.
  private fun getBestDigitalStabilizationMode(deviceDetails: CameraDeviceDetails): Int {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
      if (deviceDetails.digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION)) {
        return CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION
      }
    }
    return CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON
  }

  /**
   * Builds the repeating request: 3A modes (AF/AE/AWB), FPS range and stabilization,
   * on top of whatever the base class already configured.
   *
   * @throws PropRequiresFormatToBeNonNullError if [fps] or [videoStabilizationMode] is set without a [format].
   * @throws InvalidFpsError if the selected format cannot deliver the requested fps.
   * @throws InvalidVideoStabilizationMode if the format does not support the requested mode.
   */
  override fun createCaptureRequest(
    template: Template,
    device: CameraDevice,
    deviceDetails: CameraDeviceDetails,
    outputs: List<SurfaceOutput>
  ): CaptureRequest.Builder {
    val builder = super.createCaptureRequest(template, device, deviceDetails, outputs)

    if (deviceDetails.modes.contains(CameraCharacteristics.CONTROL_MODE_AUTO)) {
      builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)
    }

    // Set AF: pick the best supported mode — continuous (video or picture flavor),
    // then one-shot AUTO, then manual focus with LENS_FOCUS_DISTANCE 0
    // (0 = farthest focus distance per the camera2 docs).
    if (enableVideoPipeline && deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)
    } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE)
    } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_AUTO)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
    } else if (deviceDetails.afModes.contains(CameraCharacteristics.CONTROL_AF_MODE_OFF)) {
      builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF)
      builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 0f)
    }

    // Set AE
    if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_ON)) {
      builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
    } else if (deviceDetails.aeModes.contains(CameraCharacteristics.CONTROL_AE_MODE_OFF)) {
      builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF)
    }

    // Set AWB
    if (deviceDetails.awbModes.contains(CameraCharacteristics.CONTROL_AWB_MODE_AUTO)) {
      builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO)
    }

    // Set FPS: a fixed Range(fps, fps) locks the frame rate rather than allowing a band.
    if (fps != null) {
      if (format == null) throw PropRequiresFormatToBeNonNullError("fps")
      if (format.maxFps < fps) throw InvalidFpsError(fps)
      builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range(fps, fps))
    }

    // Set Video Stabilization
    if (videoStabilizationMode != VideoStabilizationMode.OFF) {
      if (format == null) throw PropRequiresFormatToBeNonNullError("videoStabilizationMode")
      if (!format.videoStabilizationModes.contains(videoStabilizationMode)) {
        throw InvalidVideoStabilizationMode(videoStabilizationMode)
      }
      when (videoStabilizationMode) {
        VideoStabilizationMode.STANDARD -> {
          builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, getBestDigitalStabilizationMode(deviceDetails))
        }
        VideoStabilizationMode.CINEMATIC, VideoStabilizationMode.CINEMATIC_EXTENDED -> {
          // NOTE(review): OIS is gated on hardware level LIMITED+ here rather than on
          // deviceDetails.opticalStabilizationModes — confirm this matches device capabilities.
          if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)) {
            builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON)
          } else {
            builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, getBestDigitalStabilizationMode(deviceDetails))
          }
        }
        else -> throw InvalidVideoStabilizationMode(videoStabilizationMode)
      }
    }

    return builder
  }
}
| @@ -10,13 +10,7 @@ import android.view.Surface | |||||||
| import androidx.annotation.RequiresApi | import androidx.annotation.RequiresApi | ||||||
| import java.io.Closeable | import java.io.Closeable | ||||||
|  |  | ||||||
| open class SurfaceOutput( | open class SurfaceOutput(val surface: Surface, val size: Size, val outputType: OutputType, val enableHdr: Boolean = false) : Closeable { | ||||||
|   val surface: Surface, |  | ||||||
|   val size: Size, |  | ||||||
|   val outputType: OutputType, |  | ||||||
|   val enableHdr: Boolean = false, |  | ||||||
|   private val closeSurfaceOnEnd: Boolean = false |  | ||||||
| ) : Closeable { |  | ||||||
|   companion object { |   companion object { | ||||||
|     const val TAG = "SurfaceOutput" |     const val TAG = "SurfaceOutput" | ||||||
|  |  | ||||||
| @@ -52,12 +46,18 @@ open class SurfaceOutput( | |||||||
|     return result |     return result | ||||||
|   } |   } | ||||||
|  |  | ||||||
  // Whether this output receives frames continuously as part of the session's
  // repeating request (preview/video) or only on demand (photo captures).
  val isRepeating: Boolean
    get() {
      return when (outputType) {
        OutputType.VIDEO, OutputType.PREVIEW, OutputType.VIDEO_AND_PREVIEW -> true
        OutputType.PHOTO -> false
      }
    }
|  |  | ||||||
|   override fun toString(): String = "$outputType (${size.width} x ${size.height})" |   override fun toString(): String = "$outputType (${size.width} x ${size.height})" | ||||||
|  |  | ||||||
|   override fun close() { |   override fun close() { | ||||||
|     if (closeSurfaceOnEnd) { |     // close() does nothing by default | ||||||
|       surface.release() |  | ||||||
|     } |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   enum class OutputType { |   enum class OutputType { | ||||||
|   | |||||||
| @@ -5,38 +5,60 @@ import android.hardware.camera2.CaptureFailure | |||||||
| import android.hardware.camera2.CaptureRequest | import android.hardware.camera2.CaptureRequest | ||||||
| import android.hardware.camera2.TotalCaptureResult | import android.hardware.camera2.TotalCaptureResult | ||||||
| import android.media.MediaActionSound | import android.media.MediaActionSound | ||||||
| import com.mrousavy.camera.core.CameraQueues | import android.util.Log | ||||||
| import com.mrousavy.camera.core.CaptureAbortedError | import com.mrousavy.camera.core.CaptureAbortedError | ||||||
|  | import com.mrousavy.camera.core.CaptureTimedOutError | ||||||
| import com.mrousavy.camera.core.UnknownCaptureError | import com.mrousavy.camera.core.UnknownCaptureError | ||||||
| import kotlin.coroutines.resume | import kotlin.coroutines.resume | ||||||
| import kotlin.coroutines.resumeWithException | import kotlin.coroutines.resumeWithException | ||||||
| import kotlin.coroutines.suspendCoroutine | import kotlinx.coroutines.CoroutineScope | ||||||
|  | import kotlinx.coroutines.Dispatchers | ||||||
|  | import kotlinx.coroutines.delay | ||||||
|  | import kotlinx.coroutines.launch | ||||||
|  | import kotlinx.coroutines.suspendCancellableCoroutine | ||||||
|  |  | ||||||
|  | private const val TAG = "CameraCaptureSession" | ||||||
|  |  | ||||||
/**
 * Submit a single [captureRequest] to this session and suspend until it completes.
 *
 * Plays the system shutter sound when the capture starts if [enableShutterSound] is true.
 *
 * @throws CaptureTimedOutError if no result arrives within 5 seconds.
 * @throws CaptureAbortedError if the capture was aborted.
 * @throws UnknownCaptureError for any other capture failure.
 */
suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult =
  suspendCancellableCoroutine { continuation ->
    val shutterSound = if (enableShutterSound) MediaActionSound() else null
    shutterSound?.load(MediaActionSound.SHUTTER_CLICK)

    // Watchdog: fail the capture if the camera HAL never delivers a result.
    // Keep a handle on the Job so it can be cancelled as soon as the capture
    // finishes, instead of letting the timer linger for the full 5 seconds.
    val timeoutJob = CoroutineScope(Dispatchers.Default).launch {
      delay(5000) // after 5s, cancel capture
      if (continuation.isActive) {
        Log.e(TAG, "Capture timed out after 5 seconds!")
        continuation.resumeWithException(CaptureTimedOutError())
        tryAbortCaptures()
      }
    }
    continuation.invokeOnCancellation { timeoutJob.cancel() }

    this.capture(
      captureRequest,
      object : CameraCaptureSession.CaptureCallback() {
        override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
          super.onCaptureCompleted(session, request, result)

          // Guard on isActive: the timeout above may have already resumed the
          // continuation, and resuming twice throws IllegalStateException.
          if (request == captureRequest && continuation.isActive) {
            timeoutJob.cancel()
            shutterSound?.release()
            continuation.resume(result)
          }
        }

        override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) {
          super.onCaptureStarted(session, request, timestamp, frameNumber)

          if (request == captureRequest && enableShutterSound) {
            shutterSound?.play(MediaActionSound.SHUTTER_CLICK)
          }
        }

        override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
          super.onCaptureFailed(session, request, failure)

          if (request == captureRequest && continuation.isActive) {
            timeoutJob.cancel()
            // Release the MediaActionSound on the failure path too — it previously
            // leaked here because release() only happened on success.
            shutterSound?.release()
            val wasImageCaptured = failure.wasImageCaptured()
            val error = when (failure.reason) {
              CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured)
              else -> CaptureAbortedError(wasImageCaptured)
            }
            continuation.resumeWithException(error)
          }
        }
      },
      null
    )
  }
|   | |||||||
| @@ -0,0 +1,151 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.graphics.Point | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.hardware.camera2.CaptureResult | ||||||
|  | import android.hardware.camera2.params.MeteringRectangle | ||||||
|  | import android.util.Log | ||||||
|  | import android.util.Size | ||||||
|  | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.core.FocusCanceledError | ||||||
|  | import com.mrousavy.camera.types.Flash | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  | import kotlin.coroutines.coroutineContext | ||||||
|  | import kotlinx.coroutines.isActive | ||||||
|  |  | ||||||
/**
 * Options for a precapture (3A) sequence.
 *
 * @param modes Which 3A triggers (AF/AE/AWB) to run.
 * @param flash Desired flash behavior; [Flash.AUTO] reads the current AE state to decide.
 * @param pointsOfInterest Points to build metering regions around.
 * @param skipIfPassivelyFocused Skip triggers whose routines are already passively focused.
 * @param timeoutMs How long to wait for all triggers to lock before failing.
 */
data class PrecaptureOptions(
  val modes: List<PrecaptureTrigger>,
  val flash: Flash = Flash.OFF,
  val pointsOfInterest: List<Point>,
  val skipIfPassivelyFocused: Boolean,
  val timeoutMs: Long
)

// Result of a precapture sequence; [needsFlash] is true when AE ended in FlashRequired.
data class PrecaptureResult(val needsFlash: Boolean)

private const val TAG = "Precapture"
// Size of each metering rectangle built around a point of interest.
private val DEFAULT_METERING_SIZE = Size(100, 100)
|  |  | ||||||
/**
 * Run a precapture sequence to trigger an AF, AE or AWB scan and lock to the optimal values.
 * After this function completes, you can capture high quality photos as AF/AE/AWB are in focused state.
 *
 * To reset to auto-focus again, create a new `RepeatingRequest` with a fresh set of CONTROL_MODEs set.
 *
 * @param request Builder for the request; triggers and metering regions are applied to it in place.
 * @param deviceDetails Device capabilities, used to skip 3A routines the device does not support.
 * @param options Which triggers to run, flash behavior, points of interest and timeout.
 * @return Whether the final photo capture needs flash.
 * @throws FocusCanceledError if the calling coroutine is cancelled mid-sequence.
 */
suspend fun CameraCaptureSession.precapture(
  request: CaptureRequest.Builder,
  deviceDetails: CameraDeviceDetails,
  options: PrecaptureOptions
): PrecaptureResult {
  Log.i(TAG, "Running precapture sequence... ($options)")
  request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO)

  var enableFlash = options.flash == Flash.ON
  var afState = FocusState.Inactive
  var aeState = ExposureState.Inactive
  var awbState = WhiteBalanceState.Inactive
  val precaptureModes = options.modes.toMutableList()

  // 1. Cancel any ongoing precapture sequences
  request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL)
  request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL)
  if (options.flash == Flash.AUTO || options.skipIfPassivelyFocused) {
    // We want to read the current AE/AF/AWB values to determine if we need flash or can skip AF/AE/AWB precapture
    val result = this.capture(request.build(), false)

    // Missing state keys are treated as INACTIVE.
    afState = FocusState.fromAFState(result.get(CaptureResult.CONTROL_AF_STATE) ?: CaptureResult.CONTROL_AF_STATE_INACTIVE)
    aeState = ExposureState.fromAEState(result.get(CaptureResult.CONTROL_AE_STATE) ?: CaptureResult.CONTROL_AE_STATE_INACTIVE)
    awbState = WhiteBalanceState.fromAWBState(result.get(CaptureResult.CONTROL_AWB_STATE) ?: CaptureResult.CONTROL_AWB_STATE_INACTIVE)

    Log.i(TAG, "Precapture current states: AF: $afState, AE: $aeState, AWB: $awbState")
    enableFlash = aeState == ExposureState.FlashRequired && options.flash == Flash.AUTO
  } else {
    // we either want Flash ON or OFF, so we don't care about lighting conditions - do a fast capture.
    this.capture(request.build(), null, null)
  }

  if (!coroutineContext.isActive) throw FocusCanceledError()

  // NOTE(review): weight is METERING_WEIGHT_MAX - 1, presumably to stay just below the
  // maximum-weight region — confirm intent.
  val meteringWeight = MeteringRectangle.METERING_WEIGHT_MAX - 1
  val meteringRectangles = options.pointsOfInterest.map { point ->
    MeteringRectangle(point, DEFAULT_METERING_SIZE, meteringWeight)
  }.toTypedArray()

  if (options.skipIfPassivelyFocused) {
    // If user allows us to skip precapture for values that are already focused, remove them from the precapture modes.
    if (afState.isPassivelyFocused) {
      Log.i(TAG, "AF is already focused, skipping...")
      precaptureModes.remove(PrecaptureTrigger.AF)
    }
    if (aeState.isPassivelyFocused) {
      Log.i(TAG, "AE is already focused, skipping...")
      precaptureModes.remove(PrecaptureTrigger.AE)
    }
    if (awbState.isPassivelyFocused) {
      Log.i(TAG, "AWB is already focused, skipping...")
      precaptureModes.remove(PrecaptureTrigger.AWB)
    }
  }

  // 2. Submit a precapture start sequence
  if (enableFlash && deviceDetails.hasFlash) {
    // NOTE(review): the torch is enabled here but never reset inside this function;
    // callers appear responsible for submitting a fresh request afterwards — verify.
    request.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH)
  }
  if (precaptureModes.contains(PrecaptureTrigger.AF)) {
    // AF Precapture
    if (deviceDetails.afModes.contains(CaptureRequest.CONTROL_AF_MODE_AUTO)) {
      request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO)
      request.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START)
      if (meteringRectangles.isNotEmpty() && deviceDetails.supportsFocusRegions) {
        request.set(CaptureRequest.CONTROL_AF_REGIONS, meteringRectangles)
      }
    } else {
      // AF is not supported on this device.
      precaptureModes.remove(PrecaptureTrigger.AF)
    }
  }
  if (precaptureModes.contains(PrecaptureTrigger.AE)) {
    // AE Precapture (precapture trigger requires a LIMITED+ hardware level).
    if (deviceDetails.aeModes.contains(CaptureRequest.CONTROL_AE_MODE_ON) && deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)) {
      request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
      request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START)
      if (meteringRectangles.isNotEmpty() &&
        deviceDetails.supportsExposureRegions &&
        deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED)
      ) {
        request.set(CaptureRequest.CONTROL_AE_REGIONS, meteringRectangles)
      }
    } else {
      // AE is not supported on this device.
      precaptureModes.remove(PrecaptureTrigger.AE)
    }
  }
  if (precaptureModes.contains(PrecaptureTrigger.AWB)) {
    // AWB Precapture
    if (deviceDetails.awbModes.contains(CaptureRequest.CONTROL_AWB_MODE_AUTO)) {
      request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO)
      if (meteringRectangles.isNotEmpty() && deviceDetails.supportsWhiteBalanceRegions) {
        request.set(CaptureRequest.CONTROL_AWB_REGIONS, meteringRectangles)
      }
    } else {
      // AWB is not supported on this device.
      precaptureModes.remove(PrecaptureTrigger.AWB)
    }
  }
  // Fire the request that actually carries the START triggers (fire-and-forget).
  this.capture(request.build(), null, null)

  if (!coroutineContext.isActive) throw FocusCanceledError()

  // 3. Start a repeating request without the trigger and wait until AF/AE/AWB locks
  request.set(CaptureRequest.CONTROL_AF_TRIGGER, null)
  request.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, null)
  val result = this.setRepeatingRequestAndWaitForPrecapture(request.build(), options.timeoutMs, *precaptureModes.toTypedArray())

  if (!coroutineContext.isActive) throw FocusCanceledError()

  Log.i(TAG, "AF/AE/AWB successfully locked!")

  val needsFlash = result.exposureState == ExposureState.FlashRequired
  return PrecaptureResult(needsFlash)
}
| @@ -0,0 +1,193 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  | import android.hardware.camera2.CaptureFailure | ||||||
|  | import android.hardware.camera2.CaptureRequest | ||||||
|  | import android.hardware.camera2.CaptureResult | ||||||
|  | import android.hardware.camera2.TotalCaptureResult | ||||||
|  | import android.util.Log | ||||||
|  | import com.mrousavy.camera.core.CaptureAbortedError | ||||||
|  | import com.mrousavy.camera.core.CaptureTimedOutError | ||||||
|  | import kotlin.coroutines.resume | ||||||
|  | import kotlin.coroutines.resumeWithException | ||||||
|  | import kotlinx.coroutines.CoroutineScope | ||||||
|  | import kotlinx.coroutines.Dispatchers | ||||||
|  | import kotlinx.coroutines.delay | ||||||
|  | import kotlinx.coroutines.launch | ||||||
|  | import kotlinx.coroutines.suspendCancellableCoroutine | ||||||
|  |  | ||||||
|  | private const val TAG = "CameraCaptureSession" | ||||||
|  |  | ||||||
// The three 3A routines that can be triggered in a precapture sequence.
enum class PrecaptureTrigger {
  AE,
  AF,
  AWB
}
|  |  | ||||||
// Common view over the three 3A state machines (AF/AE/AWB).
interface AutoState {
  // True once the routine has settled after a trigger (locked/converged).
  val isCompleted: Boolean
  // True when the routine converged on its own, without an explicit trigger.
  val isPassivelyFocused: Boolean
}
|  |  | ||||||
/**
 * The AF (auto-focus) state machine, mapped from `CaptureResult.CONTROL_AF_STATE`.
 */
enum class FocusState : AutoState {
  Unknown,
  Inactive,
  Scanning,
  Focused,
  Unfocused,
  PassiveScanning,
  PassiveFocused,
  PassiveUnfocused;

  // AF has settled once it is locked — whether or not focus was actually achieved.
  override val isCompleted: Boolean
    get() = when (this) {
      Focused, Unfocused -> true
      else -> false
    }

  // Only PassiveFocused counts as "already focused without a trigger".
  override val isPassivelyFocused: Boolean
    get() = this == PassiveFocused

  companion object {
    // Translate a raw CONTROL_AF_STATE integer into a [FocusState].
    fun fromAFState(afState: Int): FocusState =
      when (afState) {
        CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN -> PassiveScanning
        CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED -> PassiveFocused
        CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED -> PassiveUnfocused
        CaptureResult.CONTROL_AF_STATE_INACTIVE -> Inactive
        CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN -> Scanning
        CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED -> Focused
        CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED -> Unfocused
        else -> Unknown
      }
  }
}
/**
 * The AE (auto-exposure) state machine, mapped from `CaptureResult.CONTROL_AE_STATE`.
 */
enum class ExposureState : AutoState {
  Unknown,
  Locked,
  Inactive,
  Precapture,
  Searching,
  Converged,
  FlashRequired;

  // AE has settled once it converged, with or without needing flash.
  override val isCompleted: Boolean
    get() = when (this) {
      Converged, FlashRequired -> true
      else -> false
    }

  // FlashRequired deliberately does not count as passively focused.
  override val isPassivelyFocused: Boolean
    get() = this == Converged

  companion object {
    // Translate a raw CONTROL_AE_STATE integer into an [ExposureState].
    fun fromAEState(aeState: Int): ExposureState =
      when (aeState) {
        CaptureResult.CONTROL_AE_STATE_LOCKED -> Locked
        CaptureResult.CONTROL_AE_STATE_INACTIVE -> Inactive
        CaptureResult.CONTROL_AE_STATE_PRECAPTURE -> Precapture
        CaptureResult.CONTROL_AE_STATE_SEARCHING -> Searching
        CaptureResult.CONTROL_AE_STATE_CONVERGED -> Converged
        CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED -> FlashRequired
        else -> Unknown
      }
  }
}
|  |  | ||||||
/**
 * The AWB (auto-white-balance) state machine, mapped from `CaptureResult.CONTROL_AWB_STATE`.
 */
enum class WhiteBalanceState : AutoState {
  Unknown,
  Inactive,
  Locked,
  Searching,
  Converged;

  // AWB only has a single "done" state: Converged.
  override val isCompleted: Boolean
    get() = this == Converged

  // For AWB, passive focus is identical to completion.
  override val isPassivelyFocused: Boolean
    get() = isCompleted

  companion object {
    // Translate a raw CONTROL_AWB_STATE integer into a [WhiteBalanceState].
    fun fromAWBState(awbState: Int): WhiteBalanceState =
      when (awbState) {
        CaptureResult.CONTROL_AWB_STATE_LOCKED -> Locked
        CaptureResult.CONTROL_AWB_STATE_SEARCHING -> Searching
        CaptureResult.CONTROL_AWB_STATE_CONVERGED -> Converged
        CaptureResult.CONTROL_AWB_STATE_INACTIVE -> Inactive
        else -> Unknown
      }
  }
}
|  |  | ||||||
// Snapshot of all three 3A states observed when a precapture sequence finished.
data class ResultState(val focusState: FocusState, val exposureState: ExposureState, val whiteBalanceState: WhiteBalanceState)
|  |  | ||||||
/**
 * Set a new repeating request for the [CameraCaptureSession] that contains a precapture trigger,
 * and wait until the given precaptures have locked.
 *
 * @param request The repeating request (triggers already applied by the caller).
 * @param timeoutMs How long to wait before giving up on the precapture.
 * @param precaptureTriggers The 3A routines that must reach a completed state.
 * @return The last observed AF/AE/AWB states once all triggers completed.
 * @throws CaptureTimedOutError if not all triggers completed within [timeoutMs].
 * @throws CaptureAbortedError if a capture failed or the session was closed.
 */
suspend fun CameraCaptureSession.setRepeatingRequestAndWaitForPrecapture(
  request: CaptureRequest,
  timeoutMs: Long,
  vararg precaptureTriggers: PrecaptureTrigger
): ResultState =
  suspendCancellableCoroutine { continuation ->
    // Map<PrecaptureTrigger, Boolean> of all completed precaptures
    val completed = precaptureTriggers.associateWith { false }.toMutableMap()

    // Watchdog: keep a handle on the Job so it can be cancelled as soon as the
    // precapture completes or fails, instead of lingering for the full timeout.
    val timeoutJob = CoroutineScope(Dispatchers.Default).launch {
      delay(timeoutMs) // after timeout, cancel capture
      if (continuation.isActive) {
        Log.e(TAG, "Precapture timed out after ${timeoutMs / 1000} seconds!")
        continuation.resumeWithException(CaptureTimedOutError())
        try {
          // Keep the session streaming, but drop our per-frame callback.
          setRepeatingRequest(request, null, null)
        } catch (e: Throwable) {
          // session might have already been closed
          Log.e(TAG, "Error resetting session repeating request..", e)
        }
      }
    }
    continuation.invokeOnCancellation { timeoutJob.cancel() }

    this.setRepeatingRequest(
      request,
      object : CameraCaptureSession.CaptureCallback() {
        override fun onCaptureCompleted(session: CameraCaptureSession, request: CaptureRequest, result: TotalCaptureResult) {
          super.onCaptureCompleted(session, request, result)

          if (continuation.isActive) {
            // Missing state keys are treated as INACTIVE.
            val afState = FocusState.fromAFState(result.get(CaptureResult.CONTROL_AF_STATE) ?: CaptureResult.CONTROL_AF_STATE_INACTIVE)
            val aeState = ExposureState.fromAEState(
              result.get(CaptureResult.CONTROL_AE_STATE) ?: CaptureResult.CONTROL_AE_STATE_INACTIVE
            )
            val awbState = WhiteBalanceState.fromAWBState(
              result.get(CaptureResult.CONTROL_AWB_STATE) ?: CaptureResult.CONTROL_AWB_STATE_INACTIVE
            )
            Log.i(TAG, "Precapture state: AF: $afState, AE: $aeState, AWB: $awbState")

            // AF Precapture
            if (precaptureTriggers.contains(PrecaptureTrigger.AF)) {
              completed[PrecaptureTrigger.AF] = afState.isCompleted
            }
            // AE Precapture
            if (precaptureTriggers.contains(PrecaptureTrigger.AE)) {
              completed[PrecaptureTrigger.AE] = aeState.isCompleted
            }
            // AWB Precapture
            if (precaptureTriggers.contains(PrecaptureTrigger.AWB)) {
              completed[PrecaptureTrigger.AWB] = awbState.isCompleted
            }

            if (completed.values.all { it }) {
              // All precaptures did complete!
              timeoutJob.cancel()
              continuation.resume(ResultState(afState, aeState, awbState))
              // Re-submit without our callback so this listener stops firing.
              session.setRepeatingRequest(request, null, null)
            }
          }
        }

        override fun onCaptureFailed(session: CameraCaptureSession, request: CaptureRequest, failure: CaptureFailure) {
          super.onCaptureFailed(session, request, failure)

          if (continuation.isActive) {
            // Capture failed or session closed.
            timeoutJob.cancel()
            continuation.resumeWithException(CaptureAbortedError(failure.wasImageCaptured()))
            try {
              session.setRepeatingRequest(request, null, null)
            } catch (e: Throwable) {
              Log.e(TAG, "Failed to continue repeating request!", e)
            }
          }
        }
      },
      null
    )
  }
| @@ -0,0 +1,9 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  |  | ||||||
|  | fun CameraCaptureSession.tryAbortCaptures() { | ||||||
|  |   try { | ||||||
|  |     abortCaptures() | ||||||
|  |   } catch (_: Throwable) {} | ||||||
|  | } | ||||||
| @@ -0,0 +1,9 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraCaptureSession | ||||||
|  |  | ||||||
|  | fun CameraCaptureSession.tryStopRepeating() { | ||||||
|  |   try { | ||||||
|  |     stopRepeating() | ||||||
|  |   } catch (_: Throwable) {} | ||||||
|  | } | ||||||
| @@ -1,39 +1,13 @@ | |||||||
| package com.mrousavy.camera.extensions | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics | import android.hardware.camera2.CameraCharacteristics | ||||||
| import android.media.CamcorderProfile |  | ||||||
| import android.os.Build |  | ||||||
| import android.util.Size | import android.util.Size | ||||||
|  | import com.mrousavy.camera.utils.CamcorderProfileUtils | ||||||
| private fun getMaximumVideoSize(cameraId: String): Size? { |  | ||||||
|   try { |  | ||||||
|     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { |  | ||||||
|       val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH) |  | ||||||
|       if (profiles != null) { |  | ||||||
|         val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height } |  | ||||||
|         if (largestProfile != null) { |  | ||||||
|           return Size(largestProfile.width, largestProfile.height) |  | ||||||
|         } |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     val cameraIdInt = cameraId.toIntOrNull() |  | ||||||
|     if (cameraIdInt != null) { |  | ||||||
|       val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH) |  | ||||||
|       return Size(profile.videoFrameWidth, profile.videoFrameHeight) |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     return null |  | ||||||
|   } catch (e: Throwable) { |  | ||||||
|     // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. |  | ||||||
|     return null |  | ||||||
|   } |  | ||||||
| } |  | ||||||
|  |  | ||||||
| fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List<Size> { | fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List<Size> { | ||||||
|   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! |   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! | ||||||
|   val sizes = config.getOutputSizes(format) ?: emptyArray() |   val sizes = config.getOutputSizes(format) ?: emptyArray() | ||||||
|   val maxVideoSize = getMaximumVideoSize(cameraId) |   val maxVideoSize = CamcorderProfileUtils.getMaximumVideoSize(cameraId) | ||||||
|   if (maxVideoSize != null) { |   if (maxVideoSize != null) { | ||||||
|     return sizes.filter { it.bigger <= maxVideoSize.bigger } |     return sizes.filter { it.bigger <= maxVideoSize.bigger } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -1,29 +0,0 @@ | |||||||
| package com.mrousavy.camera.extensions |  | ||||||
|  |  | ||||||
| import android.content.res.Resources |  | ||||||
| import android.hardware.camera2.CameraCharacteristics |  | ||||||
| import android.util.Size |  | ||||||
| import android.view.SurfaceHolder |  | ||||||
|  |  | ||||||
| fun getMaximumPreviewSize(): Size { |  | ||||||
|   // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap |  | ||||||
|   // According to the Android Developer documentation, PREVIEW streams can have a resolution |  | ||||||
|   // of up to the phone's display's resolution, with a maximum of 1920x1080. |  | ||||||
|   val display1080p = Size(1080, 1920) |  | ||||||
|   val displaySize = Size( |  | ||||||
|     Resources.getSystem().displayMetrics.widthPixels, |  | ||||||
|     Resources.getSystem().displayMetrics.heightPixels |  | ||||||
|   ) |  | ||||||
|   val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller |  | ||||||
|  |  | ||||||
|   return if (isHighResScreen) display1080p else displaySize |  | ||||||
| } |  | ||||||
|  |  | ||||||
| fun CameraCharacteristics.getPreviewTargetSize(targetSize: Size?): Size { |  | ||||||
|   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!! |  | ||||||
|   val maximumPreviewSize = getMaximumPreviewSize() |  | ||||||
|   val outputSizes = config.getOutputSizes(SurfaceHolder::class.java) |  | ||||||
|     .filter { it.bigger <= maximumPreviewSize.bigger && it.smaller <= maximumPreviewSize.smaller } |  | ||||||
|  |  | ||||||
|   return outputSizes.closestToOrMax(targetSize) |  | ||||||
| } |  | ||||||
| @@ -1,104 +0,0 @@ | |||||||
| package com.mrousavy.camera.extensions |  | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics |  | ||||||
| import android.hardware.camera2.CameraDevice |  | ||||||
| import android.hardware.camera2.CameraManager |  | ||||||
| import android.hardware.camera2.CaptureRequest |  | ||||||
| import android.view.Surface |  | ||||||
| import com.mrousavy.camera.types.Flash |  | ||||||
| import com.mrousavy.camera.types.Orientation |  | ||||||
| import com.mrousavy.camera.types.QualityPrioritization |  | ||||||
|  |  | ||||||
| private fun supportsSnapshotCapture(cameraCharacteristics: CameraCharacteristics): Boolean { |  | ||||||
|   // As per CameraDevice.TEMPLATE_VIDEO_SNAPSHOT in documentation: |  | ||||||
|   val hardwareLevel = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!! |  | ||||||
|   if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) return false |  | ||||||
|  |  | ||||||
|   val capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!! |  | ||||||
|   val hasDepth = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) |  | ||||||
|   val isBackwardsCompatible = !capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) |  | ||||||
|   if (hasDepth && !isBackwardsCompatible) return false |  | ||||||
|  |  | ||||||
|   return true |  | ||||||
| } |  | ||||||
|  |  | ||||||
| fun CameraDevice.createPhotoCaptureRequest( |  | ||||||
|   cameraManager: CameraManager, |  | ||||||
|   surface: Surface, |  | ||||||
|   zoom: Float, |  | ||||||
|   qualityPrioritization: QualityPrioritization, |  | ||||||
|   flashMode: Flash, |  | ||||||
|   enableRedEyeReduction: Boolean, |  | ||||||
|   enableAutoStabilization: Boolean, |  | ||||||
|   enableHdr: Boolean, |  | ||||||
|   orientation: Orientation |  | ||||||
| ): CaptureRequest { |  | ||||||
|   val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id) |  | ||||||
|  |  | ||||||
|   val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) { |  | ||||||
|     CameraDevice.TEMPLATE_VIDEO_SNAPSHOT |  | ||||||
|   } else { |  | ||||||
|     CameraDevice.TEMPLATE_STILL_CAPTURE |  | ||||||
|   } |  | ||||||
|   val captureRequest = this.createCaptureRequest(template) |  | ||||||
|   captureRequest.addTarget(surface) |  | ||||||
|  |  | ||||||
|   // TODO: Maybe we can even expose that prop directly? |  | ||||||
|   val jpegQuality = when (qualityPrioritization) { |  | ||||||
|     QualityPrioritization.SPEED -> 85 |  | ||||||
|     QualityPrioritization.BALANCED -> 92 |  | ||||||
|     QualityPrioritization.QUALITY -> 100 |  | ||||||
|   } |  | ||||||
|   captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte()) |  | ||||||
|  |  | ||||||
|   captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees()) |  | ||||||
|  |  | ||||||
|   // TODO: Use the same options as from the preview request. This is duplicate code! |  | ||||||
|  |  | ||||||
|   when (flashMode) { |  | ||||||
|     // Set the Flash Mode |  | ||||||
|     Flash.OFF -> { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) |  | ||||||
|       captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF) |  | ||||||
|     } |  | ||||||
|     Flash.ON -> { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) |  | ||||||
|       captureRequest.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH) |  | ||||||
|     } |  | ||||||
|     Flash.AUTO -> { |  | ||||||
|       if (enableRedEyeReduction) { |  | ||||||
|         captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) |  | ||||||
|       } else { |  | ||||||
|         captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH) |  | ||||||
|       } |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   if (enableAutoStabilization) { |  | ||||||
|     // Enable optical or digital image stabilization |  | ||||||
|     val digitalStabilization = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) |  | ||||||
|     val hasDigitalStabilization = digitalStabilization?.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON) ?: false |  | ||||||
|  |  | ||||||
|     val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) |  | ||||||
|     val hasOpticalStabilization = opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false |  | ||||||
|     if (hasOpticalStabilization) { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF) |  | ||||||
|       captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) |  | ||||||
|     } else if (hasDigitalStabilization) { |  | ||||||
|       captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) |  | ||||||
|     } else { |  | ||||||
|       // no stabilization is supported. ignore it |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   // TODO: Check if that zoom value is even supported. |  | ||||||
|   captureRequest.setZoom(zoom, cameraCharacteristics) |  | ||||||
|  |  | ||||||
|   // Set HDR |  | ||||||
|   // TODO: Check if that value is even supported |  | ||||||
|   if (enableHdr) { |  | ||||||
|     captureRequest.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   return captureRequest.build() |  | ||||||
| } |  | ||||||
| @@ -0,0 +1,13 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.hardware.camera2.CameraDevice | ||||||
|  |  | ||||||
|  | val CameraDevice.isValid: Boolean | ||||||
|  |   get() { | ||||||
|  |     try { | ||||||
|  |       this.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW) | ||||||
|  |       return true | ||||||
|  |     } catch (e: Throwable) { | ||||||
|  |       return false | ||||||
|  |     } | ||||||
|  |   } | ||||||
| @@ -1,20 +1,18 @@ | |||||||
| package com.mrousavy.camera.extensions | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics |  | ||||||
| import android.hardware.camera2.CaptureRequest | import android.hardware.camera2.CaptureRequest | ||||||
| import android.os.Build | import android.os.Build | ||||||
| import android.util.Range | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  | import com.mrousavy.camera.types.HardwareLevel | ||||||
|  |  | ||||||
| fun CaptureRequest.Builder.setZoom(zoom: Float, cameraCharacteristics: CameraCharacteristics) { | fun CaptureRequest.Builder.setZoom(zoom: Float, deviceDetails: CameraDeviceDetails) { | ||||||
|   if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { |   val zoomRange = deviceDetails.zoomRange | ||||||
|     val zoomRange = cameraCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE) ?: Range(1f, 1f) |  | ||||||
|   val zoomClamped = zoomRange.clamp(zoom) |   val zoomClamped = zoomRange.clamp(zoom) | ||||||
|  |  | ||||||
|  |   if (deviceDetails.hardwareLevel.isAtLeast(HardwareLevel.LIMITED) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { | ||||||
|     this.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomClamped) |     this.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomClamped) | ||||||
|   } else { |   } else { | ||||||
|     val maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) |     val size = deviceDetails.activeSize | ||||||
|     val zoomRange = Range(1f, maxZoom ?: 1f) |  | ||||||
|     val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! |  | ||||||
|     val zoomClamped = zoomRange.clamp(zoom) |  | ||||||
|     this.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoomClamped)) |     this.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoomClamped)) | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -0,0 +1,25 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.graphics.Point | ||||||
|  | import android.graphics.PointF | ||||||
|  | import android.util.Log | ||||||
|  | import android.util.Size | ||||||
|  | import com.mrousavy.camera.types.Orientation | ||||||
|  |  | ||||||
|  | fun Point.rotatedBy(fromSize: Size, toSize: Size, fromOrientation: Orientation, toOrientation: Orientation): Point { | ||||||
|  |   val differenceDegrees = (fromOrientation.toDegrees() + toOrientation.toDegrees()) % 360 | ||||||
|  |   val difference = Orientation.fromRotationDegrees(differenceDegrees) | ||||||
|  |   val normalizedPoint = PointF(this.x / fromSize.width.toFloat(), this.y / fromSize.height.toFloat()) | ||||||
|  |  | ||||||
|  |   val rotatedNormalizedPoint = when (difference) { | ||||||
|  |     Orientation.PORTRAIT -> normalizedPoint | ||||||
|  |     Orientation.PORTRAIT_UPSIDE_DOWN -> PointF(1 - normalizedPoint.x, 1 - normalizedPoint.y) | ||||||
|  |     Orientation.LANDSCAPE_LEFT -> PointF(normalizedPoint.y, 1 - normalizedPoint.x) | ||||||
|  |     Orientation.LANDSCAPE_RIGHT -> PointF(1 - normalizedPoint.y, normalizedPoint.x) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   val rotatedX = rotatedNormalizedPoint.x * toSize.width | ||||||
|  |   val rotatedY = rotatedNormalizedPoint.y * toSize.height | ||||||
|  |   Log.i("ROTATE", "$this -> $normalizedPoint -> $difference -> $rotatedX, $rotatedY") | ||||||
|  |   return Point(rotatedX.toInt(), rotatedY.toInt()) | ||||||
|  | } | ||||||
| @@ -4,9 +4,9 @@ import android.media.CamcorderProfile | |||||||
| import android.media.MediaRecorder.VideoEncoder | import android.media.MediaRecorder.VideoEncoder | ||||||
| import android.os.Build | import android.os.Build | ||||||
| import android.util.Log | import android.util.Log | ||||||
| import android.util.Size |  | ||||||
| import com.mrousavy.camera.core.RecordingSession | import com.mrousavy.camera.core.RecordingSession | ||||||
| import com.mrousavy.camera.types.VideoCodec | import com.mrousavy.camera.types.VideoCodec | ||||||
|  | import com.mrousavy.camera.utils.CamcorderProfileUtils | ||||||
| import kotlin.math.abs | import kotlin.math.abs | ||||||
|  |  | ||||||
| data class RecommendedProfile( | data class RecommendedProfile( | ||||||
| @@ -23,7 +23,7 @@ fun RecordingSession.getRecommendedBitRate(fps: Int, codec: VideoCodec, hdr: Boo | |||||||
|   val targetResolution = size |   val targetResolution = size | ||||||
|   val encoder = codec.toVideoEncoder() |   val encoder = codec.toVideoEncoder() | ||||||
|   val bitDepth = if (hdr) 10 else 8 |   val bitDepth = if (hdr) 10 else 8 | ||||||
|   val quality = findClosestCamcorderProfileQuality(cameraId, targetResolution) |   val quality = CamcorderProfileUtils.findClosestCamcorderProfileQuality(cameraId, targetResolution, true) | ||||||
|   Log.i("CamcorderProfile", "Closest matching CamcorderProfile: $quality") |   Log.i("CamcorderProfile", "Closest matching CamcorderProfile: $quality") | ||||||
|  |  | ||||||
|   var recommendedProfile: RecommendedProfile? = null |   var recommendedProfile: RecommendedProfile? = null | ||||||
| @@ -75,39 +75,3 @@ fun RecordingSession.getRecommendedBitRate(fps: Int, codec: VideoCodec, hdr: Boo | |||||||
|   } |   } | ||||||
|   return bitRate.toInt() |   return bitRate.toInt() | ||||||
| } | } | ||||||
|  |  | ||||||
| private fun getResolutionForCamcorderProfileQuality(camcorderProfile: Int): Int = |  | ||||||
|   when (camcorderProfile) { |  | ||||||
|     CamcorderProfile.QUALITY_QCIF -> 176 * 144 |  | ||||||
|     CamcorderProfile.QUALITY_QVGA -> 320 * 240 |  | ||||||
|     CamcorderProfile.QUALITY_CIF -> 352 * 288 |  | ||||||
|     CamcorderProfile.QUALITY_VGA -> 640 * 480 |  | ||||||
|     CamcorderProfile.QUALITY_480P -> 720 * 480 |  | ||||||
|     CamcorderProfile.QUALITY_720P -> 1280 * 720 |  | ||||||
|     CamcorderProfile.QUALITY_1080P -> 1920 * 1080 |  | ||||||
|     CamcorderProfile.QUALITY_2K -> 2048 * 1080 |  | ||||||
|     CamcorderProfile.QUALITY_QHD -> 2560 * 1440 |  | ||||||
|     CamcorderProfile.QUALITY_2160P -> 3840 * 2160 |  | ||||||
|     CamcorderProfile.QUALITY_4KDCI -> 4096 * 2160 |  | ||||||
|     CamcorderProfile.QUALITY_8KUHD -> 7680 * 4320 |  | ||||||
|     else -> throw Error("Invalid CamcorderProfile \"$camcorderProfile\"!") |  | ||||||
|   } |  | ||||||
|  |  | ||||||
| private fun findClosestCamcorderProfileQuality(cameraId: String, resolution: Size): Int { |  | ||||||
|   // Iterate through all available CamcorderProfiles and find the one that matches the closest |  | ||||||
|   val targetResolution = resolution.width * resolution.height |  | ||||||
|   val cameraIdInt = cameraId.toIntOrNull() |  | ||||||
|  |  | ||||||
|   val profiles = (CamcorderProfile.QUALITY_QCIF..CamcorderProfile.QUALITY_8KUHD).filter { profile -> |  | ||||||
|     if (cameraIdInt != null) { |  | ||||||
|       return@filter CamcorderProfile.hasProfile(cameraIdInt, profile) |  | ||||||
|     } else { |  | ||||||
|       return@filter CamcorderProfile.hasProfile(profile) |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
|   val closestProfile = profiles.minBy { profile -> |  | ||||||
|     val currentResolution = getResolutionForCamcorderProfileQuality(profile) |  | ||||||
|     return@minBy abs(currentResolution - targetResolution) |  | ||||||
|   } |  | ||||||
|   return closestProfile |  | ||||||
| } |  | ||||||
|   | |||||||
| @@ -2,7 +2,7 @@ package com.mrousavy.camera.extensions | |||||||
|  |  | ||||||
| import android.util.Size | import android.util.Size | ||||||
| import android.util.SizeF | import android.util.SizeF | ||||||
| import android.view.Surface | import com.mrousavy.camera.types.Orientation | ||||||
| import kotlin.math.abs | import kotlin.math.abs | ||||||
| import kotlin.math.max | import kotlin.math.max | ||||||
| import kotlin.math.min | import kotlin.math.min | ||||||
| @@ -14,13 +14,10 @@ fun List<Size>.closestToOrMax(size: Size?): Size = | |||||||
|     this.maxBy { it.width * it.height } |     this.maxBy { it.width * it.height } | ||||||
|   } |   } | ||||||
|  |  | ||||||
| fun Size.rotated(surfaceRotation: Int): Size = | fun Size.rotatedBy(orientation: Orientation): Size = | ||||||
|   when (surfaceRotation) { |   when (orientation) { | ||||||
|     Surface.ROTATION_0 -> Size(width, height) |     Orientation.PORTRAIT, Orientation.PORTRAIT_UPSIDE_DOWN -> this | ||||||
|     Surface.ROTATION_90 -> Size(height, width) |     Orientation.LANDSCAPE_LEFT, Orientation.LANDSCAPE_RIGHT -> Size(height, width) | ||||||
|     Surface.ROTATION_180 -> Size(width, height) |  | ||||||
|     Surface.ROTATION_270 -> Size(height, width) |  | ||||||
|     else -> Size(width, height) |  | ||||||
|   } |   } | ||||||
|  |  | ||||||
| val Size.bigger: Int | val Size.bigger: Int | ||||||
|   | |||||||
| @@ -0,0 +1,41 @@ | |||||||
|  | package com.mrousavy.camera.extensions | ||||||
|  |  | ||||||
|  | import android.util.Log | ||||||
|  | import android.view.SurfaceHolder | ||||||
|  | import androidx.annotation.UiThread | ||||||
|  | import kotlin.coroutines.resume | ||||||
|  | import kotlinx.coroutines.suspendCancellableCoroutine | ||||||
|  |  | ||||||
|  | private const val TAG = "SurfaceHolder" | ||||||
|  |  | ||||||
|  | @UiThread | ||||||
|  | suspend fun SurfaceHolder.resize(targetWidth: Int, targetHeight: Int) { | ||||||
|  |   return suspendCancellableCoroutine { continuation -> | ||||||
|  |     val currentSize = this.surfaceFrame | ||||||
|  |     if (currentSize.width() == targetWidth && currentSize.height() == targetHeight) { | ||||||
|  |       // Already in target size | ||||||
|  |       continuation.resume(Unit) | ||||||
|  |       return@suspendCancellableCoroutine | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     Log.i(TAG, "Resizing SurfaceHolder to $targetWidth x $targetHeight...") | ||||||
|  |  | ||||||
|  |     val callback = object : SurfaceHolder.Callback { | ||||||
|  |       override fun surfaceCreated(holder: SurfaceHolder) = Unit | ||||||
|  |       override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) { | ||||||
|  |         if (width == targetWidth && height == targetHeight) { | ||||||
|  |           holder.removeCallback(this) | ||||||
|  |           Log.i(TAG, "Resized SurfaceHolder to $width x $height!") | ||||||
|  |           continuation.resume(Unit) | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       override fun surfaceDestroyed(holder: SurfaceHolder) { | ||||||
|  |         holder.removeCallback(this) | ||||||
|  |         Log.e(TAG, "Failed to resize SurfaceHolder to $targetWidth x $targetHeight!") | ||||||
|  |         continuation.cancel(Error("Tried to resize SurfaceView, but Surface has been destroyed!")) | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     this.addCallback(callback) | ||||||
|  |     this.setFixedSize(targetWidth, targetHeight) | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -4,6 +4,7 @@ import android.hardware.HardwareBuffer; | |||||||
| import android.media.Image; | import android.media.Image; | ||||||
| import android.os.Build; | import android.os.Build; | ||||||
| import com.facebook.proguard.annotations.DoNotStrip; | import com.facebook.proguard.annotations.DoNotStrip; | ||||||
|  | import com.mrousavy.camera.core.FrameInvalidError; | ||||||
| import com.mrousavy.camera.core.HardwareBuffersNotAvailableError; | import com.mrousavy.camera.core.HardwareBuffersNotAvailableError; | ||||||
| import com.mrousavy.camera.types.PixelFormat; | import com.mrousavy.camera.types.PixelFormat; | ||||||
| import com.mrousavy.camera.types.Orientation; | import com.mrousavy.camera.types.Orientation; | ||||||
| @@ -23,42 +24,17 @@ public class Frame { | |||||||
|         this.isMirrored = isMirrored; |         this.isMirrored = isMirrored; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public Image getImage() { |     private void assertIsValid() throws FrameInvalidError { | ||||||
|         synchronized (this) { |         if (!getIsImageValid(image)) { | ||||||
|             Image img = image; |             throw new FrameInvalidError(); | ||||||
|             if (!getIsImageValid(img)) { |  | ||||||
|                 throw new RuntimeException("Frame is already closed! " + |  | ||||||
|                     "Are you trying to access the Image data outside of a Frame Processor's lifetime?\n" + |  | ||||||
|                     "- If you want to use `console.log(frame)`, use `console.log(frame.toString())` instead.\n" + |  | ||||||
|                     "- If you want to do async processing, use `runAsync(...)` instead.\n" + |  | ||||||
|                     "- If you want to use runOnJS, increment it's ref-count: `frame.incrementRefCount()`"); |  | ||||||
|             } |  | ||||||
|             return img; |  | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     private synchronized boolean getIsImageValid(Image image) { | ||||||
|     @DoNotStrip |         if (refCount <= 0) return false; | ||||||
|     public int getWidth() { |  | ||||||
|         return getImage().getWidth(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |  | ||||||
|     @DoNotStrip |  | ||||||
|     public int getHeight() { |  | ||||||
|         return getImage().getHeight(); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |  | ||||||
|     @DoNotStrip |  | ||||||
|     public boolean getIsValid() { |  | ||||||
|         return getIsImageValid(getImage()); |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private boolean getIsImageValid(Image image) { |  | ||||||
|         try { |         try { | ||||||
|             // will throw an exception if the image is already closed |             // will throw an exception if the image is already closed | ||||||
|             synchronized (this) { image.getFormat(); } |             image.getFormat(); | ||||||
|             // no exception thrown, image must still be valid. |             // no exception thrown, image must still be valid. | ||||||
|             return true; |             return true; | ||||||
|         } catch (IllegalStateException e) { |         } catch (IllegalStateException e) { | ||||||
| @@ -67,78 +43,104 @@ public class Frame { | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     public synchronized Image getImage() { | ||||||
|  |         return image; | ||||||
|  |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public boolean getIsMirrored() { |     public synchronized int getWidth() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|  |         return image.getWidth(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SuppressWarnings("unused") | ||||||
|  |     @DoNotStrip | ||||||
|  |     public synchronized int getHeight() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|  |         return image.getHeight(); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SuppressWarnings("unused") | ||||||
|  |     @DoNotStrip | ||||||
|  |     public synchronized boolean getIsValid() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|  |         return getIsImageValid(image); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     @SuppressWarnings("unused") | ||||||
|  |     @DoNotStrip | ||||||
|  |     public synchronized boolean getIsMirrored() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|         return isMirrored; |         return isMirrored; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public long getTimestamp() { |     public synchronized long getTimestamp() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|         return timestamp; |         return timestamp; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public Orientation getOrientation() { |     public synchronized Orientation getOrientation() throws FrameInvalidError { | ||||||
|  |         assertIsValid(); | ||||||
|         return orientation; |         return orientation; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public PixelFormat getPixelFormat() { |     public synchronized PixelFormat getPixelFormat() throws FrameInvalidError { | ||||||
|         return PixelFormat.Companion.fromImageFormat(getImage().getFormat()); |         assertIsValid(); | ||||||
|  |         return PixelFormat.Companion.fromImageFormat(image.getFormat()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public int getPlanesCount() { |     public synchronized int getPlanesCount() throws FrameInvalidError { | ||||||
|         return getImage().getPlanes().length; |         assertIsValid(); | ||||||
|  |         return image.getPlanes().length; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public int getBytesPerRow() { |     public synchronized int getBytesPerRow() throws FrameInvalidError { | ||||||
|         return getImage().getPlanes()[0].getRowStride(); |         assertIsValid(); | ||||||
|  |         return image.getPlanes()[0].getRowStride(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError { |     private Object getHardwareBufferBoxed() throws HardwareBuffersNotAvailableError, FrameInvalidError { | ||||||
|         return getHardwareBuffer(); |         return getHardwareBuffer(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     public HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError { |     public synchronized HardwareBuffer getHardwareBuffer() throws HardwareBuffersNotAvailableError, FrameInvalidError { | ||||||
|         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) { |         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) { | ||||||
|             throw new HardwareBuffersNotAvailableError(); |             throw new HardwareBuffersNotAvailableError(); | ||||||
|         } |         } | ||||||
|         return getImage().getHardwareBuffer(); |         assertIsValid(); | ||||||
|  |         return image.getHardwareBuffer(); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public void incrementRefCount() { |     public synchronized void incrementRefCount() { | ||||||
|         synchronized (this) { |  | ||||||
|         refCount++; |         refCount++; | ||||||
|     } |     } | ||||||
|     } |  | ||||||
|  |  | ||||||
|     @SuppressWarnings("unused") |     @SuppressWarnings("unused") | ||||||
|     @DoNotStrip |     @DoNotStrip | ||||||
|     public void decrementRefCount() { |     public synchronized void decrementRefCount() { | ||||||
|         synchronized (this) { |  | ||||||
|         refCount--; |         refCount--; | ||||||
|         if (refCount <= 0) { |         if (refCount <= 0) { | ||||||
|             // If no reference is held on this Image, close it. |             // If no reference is held on this Image, close it. | ||||||
|             close(); |             close(); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|     } |  | ||||||
|  |  | ||||||
|     private void close() { |     private synchronized void close() { | ||||||
|         synchronized (this) { |  | ||||||
|         image.close(); |         image.close(); | ||||||
|     } |     } | ||||||
|     } |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -21,6 +21,8 @@ public final class FrameProcessor { | |||||||
|     @Keep |     @Keep | ||||||
|     private final HybridData mHybridData; |     private final HybridData mHybridData; | ||||||
|  |  | ||||||
|  |     @DoNotStrip | ||||||
|  |     @Keep | ||||||
|     public FrameProcessor(HybridData hybridData) { |     public FrameProcessor(HybridData hybridData) { | ||||||
|         mHybridData = hybridData; |         mHybridData = hybridData; | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -14,7 +14,7 @@ import com.mrousavy.camera.core.ViewNotFoundError | |||||||
| import java.lang.ref.WeakReference | import java.lang.ref.WeakReference | ||||||
|  |  | ||||||
| @Suppress("KotlinJniMissingFunction") // we use fbjni. | @Suppress("KotlinJniMissingFunction") // we use fbjni. | ||||||
| class VisionCameraProxy(context: ReactApplicationContext) { | class VisionCameraProxy(private val reactContext: ReactApplicationContext) { | ||||||
|   companion object { |   companion object { | ||||||
|     const val TAG = "VisionCameraProxy" |     const val TAG = "VisionCameraProxy" | ||||||
|   } |   } | ||||||
| @@ -24,6 +24,8 @@ class VisionCameraProxy(context: ReactApplicationContext) { | |||||||
|   private var mHybridData: HybridData |   private var mHybridData: HybridData | ||||||
|   private var mContext: WeakReference<ReactApplicationContext> |   private var mContext: WeakReference<ReactApplicationContext> | ||||||
|   private var mScheduler: VisionCameraScheduler |   private var mScheduler: VisionCameraScheduler | ||||||
|  |   val context: ReactApplicationContext | ||||||
|  |     get() = reactContext | ||||||
|  |  | ||||||
|   init { |   init { | ||||||
|     val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl |     val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl | ||||||
|   | |||||||
| @@ -3,20 +3,12 @@ package com.mrousavy.camera.types | |||||||
| import com.facebook.react.bridge.ReadableMap | import com.facebook.react.bridge.ReadableMap | ||||||
| import com.mrousavy.camera.core.InvalidTypeScriptUnionError | import com.mrousavy.camera.core.InvalidTypeScriptUnionError | ||||||
|  |  | ||||||
| class CodeScannerOptions(map: ReadableMap) { | data class CodeScannerOptions(val codeTypes: List<CodeType>) { | ||||||
|   val codeTypes: List<CodeType> |   companion object { | ||||||
|  |     fun fromJSValue(value: ReadableMap): CodeScannerOptions { | ||||||
|   init { |       val jsCodeTypes = value.getArray("codeTypes") ?: throw InvalidTypeScriptUnionError("codeScanner", value.toString()) | ||||||
|     val codeTypes = map.getArray("codeTypes")?.toArrayList() ?: throw InvalidTypeScriptUnionError("codeScanner", map.toString()) |       val codeTypes = jsCodeTypes.toArrayList().map { CodeType.fromUnionValue(it as String) } | ||||||
|     this.codeTypes = codeTypes.map { |       return CodeScannerOptions(codeTypes) | ||||||
|       return@map CodeType.fromUnionValue(it as String) |  | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   override fun equals(other: Any?): Boolean { |  | ||||||
|     if (other !is CodeScannerOptions) return false |  | ||||||
|     return codeTypes.size == other.codeTypes.size && codeTypes.containsAll(other.codeTypes) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   override fun hashCode(): Int = codeTypes.hashCode() |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -13,6 +13,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|   EAN_8("ean-8"), |   EAN_8("ean-8"), | ||||||
|   ITF("itf"), |   ITF("itf"), | ||||||
|   UPC_E("upc-e"), |   UPC_E("upc-e"), | ||||||
|  |   UPC_A("upc-a"), | ||||||
|   QR("qr"), |   QR("qr"), | ||||||
|   PDF_417("pdf-417"), |   PDF_417("pdf-417"), | ||||||
|   AZTEC("aztec"), |   AZTEC("aztec"), | ||||||
| @@ -29,6 +30,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|       EAN_8 -> Barcode.FORMAT_EAN_8 |       EAN_8 -> Barcode.FORMAT_EAN_8 | ||||||
|       ITF -> Barcode.FORMAT_ITF |       ITF -> Barcode.FORMAT_ITF | ||||||
|       UPC_E -> Barcode.FORMAT_UPC_E |       UPC_E -> Barcode.FORMAT_UPC_E | ||||||
|  |       UPC_A -> Barcode.FORMAT_UPC_A | ||||||
|       QR -> Barcode.FORMAT_QR_CODE |       QR -> Barcode.FORMAT_QR_CODE | ||||||
|       PDF_417 -> Barcode.FORMAT_PDF417 |       PDF_417 -> Barcode.FORMAT_PDF417 | ||||||
|       AZTEC -> Barcode.FORMAT_AZTEC |       AZTEC -> Barcode.FORMAT_AZTEC | ||||||
| @@ -47,6 +49,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|         Barcode.FORMAT_EAN_8 -> EAN_8 |         Barcode.FORMAT_EAN_8 -> EAN_8 | ||||||
|         Barcode.FORMAT_ITF -> ITF |         Barcode.FORMAT_ITF -> ITF | ||||||
|         Barcode.FORMAT_UPC_E -> UPC_E |         Barcode.FORMAT_UPC_E -> UPC_E | ||||||
|  |         Barcode.FORMAT_UPC_A -> UPC_A | ||||||
|         Barcode.FORMAT_QR_CODE -> QR |         Barcode.FORMAT_QR_CODE -> QR | ||||||
|         Barcode.FORMAT_PDF417 -> PDF_417 |         Barcode.FORMAT_PDF417 -> PDF_417 | ||||||
|         Barcode.FORMAT_AZTEC -> AZTEC |         Barcode.FORMAT_AZTEC -> AZTEC | ||||||
| @@ -64,6 +67,7 @@ enum class CodeType(override val unionValue: String) : JSUnionValue { | |||||||
|         "ean-8" -> EAN_8 |         "ean-8" -> EAN_8 | ||||||
|         "itf" -> ITF |         "itf" -> ITF | ||||||
|         "upc-e" -> UPC_E |         "upc-e" -> UPC_E | ||||||
|  |         "upc-a" -> UPC_A | ||||||
|         "qr" -> QR |         "qr" -> QR | ||||||
|         "pdf-417" -> PDF_417 |         "pdf-417" -> PDF_417 | ||||||
|         "aztec" -> AZTEC |         "aztec" -> AZTEC | ||||||
|   | |||||||
| @@ -0,0 +1,36 @@ | |||||||
|  | package com.mrousavy.camera.types | ||||||
|  |  | ||||||
|  | import com.facebook.react.bridge.Arguments | ||||||
|  | import com.facebook.react.bridge.WritableMap | ||||||
|  | import com.facebook.react.uimanager.events.Event | ||||||
|  |  | ||||||
|  | class CameraInitializedEvent(surfaceId: Int, viewId: Int) : Event<CameraInitializedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraInitialized" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraStartedEvent(surfaceId: Int, viewId: Int) : Event<CameraStartedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraStarted" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraStoppedEvent(surfaceId: Int, viewId: Int) : Event<CameraStoppedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraStopped" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraErrorEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : Event<CameraErrorEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraError" | ||||||
|  |   override fun getEventData() = data | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraViewReadyEvent(surfaceId: Int, viewId: Int) : Event<CameraViewReadyEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraViewReady" | ||||||
|  |   override fun getEventData(): WritableMap = Arguments.createMap() | ||||||
|  | } | ||||||
|  |  | ||||||
|  | class CameraCodeScannedEvent(surfaceId: Int, viewId: Int, private val data: WritableMap) : | ||||||
|  |   Event<CameraCodeScannedEvent>(surfaceId, viewId) { | ||||||
|  |   override fun getEventName() = "cameraCodeScanned" | ||||||
|  |   override fun getEventData() = data | ||||||
|  | } | ||||||
| @@ -9,6 +9,19 @@ enum class HardwareLevel(override val unionValue: String) : JSUnionValue { | |||||||
|   FULL("full"), |   FULL("full"), | ||||||
|   LEVEL_3("full"); |   LEVEL_3("full"); | ||||||
|  |  | ||||||
|  |   private val rank: Int | ||||||
|  |     get() { | ||||||
|  |       return when (this) { | ||||||
|  |         LEGACY -> 0 | ||||||
|  |         LIMITED -> 1 | ||||||
|  |         EXTERNAL -> 1 | ||||||
|  |         FULL -> 2 | ||||||
|  |         LEVEL_3 -> 3 | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |   fun isAtLeast(level: HardwareLevel): Boolean = this.rank >= level.rank | ||||||
|  |  | ||||||
|   companion object { |   companion object { | ||||||
|     fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel = |     fun fromCameraCharacteristics(cameraCharacteristics: CameraCharacteristics): HardwareLevel = | ||||||
|       when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { |       when (cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)) { | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| package com.mrousavy.camera.types | package com.mrousavy.camera.types | ||||||
|  |  | ||||||
| import android.hardware.camera2.CameraCharacteristics | import com.mrousavy.camera.core.CameraDeviceDetails | ||||||
|  |  | ||||||
| enum class Orientation(override val unionValue: String) : JSUnionValue { | enum class Orientation(override val unionValue: String) : JSUnionValue { | ||||||
|   PORTRAIT("portrait"), |   PORTRAIT("portrait"), | ||||||
| @@ -19,23 +19,22 @@ enum class Orientation(override val unionValue: String) : JSUnionValue { | |||||||
|   fun toDegrees(): Int = |   fun toDegrees(): Int = | ||||||
|     when (this) { |     when (this) { | ||||||
|       PORTRAIT -> 0 |       PORTRAIT -> 0 | ||||||
|       LANDSCAPE_RIGHT -> 90 |       LANDSCAPE_LEFT -> 90 | ||||||
|       PORTRAIT_UPSIDE_DOWN -> 180 |       PORTRAIT_UPSIDE_DOWN -> 180 | ||||||
|       LANDSCAPE_LEFT -> 270 |       LANDSCAPE_RIGHT -> 270 | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   fun toSensorRelativeOrientation(cameraCharacteristics: CameraCharacteristics): Orientation { |   fun toSensorRelativeOrientation(deviceDetails: CameraDeviceDetails): Orientation { | ||||||
|     val sensorOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! |  | ||||||
|  |  | ||||||
|     // Convert target orientation to rotation degrees (0, 90, 180, 270) |     // Convert target orientation to rotation degrees (0, 90, 180, 270) | ||||||
|     var rotationDegrees = this.toDegrees() |     var rotationDegrees = this.toDegrees() | ||||||
|  |  | ||||||
|     // Reverse device orientation for front-facing cameras |     // Reverse device orientation for front-facing cameras | ||||||
|     val facingFront = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT |     if (deviceDetails.lensFacing == LensFacing.FRONT) { | ||||||
|     if (facingFront) rotationDegrees = -rotationDegrees |       rotationDegrees = -rotationDegrees | ||||||
|  |     } | ||||||
|  |  | ||||||
|     // Rotate sensor rotation by target rotation |     // Rotate sensor rotation by target rotation | ||||||
|     val newRotationDegrees = (sensorOrientation + rotationDegrees + 360) % 360 |     val newRotationDegrees = (deviceDetails.sensorOrientation.toDegrees() + rotationDegrees + 360) % 360 | ||||||
|  |  | ||||||
|     return fromRotationDegrees(newRotationDegrees) |     return fromRotationDegrees(newRotationDegrees) | ||||||
|   } |   } | ||||||
| @@ -52,9 +51,9 @@ enum class Orientation(override val unionValue: String) : JSUnionValue { | |||||||
|  |  | ||||||
|     fun fromRotationDegrees(rotationDegrees: Int): Orientation = |     fun fromRotationDegrees(rotationDegrees: Int): Orientation = | ||||||
|       when (rotationDegrees) { |       when (rotationDegrees) { | ||||||
|         in 45..135 -> LANDSCAPE_RIGHT |         in 45..135 -> LANDSCAPE_LEFT | ||||||
|         in 135..225 -> PORTRAIT_UPSIDE_DOWN |         in 135..225 -> PORTRAIT_UPSIDE_DOWN | ||||||
|         in 225..315 -> LANDSCAPE_LEFT |         in 225..315 -> LANDSCAPE_RIGHT | ||||||
|         else -> PORTRAIT |         else -> PORTRAIT | ||||||
|       } |       } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -8,6 +8,7 @@ class RecordVideoOptions(map: ReadableMap) { | |||||||
|   var videoCodec = VideoCodec.H264 |   var videoCodec = VideoCodec.H264 | ||||||
|   var videoBitRateOverride: Double? = null |   var videoBitRateOverride: Double? = null | ||||||
|   var videoBitRateMultiplier: Double? = null |   var videoBitRateMultiplier: Double? = null | ||||||
|  |   var orientation: Orientation? = null | ||||||
|  |  | ||||||
|   init { |   init { | ||||||
|     if (map.hasKey("fileType")) { |     if (map.hasKey("fileType")) { | ||||||
| @@ -25,5 +26,8 @@ class RecordVideoOptions(map: ReadableMap) { | |||||||
|     if (map.hasKey("videoBitRateMultiplier")) { |     if (map.hasKey("videoBitRateMultiplier")) { | ||||||
|       videoBitRateMultiplier = map.getDouble("videoBitRateMultiplier") |       videoBitRateMultiplier = map.getDouble("videoBitRateMultiplier") | ||||||
|     } |     } | ||||||
|  |     if (map.hasKey("orientation")) { | ||||||
|  |       orientation = Orientation.fromUnionValue(map.getString("orientation")) | ||||||
|  |     } | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,5 +1,7 @@ | |||||||
| package com.mrousavy.camera.types | package com.mrousavy.camera.types | ||||||
|  |  | ||||||
|  | import com.mrousavy.camera.core.InvalidTypeScriptUnionError | ||||||
|  |  | ||||||
| enum class ResizeMode(override val unionValue: String) : JSUnionValue { | enum class ResizeMode(override val unionValue: String) : JSUnionValue { | ||||||
|   COVER("cover"), |   COVER("cover"), | ||||||
|   CONTAIN("contain"); |   CONTAIN("contain"); | ||||||
| @@ -9,7 +11,7 @@ enum class ResizeMode(override val unionValue: String) : JSUnionValue { | |||||||
|       when (unionValue) { |       when (unionValue) { | ||||||
|         "cover" -> COVER |         "cover" -> COVER | ||||||
|         "contain" -> CONTAIN |         "contain" -> CONTAIN | ||||||
|         else -> COVER |         else -> throw InvalidTypeScriptUnionError("resizeMode", unionValue) | ||||||
|       } |       } | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -13,21 +13,6 @@ enum class VideoStabilizationMode(override val unionValue: String) : JSUnionValu | |||||||
|   CINEMATIC("cinematic"), |   CINEMATIC("cinematic"), | ||||||
|   CINEMATIC_EXTENDED("cinematic-extended"); |   CINEMATIC_EXTENDED("cinematic-extended"); | ||||||
|  |  | ||||||
|   fun toDigitalStabilizationMode(): Int = |  | ||||||
|     when (this) { |  | ||||||
|       OFF -> CONTROL_VIDEO_STABILIZATION_MODE_OFF |  | ||||||
|       STANDARD -> CONTROL_VIDEO_STABILIZATION_MODE_ON |  | ||||||
|       CINEMATIC -> 2 // TODO: CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION |  | ||||||
|       else -> CONTROL_VIDEO_STABILIZATION_MODE_OFF |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|   fun toOpticalStabilizationMode(): Int = |  | ||||||
|     when (this) { |  | ||||||
|       OFF -> LENS_OPTICAL_STABILIZATION_MODE_OFF |  | ||||||
|       CINEMATIC_EXTENDED -> LENS_OPTICAL_STABILIZATION_MODE_ON |  | ||||||
|       else -> LENS_OPTICAL_STABILIZATION_MODE_OFF |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|   companion object : JSUnionValue.Companion<VideoStabilizationMode> { |   companion object : JSUnionValue.Companion<VideoStabilizationMode> { | ||||||
|     override fun fromUnionValue(unionValue: String?): VideoStabilizationMode = |     override fun fromUnionValue(unionValue: String?): VideoStabilizationMode = | ||||||
|       when (unionValue) { |       when (unionValue) { | ||||||
|   | |||||||
| @@ -0,0 +1,101 @@ | |||||||
|  | package com.mrousavy.camera.utils | ||||||
|  |  | ||||||
|  | import android.media.CamcorderProfile | ||||||
|  | import android.os.Build | ||||||
|  | import android.util.Size | ||||||
|  | import kotlin.math.abs | ||||||
|  |  | ||||||
|  | class CamcorderProfileUtils { | ||||||
|  |   companion object { | ||||||
|  |     private fun getResolutionForCamcorderProfileQuality(camcorderProfile: Int): Int = | ||||||
|  |       when (camcorderProfile) { | ||||||
|  |         CamcorderProfile.QUALITY_QCIF -> 176 * 144 | ||||||
|  |         CamcorderProfile.QUALITY_QVGA -> 320 * 240 | ||||||
|  |         CamcorderProfile.QUALITY_CIF -> 352 * 288 | ||||||
|  |         CamcorderProfile.QUALITY_VGA -> 640 * 480 | ||||||
|  |         CamcorderProfile.QUALITY_480P -> 720 * 480 | ||||||
|  |         CamcorderProfile.QUALITY_720P -> 1280 * 720 | ||||||
|  |         CamcorderProfile.QUALITY_1080P -> 1920 * 1080 | ||||||
|  |         CamcorderProfile.QUALITY_2K -> 2048 * 1080 | ||||||
|  |         CamcorderProfile.QUALITY_QHD -> 2560 * 1440 | ||||||
|  |         CamcorderProfile.QUALITY_2160P -> 3840 * 2160 | ||||||
|  |         CamcorderProfile.QUALITY_4KDCI -> 4096 * 2160 | ||||||
|  |         CamcorderProfile.QUALITY_8KUHD -> 7680 * 4320 | ||||||
|  |         else -> throw Error("Invalid CamcorderProfile \"$camcorderProfile\"!") | ||||||
|  |       } | ||||||
|  |  | ||||||
|  |     fun findClosestCamcorderProfileQuality(cameraId: String, resolution: Size, allowLargerSize: Boolean): Int { | ||||||
|  |       // Iterate through all available CamcorderProfiles and find the one that matches the closest | ||||||
|  |       val targetResolution = resolution.width * resolution.height | ||||||
|  |       val cameraIdInt = cameraId.toIntOrNull() | ||||||
|  |  | ||||||
|  |       var profiles = (CamcorderProfile.QUALITY_QCIF..CamcorderProfile.QUALITY_8KUHD).filter { profile -> | ||||||
|  |         if (cameraIdInt != null) { | ||||||
|  |           return@filter CamcorderProfile.hasProfile(cameraIdInt, profile) | ||||||
|  |         } else { | ||||||
|  |           return@filter CamcorderProfile.hasProfile(profile) | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       if (!allowLargerSize) { | ||||||
|  |         profiles = profiles.filter { profile -> | ||||||
|  |           val currentResolution = getResolutionForCamcorderProfileQuality(profile) | ||||||
|  |           return@filter currentResolution <= targetResolution | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |       val closestProfile = profiles.minBy { profile -> | ||||||
|  |         val currentResolution = getResolutionForCamcorderProfileQuality(profile) | ||||||
|  |         return@minBy abs(currentResolution - targetResolution) | ||||||
|  |       } | ||||||
|  |       return closestProfile | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     fun getMaximumVideoSize(cameraId: String): Size? { | ||||||
|  |       try { | ||||||
|  |         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { | ||||||
|  |           val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH) | ||||||
|  |           if (profiles != null) { | ||||||
|  |             val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height } | ||||||
|  |             if (largestProfile != null) { | ||||||
|  |               return Size(largestProfile.width, largestProfile.height) | ||||||
|  |             } | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         val cameraIdInt = cameraId.toIntOrNull() | ||||||
|  |         if (cameraIdInt != null) { | ||||||
|  |           val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH) | ||||||
|  |           return Size(profile.videoFrameWidth, profile.videoFrameHeight) | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return null | ||||||
|  |       } catch (e: Throwable) { | ||||||
|  |         // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. | ||||||
|  |         return null | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     fun getMaximumFps(cameraId: String, size: Size): Int? { | ||||||
|  |       try { | ||||||
|  |         val quality = findClosestCamcorderProfileQuality(cameraId, size, false) | ||||||
|  |  | ||||||
|  |         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { | ||||||
|  |           val profiles = CamcorderProfile.getAll(cameraId, quality) | ||||||
|  |           if (profiles != null) { | ||||||
|  |             return profiles.videoProfiles.maxOf { profile -> profile.frameRate } | ||||||
|  |           } | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         val cameraIdInt = cameraId.toIntOrNull() | ||||||
|  |         if (cameraIdInt != null) { | ||||||
|  |           val profile = CamcorderProfile.get(cameraIdInt, quality) | ||||||
|  |           return profile.videoFrameRate | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         return null | ||||||
|  |       } catch (e: Throwable) { | ||||||
|  |         // some Samsung phones just crash when trying to get the CamcorderProfile. Only god knows why. | ||||||
|  |         return null | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -30,7 +30,7 @@ To try the playground out for yourself, run the following commands: | |||||||
|  |  | ||||||
| ```sh | ```sh | ||||||
| git clone https://github.com/mrousavy/react-native-vision-camera | git clone https://github.com/mrousavy/react-native-vision-camera | ||||||
| cd react-native-vision-camera | cd react-native-vision-camera/package | ||||||
| yarn bootstrap | yarn bootstrap | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
|   | |||||||
| @@ -484,7 +484,7 @@ PODS: | |||||||
|     - libwebp (~> 1.0) |     - libwebp (~> 1.0) | ||||||
|     - SDWebImage/Core (~> 5.10) |     - SDWebImage/Core (~> 5.10) | ||||||
|   - SocketRocket (0.6.1) |   - SocketRocket (0.6.1) | ||||||
|   - VisionCamera (3.8.2): |   - VisionCamera (3.9.0-beta.6): | ||||||
|     - React |     - React | ||||||
|     - React-callinvoker |     - React-callinvoker | ||||||
|     - React-Core |     - React-Core | ||||||
| @@ -724,9 +724,9 @@ SPEC CHECKSUMS: | |||||||
|   SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d |   SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d | ||||||
|   SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d |   SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d | ||||||
|   SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 |   SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 | ||||||
|   VisionCamera: edbcd00e27a438b2228f67823e2b8d15a189065f |   VisionCamera: 33c90675adf75528199f840f81dfbe74a2fe6c3f | ||||||
|   Yoga: 4c3aa327e4a6a23eeacd71f61c81df1bcdf677d5 |   Yoga: 4c3aa327e4a6a23eeacd71f61c81df1bcdf677d5 | ||||||
|  |  | ||||||
| PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb | PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb | ||||||
|  |  | ||||||
| COCOAPODS: 1.11.3 | COCOAPODS: 1.14.3 | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ import AVFoundation | |||||||
| // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate | // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate | ||||||
|  |  | ||||||
| extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { | extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { | ||||||
|   func startRecording(options: NSDictionary, callback jsCallback: @escaping RCTResponseSenderBlock) { |   func startRecording(options: NSDictionary, filePath: String, callback jsCallback: @escaping RCTResponseSenderBlock) { | ||||||
|     // Type-safety |     // Type-safety | ||||||
|     let callback = Callback(jsCallback) |     let callback = Callback(jsCallback) | ||||||
|  |  | ||||||
| @@ -21,6 +21,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud | |||||||
|       // Start Recording with success and error callbacks |       // Start Recording with success and error callbacks | ||||||
|       cameraSession.startRecording( |       cameraSession.startRecording( | ||||||
|         options: options, |         options: options, | ||||||
|  |         filePath: filePath, | ||||||
|         onVideoRecorded: { video in |         onVideoRecorded: { video in | ||||||
|           callback.resolve(video.toJSValue()) |           callback.resolve(video.toJSValue()) | ||||||
|         }, |         }, | ||||||
|   | |||||||
| @@ -62,6 +62,8 @@ public final class CameraView: UIView, CameraSessionDelegate { | |||||||
|   @objc var onStarted: RCTDirectEventBlock? |   @objc var onStarted: RCTDirectEventBlock? | ||||||
|   @objc var onStopped: RCTDirectEventBlock? |   @objc var onStopped: RCTDirectEventBlock? | ||||||
|   @objc var onViewReady: RCTDirectEventBlock? |   @objc var onViewReady: RCTDirectEventBlock? | ||||||
|  |   @objc var onInitReady: RCTDirectEventBlock? | ||||||
|  |   @objc var onVideoChunkReady: RCTDirectEventBlock? | ||||||
|   @objc var onCodeScanned: RCTDirectEventBlock? |   @objc var onCodeScanned: RCTDirectEventBlock? | ||||||
|   // zoom |   // zoom | ||||||
|   @objc var enableZoomGesture = false { |   @objc var enableZoomGesture = false { | ||||||
| @@ -302,6 +304,15 @@ public final class CameraView: UIView, CameraSessionDelegate { | |||||||
|     onInitialized([:]) |     onInitialized([:]) | ||||||
|   } |   } | ||||||
|    |    | ||||||
|  |   func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) { | ||||||
|  |     guard let configuration, let difference else { return } | ||||||
|  |      | ||||||
|  |     if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection { | ||||||
|  |       let videoPreviewLayer = previewView.videoPreviewLayer | ||||||
|  |       connection.setOrientation(configuration.orientation) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|   func onCameraStarted() { |   func onCameraStarted() { | ||||||
|     ReactLogger.log(level: .info, message: "Camera started!") |     ReactLogger.log(level: .info, message: "Camera started!") | ||||||
|     guard let onStarted = onStarted else { |     guard let onStarted = onStarted else { | ||||||
| @@ -336,6 +347,31 @@ public final class CameraView: UIView, CameraSessionDelegate { | |||||||
|     #endif |     #endif | ||||||
|   } |   } | ||||||
|    |    | ||||||
|  |   func onVideoChunkReady(chunk: ChunkedRecorder.Chunk) { | ||||||
|  |     ReactLogger.log(level: .info, message: "Chunk ready: \(chunk)") | ||||||
|  |      | ||||||
|  |     guard let onVideoChunkReady, let onInitReady else { | ||||||
|  |       ReactLogger.log(level: .warning, message: "Either onInitReady or onVideoChunkReady are not valid!") | ||||||
|  |       return | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     switch chunk.type { | ||||||
|  |     case .initialization: | ||||||
|  |       onInitReady([ | ||||||
|  |         "filepath": chunk.url.path, | ||||||
|  |       ]) | ||||||
|  |     case let .data(index: index, duration: duration): | ||||||
|  |       var data: [String: Any] = [ | ||||||
|  |         "filepath": chunk.url.path, | ||||||
|  |         "index": index, | ||||||
|  |       ] | ||||||
|  |       if let duration { | ||||||
|  |         data["duration"] = duration.seconds | ||||||
|  |       } | ||||||
|  |       onVideoChunkReady(data) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|   func onCodeScanned(codes: [CameraSession.Code], scannerFrame: CameraSession.CodeScannerFrame) { |   func onCodeScanned(codes: [CameraSession.Code], scannerFrame: CameraSession.CodeScannerFrame) { | ||||||
|     guard let onCodeScanned = onCodeScanned else { |     guard let onCodeScanned = onCodeScanned else { | ||||||
|       return |       return | ||||||
|   | |||||||
| @@ -55,6 +55,8 @@ RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock); | |||||||
| RCT_EXPORT_VIEW_PROPERTY(onStarted, RCTDirectEventBlock); | RCT_EXPORT_VIEW_PROPERTY(onStarted, RCTDirectEventBlock); | ||||||
| RCT_EXPORT_VIEW_PROPERTY(onStopped, RCTDirectEventBlock); | RCT_EXPORT_VIEW_PROPERTY(onStopped, RCTDirectEventBlock); | ||||||
| RCT_EXPORT_VIEW_PROPERTY(onViewReady, RCTDirectEventBlock); | RCT_EXPORT_VIEW_PROPERTY(onViewReady, RCTDirectEventBlock); | ||||||
|  | RCT_EXPORT_VIEW_PROPERTY(onInitReady, RCTDirectEventBlock); | ||||||
|  | RCT_EXPORT_VIEW_PROPERTY(onVideoChunkReady, RCTDirectEventBlock); | ||||||
| // Code Scanner | // Code Scanner | ||||||
| RCT_EXPORT_VIEW_PROPERTY(codeScannerOptions, NSDictionary); | RCT_EXPORT_VIEW_PROPERTY(codeScannerOptions, NSDictionary); | ||||||
| RCT_EXPORT_VIEW_PROPERTY(onCodeScanned, RCTDirectEventBlock); | RCT_EXPORT_VIEW_PROPERTY(onCodeScanned, RCTDirectEventBlock); | ||||||
| @@ -62,7 +64,8 @@ RCT_EXPORT_VIEW_PROPERTY(onCodeScanned, RCTDirectEventBlock); | |||||||
| // Camera View Functions | // Camera View Functions | ||||||
| RCT_EXTERN_METHOD(startRecording | RCT_EXTERN_METHOD(startRecording | ||||||
|                   : (nonnull NSNumber*)node options |                   : (nonnull NSNumber*)node options | ||||||
|                   : (NSDictionary*)options onRecordCallback |                   : (NSDictionary*)options filePath | ||||||
|  |                   : (NSString*)filePath onRecordCallback | ||||||
|                   : (RCTResponseSenderBlock)onRecordCallback); |                   : (RCTResponseSenderBlock)onRecordCallback); | ||||||
| RCT_EXTERN_METHOD(pauseRecording | RCT_EXTERN_METHOD(pauseRecording | ||||||
|                   : (nonnull NSNumber*)node resolve |                   : (nonnull NSNumber*)node resolve | ||||||
|   | |||||||
| @@ -43,9 +43,9 @@ final class CameraViewManager: RCTViewManager { | |||||||
|   //       This means that any errors that occur in this function have to be delegated through |   //       This means that any errors that occur in this function have to be delegated through | ||||||
|   //       the callback, but I'd prefer for them to throw for the original function instead. |   //       the callback, but I'd prefer for them to throw for the original function instead. | ||||||
|   @objc |   @objc | ||||||
|   final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) { |   final func startRecording(_ node: NSNumber, options: NSDictionary, filePath: NSString, onRecordCallback: @escaping RCTResponseSenderBlock) { | ||||||
|     let component = getCameraView(withTag: node) |     let component = getCameraView(withTag: node) | ||||||
|     component.startRecording(options: options, callback: onRecordCallback) |     component.startRecording(options: options, filePath: filePath as String, callback: onRecordCallback) | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   @objc |   @objc | ||||||
|   | |||||||
| @@ -93,7 +93,7 @@ enum DeviceError: String { | |||||||
|     case .lowLightBoostNotSupported: |     case .lowLightBoostNotSupported: | ||||||
|       return "The currently selected camera device does not support low-light boost! Select a device where `device.supportsLowLightBoost` is true." |       return "The currently selected camera device does not support low-light boost! Select a device where `device.supportsLowLightBoost` is true." | ||||||
|     case .focusNotSupported: |     case .focusNotSupported: | ||||||
|       return "The currently selected camera device does not support focussing!" |       return "The currently selected camera device does not support focusing!" | ||||||
|     case .microphoneUnavailable: |     case .microphoneUnavailable: | ||||||
|       return "The microphone was unavailable." |       return "The microphone was unavailable." | ||||||
|     case .notAvailableOnSimulator: |     case .notAvailableOnSimulator: | ||||||
| @@ -176,6 +176,7 @@ enum CaptureError { | |||||||
|   case noRecordingInProgress |   case noRecordingInProgress | ||||||
|   case fileError |   case fileError | ||||||
|   case createTempFileError(message: String? = nil) |   case createTempFileError(message: String? = nil) | ||||||
|  |   case createRecordingDirectoryError(message: String? = nil) | ||||||
|   case createRecorderError(message: String? = nil) |   case createRecorderError(message: String? = nil) | ||||||
|   case videoNotEnabled |   case videoNotEnabled | ||||||
|   case photoNotEnabled |   case photoNotEnabled | ||||||
| @@ -193,6 +194,8 @@ enum CaptureError { | |||||||
|       return "file-io-error" |       return "file-io-error" | ||||||
|     case .createTempFileError: |     case .createTempFileError: | ||||||
|       return "create-temp-file-error" |       return "create-temp-file-error" | ||||||
|  |     case .createRecordingDirectoryError: | ||||||
|  |       return "create-recording-directory-error" | ||||||
|     case .createRecorderError: |     case .createRecorderError: | ||||||
|       return "create-recorder-error" |       return "create-recorder-error" | ||||||
|     case .videoNotEnabled: |     case .videoNotEnabled: | ||||||
| @@ -218,6 +221,8 @@ enum CaptureError { | |||||||
|       return "An unexpected File IO error occured!" |       return "An unexpected File IO error occured!" | ||||||
|     case let .createTempFileError(message: message): |     case let .createTempFileError(message: message): | ||||||
|       return "Failed to create a temporary file! \(message ?? "(no additional message)")" |       return "Failed to create a temporary file! \(message ?? "(no additional message)")" | ||||||
|  |     case let .createRecordingDirectoryError(message: message): | ||||||
|  |       return "Failed to create a recording directory! \(message ?? "(no additional message)")" | ||||||
|     case let .createRecorderError(message: message): |     case let .createRecorderError(message: message): | ||||||
|       return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")" |       return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")" | ||||||
|     case .videoNotEnabled: |     case .videoNotEnabled: | ||||||
|   | |||||||
| @@ -15,6 +15,7 @@ extension CameraSession { | |||||||
|    Starts a video + audio recording with a custom Asset Writer. |    Starts a video + audio recording with a custom Asset Writer. | ||||||
|    */ |    */ | ||||||
|   func startRecording(options: RecordVideoOptions, |   func startRecording(options: RecordVideoOptions, | ||||||
|  |                       filePath: String, | ||||||
|                       onVideoRecorded: @escaping (_ video: Video) -> Void, |                       onVideoRecorded: @escaping (_ video: Video) -> Void, | ||||||
|                       onError: @escaping (_ error: CameraError) -> Void) { |                       onError: @escaping (_ error: CameraError) -> Void) { | ||||||
|     // Run on Camera Queue |     // Run on Camera Queue | ||||||
| @@ -34,6 +35,14 @@ extension CameraSession { | |||||||
|  |  | ||||||
|       let enableAudio = self.configuration?.audio != .disabled |       let enableAudio = self.configuration?.audio != .disabled | ||||||
|        |        | ||||||
|  |       // Callback for when new chunks are ready | ||||||
|  |       let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in | ||||||
|  |         guard let delegate = self.delegate else { | ||||||
|  |           return | ||||||
|  |         } | ||||||
|  |         delegate.onVideoChunkReady(chunk: chunk) | ||||||
|  |       } | ||||||
|  |  | ||||||
|       // Callback for when the recording ends |       // Callback for when the recording ends | ||||||
|       let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in |       let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in | ||||||
|         defer { |         defer { | ||||||
| @@ -62,7 +71,7 @@ extension CameraSession { | |||||||
|         } else { |         } else { | ||||||
|           if status == .completed { |           if status == .completed { | ||||||
|             // Recording was successfully saved |             // Recording was successfully saved | ||||||
|             let video = Video(path: recordingSession.url.absoluteString, |             let video = Video(path: recordingSession.outputDiretory.absoluteString, | ||||||
|                               duration: recordingSession.duration, |                               duration: recordingSession.duration, | ||||||
|                               size: recordingSession.size ?? CGSize.zero) |                               size: recordingSession.size ?? CGSize.zero) | ||||||
|             onVideoRecorded(video) |             onVideoRecorded(video) | ||||||
| @@ -73,22 +82,22 @@ extension CameraSession { | |||||||
|         } |         } | ||||||
|       } |       } | ||||||
|  |  | ||||||
|       // Create temporary file |       if !FileManager.default.fileExists(atPath: filePath) { | ||||||
|       let errorPointer = ErrorPointer(nilLiteral: ()) |         do { | ||||||
|       let fileExtension = options.fileType.descriptor ?? "mov" |           try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true) | ||||||
|       guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else { |         } catch { | ||||||
|         let message = errorPointer?.pointee?.description |           onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription))) | ||||||
|         onError(.capture(.createTempFileError(message: message))) |  | ||||||
|           return |           return | ||||||
|         } |         } | ||||||
|  |       } | ||||||
|  |  | ||||||
|       ReactLogger.log(level: .info, message: "Will record to temporary file: \(tempFilePath)") |       ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)") | ||||||
|       let tempURL = URL(string: "file://\(tempFilePath)")! |  | ||||||
|  |  | ||||||
|       do { |       do { | ||||||
|         // Create RecordingSession for the temp file |         // Create RecordingSession for the temp file | ||||||
|         let recordingSession = try RecordingSession(url: tempURL, |         let recordingSession = try RecordingSession(outputDiretory: filePath, | ||||||
|                                                     fileType: options.fileType, |                                                     fileType: options.fileType, | ||||||
|  |                                                     onChunkReady: onChunkReady, | ||||||
|                                                     completion: onFinish) |                                                     completion: onFinish) | ||||||
|  |  | ||||||
|         // Init Audio + Activate Audio Session (optional) |         // Init Audio + Activate Audio Session (optional) | ||||||
|   | |||||||
| @@ -109,6 +109,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|         try lambda(config) |         try lambda(config) | ||||||
|       } catch { |       } catch { | ||||||
|         self.onConfigureError(error) |         self.onConfigureError(error) | ||||||
|  |         return | ||||||
|       } |       } | ||||||
|       let difference = CameraConfiguration.Difference(between: self.configuration, and: config) |       let difference = CameraConfiguration.Difference(between: self.configuration, and: config) | ||||||
|  |  | ||||||
| @@ -117,7 +118,8 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|       do { |       do { | ||||||
|         // If needed, configure the AVCaptureSession (inputs, outputs) |         // If needed, configure the AVCaptureSession (inputs, outputs) | ||||||
|         if difference.isSessionConfigurationDirty { |         if difference.isSessionConfigurationDirty { | ||||||
|           try self.withSessionLock { |           self.captureSession.beginConfiguration() | ||||||
|  |  | ||||||
|           // 1. Update input device |           // 1. Update input device | ||||||
|           if difference.inputChanged { |           if difference.inputChanged { | ||||||
|             try self.configureDevice(configuration: config) |             try self.configureDevice(configuration: config) | ||||||
| @@ -135,11 +137,18 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|             self.configureOrientation(configuration: config) |             self.configureOrientation(configuration: config) | ||||||
|           } |           } | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         guard let device = self.videoDeviceInput?.device else { | ||||||
|  |           throw CameraError.device(.noDevice) | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) |         // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) | ||||||
|         if difference.isDeviceConfigurationDirty { |         if difference.isDeviceConfigurationDirty { | ||||||
|           try self.withDeviceLock { device in |           try device.lockForConfiguration() | ||||||
|  |           defer { | ||||||
|  |             device.unlockForConfiguration() | ||||||
|  |           } | ||||||
|  |  | ||||||
|           // 4. Configure format |           // 4. Configure format | ||||||
|           if difference.formatChanged { |           if difference.formatChanged { | ||||||
|             try self.configureFormat(configuration: config, device: device) |             try self.configureFormat(configuration: config, device: device) | ||||||
| @@ -162,6 +171,11 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|             self.configureExposure(configuration: config, device: device) |             self.configureExposure(configuration: config, device: device) | ||||||
|           } |           } | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         if difference.isSessionConfigurationDirty { | ||||||
|  |           // We commit the session config updates AFTER the device config, | ||||||
|  |           // that way we can also batch those changes into one update instead of doing two updates. | ||||||
|  |           self.captureSession.commitConfiguration() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // 9. Start or stop the session if needed |         // 9. Start or stop the session if needed | ||||||
| @@ -169,9 +183,11 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|  |  | ||||||
|         // 10. Enable or disable the Torch if needed (requires session to be running) |         // 10. Enable or disable the Torch if needed (requires session to be running) | ||||||
|         if difference.torchChanged { |         if difference.torchChanged { | ||||||
|           try self.withDeviceLock { device in |           try device.lockForConfiguration() | ||||||
|             try self.configureTorch(configuration: config, device: device) |           defer { | ||||||
|  |             device.unlockForConfiguration() | ||||||
|           } |           } | ||||||
|  |           try self.configureTorch(configuration: config, device: device) | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         // Notify about Camera initialization |         // Notify about Camera initialization | ||||||
| @@ -179,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|           self.delegate?.onSessionInitialized() |           self.delegate?.onSessionInitialized() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         self.delegate?.onCameraConfigurationChanged(config, difference) | ||||||
|         // After configuring, set this to the new configuration. |         // After configuring, set this to the new configuration. | ||||||
|         self.configuration = config |         self.configuration = config | ||||||
|       } catch { |       } catch { | ||||||
| @@ -206,41 +223,6 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC | |||||||
|     } |     } | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    Runs the given [lambda] under an AVCaptureSession configuration lock (`beginConfiguration()`) |  | ||||||
|    */ |  | ||||||
|   private func withSessionLock(_ lambda: () throws -> Void) throws { |  | ||||||
|     // Lock Capture Session for configuration |  | ||||||
|     ReactLogger.log(level: .info, message: "Beginning CameraSession configuration...") |  | ||||||
|     captureSession.beginConfiguration() |  | ||||||
|     defer { |  | ||||||
|       // Unlock Capture Session again and submit configuration to Hardware |  | ||||||
|       self.captureSession.commitConfiguration() |  | ||||||
|       ReactLogger.log(level: .info, message: "Committed CameraSession configuration!") |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Call lambda |  | ||||||
|     try lambda() |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |  | ||||||
|    Runs the given [lambda] under an AVCaptureDevice configuration lock (`lockForConfiguration()`) |  | ||||||
|    */ |  | ||||||
|   private func withDeviceLock(_ lambda: (_ device: AVCaptureDevice) throws -> Void) throws { |  | ||||||
|     guard let device = videoDeviceInput?.device else { |  | ||||||
|       throw CameraError.session(.cameraNotReady) |  | ||||||
|     } |  | ||||||
|     ReactLogger.log(level: .info, message: "Beginning CaptureDevice configuration...") |  | ||||||
|     try device.lockForConfiguration() |  | ||||||
|     defer { |  | ||||||
|       device.unlockForConfiguration() |  | ||||||
|       ReactLogger.log(level: .info, message: "Committed CaptureDevice configuration!") |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     // Call lambda with Device |  | ||||||
|     try lambda(device) |  | ||||||
|   } |  | ||||||
|  |  | ||||||
|   /** |   /** | ||||||
|    Starts or stops the CaptureSession if needed (`isActive`) |    Starts or stops the CaptureSession if needed (`isActive`) | ||||||
|    */ |    */ | ||||||
|   | |||||||
| @@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject { | |||||||
|    Called when the [CameraSession] successfully initializes |    Called when the [CameraSession] successfully initializes | ||||||
|    */ |    */ | ||||||
|   func onSessionInitialized() |   func onSessionInitialized() | ||||||
|  |    | ||||||
|  |   func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) | ||||||
|   /** |   /** | ||||||
|    Called when the [CameraSession] starts streaming frames. (isActive=true) |    Called when the [CameraSession] starts streaming frames. (isActive=true) | ||||||
|    */ |    */ | ||||||
| @@ -33,6 +35,10 @@ protocol CameraSessionDelegate: AnyObject { | |||||||
|    Called for every frame (if video or frameProcessor is enabled) |    Called for every frame (if video or frameProcessor is enabled) | ||||||
|    */ |    */ | ||||||
|   func onFrame(sampleBuffer: CMSampleBuffer) |   func onFrame(sampleBuffer: CMSampleBuffer) | ||||||
|  |   /** | ||||||
|  |    Called whenever a new video chunk is available | ||||||
|  |    */ | ||||||
|  |   func onVideoChunkReady(chunk: ChunkedRecorder.Chunk) | ||||||
|   /** |   /** | ||||||
|    Called whenever a QR/Barcode has been scanned. Only if the CodeScanner Output is enabled |    Called whenever a QR/Barcode has been scanned. Only if the CodeScanner Output is enabled | ||||||
|    */ |    */ | ||||||
|   | |||||||
							
								
								
									
										88
									
								
								package/ios/Core/ChunkedRecorder.swift
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										88
									
								
								package/ios/Core/ChunkedRecorder.swift
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,88 @@ | |||||||
|  | // | ||||||
|  | //  ChunkedRecorder.swift | ||||||
|  | //  VisionCamera | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 12/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | import Foundation | ||||||
|  | import AVFoundation | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ChunkedRecorder: NSObject { | ||||||
|  |  | ||||||
|  |   enum ChunkType { | ||||||
|  |     case initialization | ||||||
|  |     case data(index: UInt64, duration: CMTime?) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   struct Chunk { | ||||||
|  |     let url: URL | ||||||
|  |     let type: ChunkType | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   let outputURL: URL | ||||||
|  |   let onChunkReady: ((Chunk) -> Void) | ||||||
|  |  | ||||||
|  |   private var chunkIndex: UInt64 = 0 | ||||||
|  |  | ||||||
|  |   init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws { | ||||||
|  |     self.outputURL = outputURL | ||||||
|  |     self.onChunkReady = onChunkReady | ||||||
|  |     guard FileManager.default.fileExists(atPath: outputURL.path) else { | ||||||
|  |       throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  | } | ||||||
|  |  | ||||||
|  | extension ChunkedRecorder: AVAssetWriterDelegate { | ||||||
|  |  | ||||||
|  |   func assetWriter(_ writer: AVAssetWriter, | ||||||
|  |                    didOutputSegmentData segmentData: Data, | ||||||
|  |                    segmentType: AVAssetSegmentType, | ||||||
|  |                    segmentReport: AVAssetSegmentReport?) { | ||||||
|  |  | ||||||
|  |     switch segmentType { | ||||||
|  |     case .initialization: | ||||||
|  |       saveInitSegment(segmentData) | ||||||
|  |     case .separable: | ||||||
|  |       saveSegment(segmentData, report: segmentReport) | ||||||
|  |     @unknown default: | ||||||
|  |       fatalError("Unknown AVAssetSegmentType!") | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private func saveInitSegment(_ data: Data) { | ||||||
|  |     let url = outputURL.appendingPathComponent("init.mp4") | ||||||
|  |     save(data: data, url: url) | ||||||
|  |     onChunkReady(url: url, type: .initialization) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) { | ||||||
|  |     let name = "\(chunkIndex).mp4" | ||||||
|  |     let url = outputURL.appendingPathComponent(name) | ||||||
|  |     save(data: data, url: url) | ||||||
|  |     let duration = report? | ||||||
|  |       .trackReports | ||||||
|  |       .filter { $0.mediaType == .video } | ||||||
|  |       .first? | ||||||
|  |       .duration | ||||||
|  |     onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration)) | ||||||
|  |     chunkIndex += 1 | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private func save(data: Data, url: URL) { | ||||||
|  |     do { | ||||||
|  |       try data.write(to: url) | ||||||
|  |     } catch { | ||||||
|  |       ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)") | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |  | ||||||
|  |   private func onChunkReady(url: URL, type: ChunkType) { | ||||||
|  |     onChunkReady(Chunk(url: url, type: type)) | ||||||
|  |   } | ||||||
|  |  | ||||||
|  | } | ||||||
| @@ -29,6 +29,7 @@ class RecordingSession { | |||||||
|   private let assetWriter: AVAssetWriter |   private let assetWriter: AVAssetWriter | ||||||
|   private var audioWriter: AVAssetWriterInput? |   private var audioWriter: AVAssetWriterInput? | ||||||
|   private var videoWriter: AVAssetWriterInput? |   private var videoWriter: AVAssetWriterInput? | ||||||
|  |   private let recorder: ChunkedRecorder | ||||||
|   private let completionHandler: (RecordingSession, AVAssetWriter.Status, Error?) -> Void |   private let completionHandler: (RecordingSession, AVAssetWriter.Status, Error?) -> Void | ||||||
|  |  | ||||||
|   private var startTimestamp: CMTime? |   private var startTimestamp: CMTime? | ||||||
| @@ -48,8 +49,8 @@ class RecordingSession { | |||||||
|   /** |   /** | ||||||
|    Gets the file URL of the recorded video. |    Gets the file URL of the recorded video. | ||||||
|    */ |    */ | ||||||
|   var url: URL { |   var outputDiretory: URL { | ||||||
|     return assetWriter.outputURL |     return recorder.outputURL | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   /** |   /** | ||||||
| @@ -70,14 +71,32 @@ class RecordingSession { | |||||||
|     return (lastWrittenTimestamp - startTimestamp).seconds |     return (lastWrittenTimestamp - startTimestamp).seconds | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   init(url: URL, |   init(outputDiretory: String, | ||||||
|        fileType: AVFileType, |        fileType: AVFileType, | ||||||
|  |        onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void), | ||||||
|        completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws { |        completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws { | ||||||
|     completionHandler = completion |     completionHandler = completion | ||||||
|  |  | ||||||
|     do { |     do { | ||||||
|       assetWriter = try AVAssetWriter(outputURL: url, fileType: fileType) |       let outputURL = URL(fileURLWithPath: outputDiretory) | ||||||
|  |       recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady) | ||||||
|  |       assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!) | ||||||
|       assetWriter.shouldOptimizeForNetworkUse = false |       assetWriter.shouldOptimizeForNetworkUse = false | ||||||
|  |       assetWriter.outputFileTypeProfile = .mpeg4AppleHLS | ||||||
|  |       assetWriter.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1) | ||||||
|  |  | ||||||
|  |       /* | ||||||
|  |         Apple HLS fMP4 does not have an Edit List Box ('elst') in an initialization segment to remove | ||||||
|  |         audio priming duration which advanced audio formats like AAC have, since the sample tables | ||||||
|  |         are empty.  As a result, if the output PTS of the first non-fully trimmed audio sample buffer is | ||||||
|  |         kCMTimeZero, the audio samples’ presentation time in segment files may be pushed forward by the | ||||||
|  |         audio priming duration.  This may cause audio and video to be out of sync.  You should add a time | ||||||
|  |         offset to all samples to avoid this situation. | ||||||
|  |       */ | ||||||
|  |       let startTimeOffset = CMTime(value: 10, timescale: 1) | ||||||
|  |       assetWriter.initialSegmentStartTime = startTimeOffset | ||||||
|  |  | ||||||
|  |       assetWriter.delegate = recorder | ||||||
|     } catch let error as NSError { |     } catch let error as NSError { | ||||||
|       throw CameraError.capture(.createRecorderError(message: error.description)) |       throw CameraError.capture(.createRecorderError(message: error.description)) | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -32,6 +32,15 @@ extension AVCaptureOutput { | |||||||
|   func setOrientation(_ orientation: Orientation) { |   func setOrientation(_ orientation: Orientation) { | ||||||
|     // Set orientation for each connection |     // Set orientation for each connection | ||||||
|     for connection in connections { |     for connection in connections { | ||||||
|  |       connection.setOrientation(orientation) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  | extension AVCaptureConnection { | ||||||
|  |   func setOrientation(_ orientation: Orientation) { | ||||||
|     #if swift(>=5.9) |     #if swift(>=5.9) | ||||||
|       if #available(iOS 17.0, *) { |       if #available(iOS 17.0, *) { | ||||||
|         // Camera Sensors are always in landscape rotation (90deg). |         // Camera Sensors are always in landscape rotation (90deg). | ||||||
| @@ -41,19 +50,18 @@ extension AVCaptureOutput { | |||||||
|  |  | ||||||
|         // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording. |         // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording. | ||||||
|         //       Does that work when we flip the camera? |         //       Does that work when we flip the camera? | ||||||
|           if connection.isVideoRotationAngleSupported(degrees) { |         if isVideoRotationAngleSupported(degrees) { | ||||||
|             connection.videoRotationAngle = degrees |           videoRotationAngle = degrees | ||||||
|         } |         } | ||||||
|       } else { |       } else { | ||||||
|           if connection.isVideoOrientationSupported { |         if isVideoOrientationSupported { | ||||||
|             connection.videoOrientation = orientation.toAVCaptureVideoOrientation() |           videoOrientation = orientation.toAVCaptureVideoOrientation() | ||||||
|         } |         } | ||||||
|       } |       } | ||||||
|     #else |     #else | ||||||
|         if connection.isVideoOrientationSupported { |       if isVideoOrientationSupported { | ||||||
|           connection.videoOrientation = orientation.toAVCaptureVideoOrientation() |         videoOrientation = orientation.toAVCaptureVideoOrientation() | ||||||
|       } |       } | ||||||
|     #endif |     #endif | ||||||
|   } |   } | ||||||
|   } |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -40,6 +40,9 @@ extension AVMetadataObject.ObjectType { | |||||||
|     case "upc-e": |     case "upc-e": | ||||||
|       self = .upce |       self = .upce | ||||||
|       return |       return | ||||||
|  |     case "upc-a": | ||||||
|  |       self = .ean13 | ||||||
|  |       return | ||||||
|     case "qr": |     case "qr": | ||||||
|       self = .qr |       self = .qr | ||||||
|       return |       return | ||||||
|   | |||||||
							
								
								
									
										37
									
								
								package/ios/TestRecorder/AppDelegate.swift
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										37
									
								
								package/ios/TestRecorder/AppDelegate.swift
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,37 @@ | |||||||
|  | // | ||||||
|  | //  AppDelegate.swift | ||||||
|  | //  TestRecorder | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 11/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | import UIKit | ||||||
|  |  | ||||||
|  | @main | ||||||
|  | class AppDelegate: UIResponder, UIApplicationDelegate { | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { | ||||||
|  |         // Override point for customization after application launch. | ||||||
|  |         return true | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // MARK: UISceneSession Lifecycle | ||||||
|  |  | ||||||
|  |     func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { | ||||||
|  |         // Called when a new scene session is being created. | ||||||
|  |         // Use this method to select a configuration to create the new scene with. | ||||||
|  |         return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) { | ||||||
|  |         // Called when the user discards a scene session. | ||||||
|  |         // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. | ||||||
|  |         // Use this method to release any resources that were specific to the discarded scenes, as they will not return. | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | } | ||||||
|  |  | ||||||
| @@ -0,0 +1,11 @@ | |||||||
|  | { | ||||||
|  |   "colors" : [ | ||||||
|  |     { | ||||||
|  |       "idiom" : "universal" | ||||||
|  |     } | ||||||
|  |   ], | ||||||
|  |   "info" : { | ||||||
|  |     "author" : "xcode", | ||||||
|  |     "version" : 1 | ||||||
|  |   } | ||||||
|  | } | ||||||
| @@ -0,0 +1,13 @@ | |||||||
|  | { | ||||||
|  |   "images" : [ | ||||||
|  |     { | ||||||
|  |       "idiom" : "universal", | ||||||
|  |       "platform" : "ios", | ||||||
|  |       "size" : "1024x1024" | ||||||
|  |     } | ||||||
|  |   ], | ||||||
|  |   "info" : { | ||||||
|  |     "author" : "xcode", | ||||||
|  |     "version" : 1 | ||||||
|  |   } | ||||||
|  | } | ||||||
							
								
								
									
										6
									
								
								package/ios/TestRecorder/Assets.xcassets/Contents.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								package/ios/TestRecorder/Assets.xcassets/Contents.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | { | ||||||
|  |   "info" : { | ||||||
|  |     "author" : "xcode", | ||||||
|  |     "version" : 1 | ||||||
|  |   } | ||||||
|  | } | ||||||
							
								
								
									
										25
									
								
								package/ios/TestRecorder/Base.lproj/LaunchScreen.storyboard
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										25
									
								
								package/ios/TestRecorder/Base.lproj/LaunchScreen.storyboard
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,25 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||||||
|  | <document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM"> | ||||||
|  |     <dependencies> | ||||||
|  |         <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/> | ||||||
|  |         <capability name="Safe area layout guides" minToolsVersion="9.0"/> | ||||||
|  |         <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/> | ||||||
|  |     </dependencies> | ||||||
|  |     <scenes> | ||||||
|  |         <!--View Controller--> | ||||||
|  |         <scene sceneID="EHf-IW-A2E"> | ||||||
|  |             <objects> | ||||||
|  |                 <viewController id="01J-lp-oVM" sceneMemberID="viewController"> | ||||||
|  |                     <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3"> | ||||||
|  |                         <rect key="frame" x="0.0" y="0.0" width="375" height="667"/> | ||||||
|  |                         <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/> | ||||||
|  |                         <color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/> | ||||||
|  |                         <viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/> | ||||||
|  |                     </view> | ||||||
|  |                 </viewController> | ||||||
|  |                 <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/> | ||||||
|  |             </objects> | ||||||
|  |             <point key="canvasLocation" x="53" y="375"/> | ||||||
|  |         </scene> | ||||||
|  |     </scenes> | ||||||
|  | </document> | ||||||
							
								
								
									
										51
									
								
								package/ios/TestRecorder/Base.lproj/Main.storyboard
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								package/ios/TestRecorder/Base.lproj/Main.storyboard
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="32700.99.1234" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r"> | ||||||
|  |     <device id="retina6_12" orientation="portrait" appearance="light"/> | ||||||
|  |     <dependencies> | ||||||
|  |         <deployment identifier="iOS"/> | ||||||
|  |         <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="22685"/> | ||||||
|  |         <capability name="Safe area layout guides" minToolsVersion="9.0"/> | ||||||
|  |         <capability name="System colors in document resources" minToolsVersion="11.0"/> | ||||||
|  |         <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/> | ||||||
|  |     </dependencies> | ||||||
|  |     <scenes> | ||||||
|  |         <!--View Controller--> | ||||||
|  |         <scene sceneID="tne-QT-ifu"> | ||||||
|  |             <objects> | ||||||
|  |                 <viewController id="BYZ-38-t0r" customClass="ViewController" customModule="TestRecorder" customModuleProvider="target" sceneMemberID="viewController"> | ||||||
|  |                     <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC"> | ||||||
|  |                         <rect key="frame" x="0.0" y="0.0" width="393" height="852"/> | ||||||
|  |                         <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/> | ||||||
|  |                         <subviews> | ||||||
|  |                             <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="VWP-nN-U6K"> | ||||||
|  |                                 <rect key="frame" x="157.33333333333334" y="722.66666666666663" width="78.333333333333343" height="34.333333333333371"/> | ||||||
|  |                                 <inset key="imageEdgeInsets" minX="0.0" minY="0.0" maxX="2.2250738585072014e-308" maxY="0.0"/> | ||||||
|  |                                 <state key="normal" title="Record"/> | ||||||
|  |                                 <buttonConfiguration key="configuration" style="filled" title="Record"/> | ||||||
|  |                                 <connections> | ||||||
|  |                                     <action selector="toggleRecord:" destination="BYZ-38-t0r" eventType="touchUpInside" id="63a-uH-hTe"/> | ||||||
|  |                                 </connections> | ||||||
|  |                             </button> | ||||||
|  |                         </subviews> | ||||||
|  |                         <viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/> | ||||||
|  |                         <color key="backgroundColor" systemColor="systemBackgroundColor"/> | ||||||
|  |                         <constraints> | ||||||
|  |                             <constraint firstItem="6Tk-OE-BBY" firstAttribute="bottom" secondItem="VWP-nN-U6K" secondAttribute="bottom" constant="61" id="0iW-h7-WDE"/> | ||||||
|  |                             <constraint firstItem="VWP-nN-U6K" firstAttribute="centerX" secondItem="6Tk-OE-BBY" secondAttribute="centerX" id="yZb-ba-qfO"/> | ||||||
|  |                         </constraints> | ||||||
|  |                     </view> | ||||||
|  |                     <connections> | ||||||
|  |                         <outlet property="recordButton" destination="VWP-nN-U6K" id="gSk-uh-nDX"/> | ||||||
|  |                     </connections> | ||||||
|  |                 </viewController> | ||||||
|  |                 <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/> | ||||||
|  |             </objects> | ||||||
|  |             <point key="canvasLocation" x="115" y="-27"/> | ||||||
|  |         </scene> | ||||||
|  |     </scenes> | ||||||
|  |     <resources> | ||||||
|  |         <systemColor name="systemBackgroundColor"> | ||||||
|  |             <color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/> | ||||||
|  |         </systemColor> | ||||||
|  |     </resources> | ||||||
|  | </document> | ||||||
							
								
								
									
										25
									
								
								package/ios/TestRecorder/Info.plist
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										25
									
								
								package/ios/TestRecorder/Info.plist
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,25 @@ | |||||||
|  | <?xml version="1.0" encoding="UTF-8"?> | ||||||
|  | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> | ||||||
|  | <plist version="1.0"> | ||||||
|  | <dict> | ||||||
|  | 	<key>UIApplicationSceneManifest</key> | ||||||
|  | 	<dict> | ||||||
|  | 		<key>UIApplicationSupportsMultipleScenes</key> | ||||||
|  | 		<false/> | ||||||
|  | 		<key>UISceneConfigurations</key> | ||||||
|  | 		<dict> | ||||||
|  | 			<key>UIWindowSceneSessionRoleApplication</key> | ||||||
|  | 			<array> | ||||||
|  | 				<dict> | ||||||
|  | 					<key>UISceneConfigurationName</key> | ||||||
|  | 					<string>Default Configuration</string> | ||||||
|  | 					<key>UISceneDelegateClassName</key> | ||||||
|  | 					<string>$(PRODUCT_MODULE_NAME).SceneDelegate</string> | ||||||
|  | 					<key>UISceneStoryboardFile</key> | ||||||
|  | 					<string>Main</string> | ||||||
|  | 				</dict> | ||||||
|  | 			</array> | ||||||
|  | 		</dict> | ||||||
|  | 	</dict> | ||||||
|  | </dict> | ||||||
|  | </plist> | ||||||
							
								
								
									
										15
									
								
								package/ios/TestRecorder/ReactStubs.h
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								package/ios/TestRecorder/ReactStubs.h
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | |||||||
|  | // | ||||||
|  | //  ReactStubs.h | ||||||
|  | //  TestRecorder | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 12/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | #import <UIKit/UIKit.h> | ||||||
|  |  | ||||||
|  | @interface UIView (React) | ||||||
|  |  | ||||||
|  | - (void)didSetProps:(NSArray<NSString *> *)changedProps; | ||||||
|  |  | ||||||
|  | @end | ||||||
							
								
								
									
										17
									
								
								package/ios/TestRecorder/ReactStubs.m
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										17
									
								
								package/ios/TestRecorder/ReactStubs.m
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,17 @@ | |||||||
|  | // | ||||||
|  | //  ReactStubs.m | ||||||
|  | //  TestRecorder | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 12/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | #import "ReactStubs.h" | ||||||
|  |  | ||||||
|  | @implementation UIView (React) | ||||||
|  |  | ||||||
|  | - (void)didSetProps:(__unused NSArray<NSString *> *)changedProps | ||||||
|  | { | ||||||
|  | } | ||||||
|  |  | ||||||
|  | @end | ||||||
							
								
								
									
										102
									
								
								package/ios/TestRecorder/ReactStubs.swift
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										102
									
								
								package/ios/TestRecorder/ReactStubs.swift
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,102 @@ | |||||||
|  | // | ||||||
|  | //  ReactStubs.swift | ||||||
|  | //  TestRecorder | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 11/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | import UIKit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | enum RCTLogLevel: String { | ||||||
|  |   case trace | ||||||
|  |   case info | ||||||
|  |   case warning | ||||||
|  |   case error | ||||||
|  | } | ||||||
|  |  | ||||||
|  | enum RCTLogSource { | ||||||
|  |   case native | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func RCTDefaultLogFunction(_ level: RCTLogLevel, _ source: RCTLogSource, _ file: String, _ line: NSNumber, _ message: String) { | ||||||
|  |   print(level.rawValue, "-", message) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | typealias RCTDirectEventBlock = (Any?) -> Void | ||||||
|  | typealias RCTPromiseResolveBlock = (Any?) -> Void | ||||||
|  | typealias RCTPromiseRejectBlock = (String, String, NSError?) -> Void | ||||||
|  | typealias RCTResponseSenderBlock = (Any) -> Void | ||||||
|  |  | ||||||
|  | func NSNull() -> [String: String] { | ||||||
|  |   return [:] | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | func makeReactError(_ cameraError: CameraError, cause: NSError?) -> [String: Any] { | ||||||
|  |   var causeDictionary: [String: Any]? | ||||||
|  |   if let cause = cause { | ||||||
|  |     causeDictionary = [ | ||||||
|  |       "cause": "\(cause.domain): \(cause.code) \(cause.description)", | ||||||
|  |       "userInfo": cause.userInfo | ||||||
|  |     ] | ||||||
|  |   } | ||||||
|  |   return [ | ||||||
|  |     "error": "\(cameraError.code): \(cameraError.message)", | ||||||
|  |     "extra": [ | ||||||
|  |       "code": cameraError.code, | ||||||
|  |       "message": cameraError.message, | ||||||
|  |       "cause": causeDictionary ?? NSNull(), | ||||||
|  |     ] | ||||||
|  |   ] | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func makeReactError(_ cameraError: CameraError) -> [String: Any] { | ||||||
|  |   return makeReactError(cameraError, cause: nil) | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class RCTFPSGraph: UIView { | ||||||
|  |   convenience init(frame: CGRect, color: UIColor) { | ||||||
|  |     self.init(frame: frame) | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   func onTick(_ tick: CFTimeInterval) { | ||||||
|  |      | ||||||
|  |   } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func RCTTempFilePath(_ ext: String, _ error: ErrorPointer) -> String? { | ||||||
|  |   let directory = NSTemporaryDirectory().appending("ReactNative") | ||||||
|  |   let fm = FileManager.default | ||||||
|  |   if fm.fileExists(atPath: directory) { | ||||||
|  |     try! fm.removeItem(atPath: directory) | ||||||
|  |   } | ||||||
|  |   if !fm.fileExists(atPath: directory) { | ||||||
|  |     try! fm.createDirectory(atPath: directory, withIntermediateDirectories: true) | ||||||
|  |   } | ||||||
|  |   return directory | ||||||
|  |     .appending("/").appending(UUID().uuidString) | ||||||
|  |     .appending(".").appending(ext) | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class RCTViewManager: NSObject { | ||||||
|  |    | ||||||
|  |   var methodQueue: DispatchQueue! { nil } | ||||||
|  |   class func requiresMainQueueSetup() -> Bool { false } | ||||||
|  |   func view() -> UIView! { nil } | ||||||
|  |    | ||||||
|  |   struct Bridge { | ||||||
|  |     let uiManager = UIManager() | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   struct UIManager { | ||||||
|  |     func view(forReactTag: NSNumber) -> UIView! { | ||||||
|  |       nil | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   let bridge: Bridge = Bridge() | ||||||
|  | } | ||||||
							
								
								
									
										53
									
								
								package/ios/TestRecorder/SceneDelegate.swift
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								package/ios/TestRecorder/SceneDelegate.swift
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,53 @@ | |||||||
|  | // | ||||||
|  | //  SceneDelegate.swift | ||||||
|  | //  TestRecorder | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 11/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | import UIKit | ||||||
|  |  | ||||||
|  | class SceneDelegate: UIResponder, UIWindowSceneDelegate { | ||||||
|  |  | ||||||
|  |     var window: UIWindow? | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { | ||||||
|  |         // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. | ||||||
|  |         // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. | ||||||
|  |         // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). | ||||||
|  |         guard let _ = (scene as? UIWindowScene) else { return } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     func sceneDidDisconnect(_ scene: UIScene) { | ||||||
|  |         // Called as the scene is being released by the system. | ||||||
|  |         // This occurs shortly after the scene enters the background, or when its session is discarded. | ||||||
|  |         // Release any resources associated with this scene that can be re-created the next time the scene connects. | ||||||
|  |         // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     func sceneDidBecomeActive(_ scene: UIScene) { | ||||||
|  |         // Called when the scene has moved from an inactive state to an active state. | ||||||
|  |         // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     func sceneWillResignActive(_ scene: UIScene) { | ||||||
|  |         // Called when the scene will move from an active state to an inactive state. | ||||||
|  |         // This may occur due to temporary interruptions (ex. an incoming phone call). | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     func sceneWillEnterForeground(_ scene: UIScene) { | ||||||
|  |         // Called as the scene transitions from the background to the foreground. | ||||||
|  |         // Use this method to undo the changes made on entering the background. | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     func sceneDidEnterBackground(_ scene: UIScene) { | ||||||
|  |         // Called as the scene transitions from the foreground to the background. | ||||||
|  |         // Use this method to save data, release shared resources, and store enough scene-specific state information | ||||||
|  |         // to restore the scene back to its current state. | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |  | ||||||
|  | } | ||||||
|  |  | ||||||
							
								
								
									
										6
									
								
								package/ios/TestRecorder/TestRecorder-Bridging-Header.h
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								package/ios/TestRecorder/TestRecorder-Bridging-Header.h
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | // | ||||||
|  | //  Use this file to import your target's public headers that you would like to expose to Swift. | ||||||
|  | // | ||||||
|  |  | ||||||
|  |  | ||||||
|  | #import "ReactStubs.h" | ||||||
							
								
								
									
										131
									
								
								package/ios/TestRecorder/ViewController.swift
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										131
									
								
								package/ios/TestRecorder/ViewController.swift
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,131 @@ | |||||||
|  | // | ||||||
|  | //  ViewController.swift | ||||||
|  | //  TestRecorder | ||||||
|  | // | ||||||
|  | //  Created by Rafael Bastos on 11/07/2024. | ||||||
|  | //  Copyright © 2024 mrousavy. All rights reserved. | ||||||
|  | // | ||||||
|  |  | ||||||
|  | import UIKit | ||||||
|  | import AVFoundation | ||||||
|  |  | ||||||
|  | class ViewController: UIViewController { | ||||||
|  |    | ||||||
|  |   @IBOutlet weak var recordButton: UIButton! | ||||||
|  |    | ||||||
|  |   let cameraView = CameraView() | ||||||
|  |   let filePath: String = { | ||||||
|  |     NSTemporaryDirectory() + "TestRecorder" | ||||||
|  |   }() | ||||||
|  |    | ||||||
|  |   override func viewDidLoad() { | ||||||
|  |     super.viewDidLoad() | ||||||
|  |      | ||||||
|  |     try? FileManager.default.removeItem(atPath: filePath) | ||||||
|  |      | ||||||
|  |     cameraView.translatesAutoresizingMaskIntoConstraints = false; | ||||||
|  |     view.insertSubview(cameraView, at: 0) | ||||||
|  |     NSLayoutConstraint.activate([ | ||||||
|  |       cameraView.topAnchor.constraint(equalTo: view.topAnchor), | ||||||
|  |       cameraView.leadingAnchor.constraint(equalTo: view.leadingAnchor), | ||||||
|  |       cameraView.trailingAnchor.constraint(equalTo: view.trailingAnchor), | ||||||
|  |       cameraView.bottomAnchor.constraint(equalTo: view.bottomAnchor), | ||||||
|  |     ]) | ||||||
|  |      | ||||||
|  |     recordButton.isHidden = true | ||||||
|  |     cameraView.onInitialized = { _ in | ||||||
|  |       DispatchQueue.main.async { | ||||||
|  |         self.recordButton.isHidden = false | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |     cameraView.onInitReady = { json in | ||||||
|  |       print("onInitReady:", json ?? "nil") | ||||||
|  |     } | ||||||
|  |     cameraView.onVideoChunkReady = { json in | ||||||
|  |       print("onVideoChunkReady:", json ?? "nil") | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     Task { @MainActor in | ||||||
|  |       await requestAuthorizations() | ||||||
|  |        | ||||||
|  |       cameraView.photo = true | ||||||
|  |       cameraView.video = true | ||||||
|  |       cameraView.audio = false | ||||||
|  |       cameraView.isActive = true | ||||||
|  |       cameraView.cameraId = getCameraDeviceId() as NSString? | ||||||
|  |       cameraView.didSetProps([]) | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   func isAuthorized(for mediaType: AVMediaType) async -> Bool { | ||||||
|  |     let status = AVCaptureDevice.authorizationStatus(for: mediaType) | ||||||
|  |     var isAuthorized = status == .authorized | ||||||
|  |     if status == .notDetermined { | ||||||
|  |       isAuthorized = await AVCaptureDevice.requestAccess(for: mediaType) | ||||||
|  |     } | ||||||
|  |     return isAuthorized | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |    | ||||||
|  |   func requestAuthorizations() async { | ||||||
|  |     guard await isAuthorized(for: .video) else { return } | ||||||
|  |     guard await isAuthorized(for: .audio) else { return } | ||||||
|  |     // Set up the capture session. | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   private func getCameraDeviceId() -> String? { | ||||||
|  |     let deviceTypes: [AVCaptureDevice.DeviceType] = [ | ||||||
|  |       .builtInWideAngleCamera | ||||||
|  |     ] | ||||||
|  |     let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: .video, position: .back) | ||||||
|  |      | ||||||
|  |     let device = discoverySession.devices.first | ||||||
|  |      | ||||||
|  |     return device?.uniqueID | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   @IBAction | ||||||
|  |   func toggleRecord(_ button: UIButton) { | ||||||
|  |     if button.title(for: .normal) == "Stop" { | ||||||
|  |        | ||||||
|  |       cameraView.stopRecording(promise: Promise( | ||||||
|  |         resolver: { result in | ||||||
|  |           print("result") | ||||||
|  |         }, rejecter: { code, message, cause in | ||||||
|  |           print("error") | ||||||
|  |         })) | ||||||
|  |        | ||||||
|  |       button.setTitle("Record", for: .normal) | ||||||
|  |       button.configuration = .filled() | ||||||
|  |        | ||||||
|  |     } else { | ||||||
|  |       cameraView.startRecording( | ||||||
|  |         options: [ | ||||||
|  |           "fileType": "mp4", | ||||||
|  |           "videoCodec": "h265", | ||||||
|  |         ], | ||||||
|  |         filePath: filePath) { callback in | ||||||
|  |           print("callback", callback) | ||||||
|  |         } | ||||||
|  |        | ||||||
|  |       button.setTitle("Stop", for: .normal) | ||||||
|  |       button.configuration = .bordered() | ||||||
|  |     } | ||||||
|  |   } | ||||||
|  |    | ||||||
|  |   override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) { | ||||||
|  |     switch UIDevice.current.orientation { | ||||||
|  |     case .landscapeLeft: | ||||||
|  |       cameraView.orientation = "landscape-right" | ||||||
|  |     case .landscapeRight: | ||||||
|  |       cameraView.orientation = "landscape-left" | ||||||
|  |     default: | ||||||
|  |       cameraView.orientation = "portrait" | ||||||
|  |     } | ||||||
|  |      | ||||||
|  |     cameraView.didSetProps([]) | ||||||
|  |     super.viewWillTransition(to: size, with: coordinator) | ||||||
|  |   } | ||||||
|  |    | ||||||
|  | } | ||||||
|  |  | ||||||
| @@ -10,7 +10,7 @@ import AVFoundation | |||||||
| import Foundation | import Foundation | ||||||
|  |  | ||||||
| struct RecordVideoOptions { | struct RecordVideoOptions { | ||||||
|   var fileType: AVFileType = .mov |   var fileType: AVFileType = .mp4 | ||||||
|   var flash: Torch = .off |   var flash: Torch = .off | ||||||
|   var codec: AVVideoCodecType? |   var codec: AVVideoCodecType? | ||||||
|   /** |   /** | ||||||
|   | |||||||
| @@ -7,6 +7,79 @@ | |||||||
| 	objects = { | 	objects = { | ||||||
|  |  | ||||||
| /* Begin PBXBuildFile section */ | /* Begin PBXBuildFile section */ | ||||||
|  | 		B31481772C46547B00084A26 /* CameraViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518125E0102000DB86D6 /* CameraViewManager.swift */; }; | ||||||
|  | 		B31481782C46558C00084A26 /* CameraView+TakePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */; }; | ||||||
|  | 		B31481792C46559700084A26 /* CameraView+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC52AD7F08E00169C0D /* CameraView+Focus.swift */; }; | ||||||
|  | 		B3AF8E862C410FB700CC198C /* ReactStubs.m in Sources */ = {isa = PBXBuildFile; fileRef = B3AF8E852C410FB700CC198C /* ReactStubs.m */; }; | ||||||
|  | 		B3AF8E882C41159300CC198C /* ChunkedRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */; }; | ||||||
|  | 		B3AF8E892C41159300CC198C /* ChunkedRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */; }; | ||||||
|  | 		B3EF9F0D2C3FBD8300832EE7 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F0C2C3FBD8300832EE7 /* AppDelegate.swift */; }; | ||||||
|  | 		B3EF9F0F2C3FBD8300832EE7 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F0E2C3FBD8300832EE7 /* SceneDelegate.swift */; }; | ||||||
|  | 		B3EF9F112C3FBD8300832EE7 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F102C3FBD8300832EE7 /* ViewController.swift */; }; | ||||||
|  | 		B3EF9F142C3FBD8300832EE7 /* Base in Resources */ = {isa = PBXBuildFile; fileRef = B3EF9F132C3FBD8300832EE7 /* Base */; }; | ||||||
|  | 		B3EF9F162C3FBD8400832EE7 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = B3EF9F152C3FBD8400832EE7 /* Assets.xcassets */; }; | ||||||
|  | 		B3EF9F192C3FBD8400832EE7 /* Base in Resources */ = {isa = PBXBuildFile; fileRef = B3EF9F182C3FBD8400832EE7 /* Base */; }; | ||||||
|  | 		B3EF9F1E2C3FBDCF00832EE7 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; }; | ||||||
|  | 		B3EF9F1F2C3FBDDC00832EE7 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; }; | ||||||
|  | 		B3EF9F212C3FBDFC00832EE7 /* ReactStubs.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F202C3FBDFC00832EE7 /* ReactStubs.swift */; }; | ||||||
|  | 		B3EF9F222C3FBE8200832EE7 /* CameraConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E62AD698DF00E93869 /* CameraConfiguration.swift */; }; | ||||||
|  | 		B3EF9F232C3FBE8B00832EE7 /* VideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882332AD969E000317161 /* VideoStabilizationMode.swift */; }; | ||||||
|  | 		B3EF9F242C3FBEBC00832EE7 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; }; | ||||||
|  | 		B3EF9F252C3FBED900832EE7 /* Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DE2AD6FB230087F063 /* Orientation.swift */; }; | ||||||
|  | 		B3EF9F262C3FBEEA00832EE7 /* CameraDeviceFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882312AD966FC00317161 /* CameraDeviceFormat.swift */; }; | ||||||
|  | 		B3EF9F272C3FBEF800832EE7 /* PixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87B11BE2A8E63B700732EBF /* PixelFormat.swift */; }; | ||||||
|  | 		B3EF9F282C3FBF1900832EE7 /* JSUnionValue.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882372AD96B4400317161 /* JSUnionValue.swift */; }; | ||||||
|  | 		B3EF9F292C3FBF2500832EE7 /* Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103E02AD7046E0087F063 /* Torch.swift */; }; | ||||||
|  | 		B3EF9F2A2C3FBF3400832EE7 /* CodeScannerOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60AD2ACC9731009D612F /* CodeScannerOptions.swift */; }; | ||||||
|  | 		B3EF9F2B2C3FBF4100832EE7 /* AVMetadataObject.ObjectType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60B02ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift */; }; | ||||||
|  | 		B3EF9F2C2C3FBF4A00832EE7 /* EnumParserError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517325E0102000DB86D6 /* EnumParserError.swift */; }; | ||||||
|  | 		B3EF9F2D2C3FBF9600832EE7 /* CameraSessionDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */; }; | ||||||
|  | 		B3EF9F2E2C3FBFA600832EE7 /* CameraSession+CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685EC2AD6A5E600E93869 /* CameraSession+CodeScanner.swift */; }; | ||||||
|  | 		B3EF9F2F2C3FBFB200832EE7 /* CameraSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E42AD68D9300E93869 /* CameraSession.swift */; }; | ||||||
|  | 		B3EF9F302C3FBFBB00832EE7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; }; | ||||||
|  | 		B3EF9F312C3FBFD500832EE7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; }; | ||||||
|  | 		B3EF9F322C3FBFF100832EE7 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; }; | ||||||
|  | 		B3EF9F332C3FC00900832EE7 /* CameraSession+Configuration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC72AD8005400169C0D /* CameraSession+Configuration.swift */; }; | ||||||
|  | 		B3EF9F362C3FC05600832EE7 /* ResizeMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */; }; | ||||||
|  | 		B3EF9F372C3FC0CA00832EE7 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; }; | ||||||
|  | 		B3EF9F382C3FC0D900832EE7 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; }; | ||||||
|  | 		B3EF9F3A2C3FC2EB00832EE7 /* AutoFocusSystem.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882352AD96AFF00317161 /* AutoFocusSystem.swift */; }; | ||||||
|  | 		B3EF9F3C2C3FC30D00832EE7 /* AVCaptureDevice.Position+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */; }; | ||||||
|  | 		B3EF9F4A2C3FC31E00832EE7 /* AVFrameRateRange+includes.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */; }; | ||||||
|  | 		B3EF9F4B2C3FC31E00832EE7 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; }; | ||||||
|  | 		B3EF9F4C2C3FC31E00832EE7 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; }; | ||||||
|  | 		B3EF9F4D2C3FC31E00832EE7 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */; }; | ||||||
|  | 		B3EF9F4E2C3FC31E00832EE7 /* AVCaptureOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCaptureOutput+mirror.swift */; }; | ||||||
|  | 		B3EF9F4F2C3FC31E00832EE7 /* Collection+safe.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516225E0102000DB86D6 /* Collection+safe.swift */; }; | ||||||
|  | 		B3EF9F502C3FC31E00832EE7 /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8207AAE2B0E67460002990F /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift */; }; | ||||||
|  | 		B3EF9F512C3FC31E00832EE7 /* AVCaptureDevice+minFocusDistance.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88977BD2B556DBA0095C92C /* AVCaptureDevice+minFocusDistance.swift */; }; | ||||||
|  | 		B3EF9F522C3FC31E00832EE7 /* AVCaptureDevice+physicalDevices.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */; }; | ||||||
|  | 		B3EF9F532C3FC31E00832EE7 /* AVCaptureDevice+neutralZoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */; }; | ||||||
|  | 		B3EF9F542C3FC31E00832EE7 /* AVCaptureDevice.Format+dimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift */; }; | ||||||
|  | 		B3EF9F552C3FC31E00832EE7 /* AVCaptureVideoDataOutput+pixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC32AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift */; }; | ||||||
|  | 		B3EF9F562C3FC31E00832EE7 /* AVCaptureSession+synchronizeBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8207AAC2B0E5DD70002990F /* AVCaptureSession+synchronizeBuffer.swift */; }; | ||||||
|  | 		B3EF9F572C3FC31E00832EE7 /* AVCaptureDevice+isMultiCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */; }; | ||||||
|  | 		B3EF9F582C3FC31E00832EE7 /* AVCaptureDevice+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */; }; | ||||||
|  | 		B3EF9F592C3FC31E00832EE7 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; }; | ||||||
|  | 		B3EF9F5A2C3FC31E00832EE7 /* CMVideoDimensions+toCGSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */; }; | ||||||
|  | 		B3EF9F5B2C3FC33000832EE7 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift */; }; | ||||||
|  | 		B3EF9F5C2C3FC33E00832EE7 /* RecordVideoOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC92AD8034E00169C0D /* RecordVideoOptions.swift */; }; | ||||||
|  | 		B3EF9F5D2C3FC34600832EE7 /* Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AECB2AD803B200169C0D /* Video.swift */; }; | ||||||
|  | 		B3EF9F5E2C3FC43000832EE7 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */; }; | ||||||
|  | 		B3EF9F5F2C3FC43000832EE7 /* AVAuthorizationStatus+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */; }; | ||||||
|  | 		B3EF9F602C3FC43000832EE7 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */; }; | ||||||
|  | 		B3EF9F612C3FC43000832EE7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; }; | ||||||
|  | 		B3EF9F622C3FC43000832EE7 /* AVVideoCodecType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */; }; | ||||||
|  | 		B3EF9F632C3FC43000832EE7 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */; }; | ||||||
|  | 		B3EF9F642C3FC43000832EE7 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */; }; | ||||||
|  | 		B3EF9F652C3FC43C00832EE7 /* CameraSession+Audio.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DA2AD6F0A00087F063 /* CameraSession+Audio.swift */; }; | ||||||
|  | 		B3EF9F662C3FC44B00832EE7 /* CameraSession+Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E82AD6A5D600E93869 /* CameraSession+Video.swift */; }; | ||||||
|  | 		B3EF9F672C3FC44B00832EE7 /* CameraSession+Photo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685EA2AD6A5DE00E93869 /* CameraSession+Photo.swift */; }; | ||||||
|  | 		B3EF9F682C3FC44B00832EE7 /* CameraSession+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DC2AD6F62C0087F063 /* CameraSession+Focus.swift */; }; | ||||||
|  | 		B3EF9F692C3FC44B00832EE7 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; }; | ||||||
|  | 		B3EF9F6A2C3FC46900832EE7 /* Promise.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517025E0102000DB86D6 /* Promise.swift */; }; | ||||||
|  | 		B3EF9F6B2C3FD35600832EE7 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; }; | ||||||
|  | 		B3EF9F6C2C3FD36800832EE7 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; }; | ||||||
| 		B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */; }; | 		B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */; }; | ||||||
| 		B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; }; | 		B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; }; | ||||||
| 		B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; }; | 		B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; }; | ||||||
| @@ -94,6 +167,19 @@ | |||||||
|  |  | ||||||
| /* Begin PBXFileReference section */ | /* Begin PBXFileReference section */ | ||||||
| 		134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; }; | 		134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; }; | ||||||
|  | 		B3AF8E832C410FB600CC198C /* TestRecorder-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "TestRecorder-Bridging-Header.h"; sourceTree = "<group>"; }; | ||||||
|  | 		B3AF8E842C410FB700CC198C /* ReactStubs.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ReactStubs.h; sourceTree = "<group>"; }; | ||||||
|  | 		B3AF8E852C410FB700CC198C /* ReactStubs.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ReactStubs.m; sourceTree = "<group>"; }; | ||||||
|  | 		B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChunkedRecorder.swift; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F0A2C3FBD8300832EE7 /* TestRecorder.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TestRecorder.app; sourceTree = BUILT_PRODUCTS_DIR; }; | ||||||
|  | 		B3EF9F0C2C3FBD8300832EE7 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F0E2C3FBD8300832EE7 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F102C3FBD8300832EE7 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F132C3FBD8300832EE7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F152C3FBD8400832EE7 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F182C3FBD8400832EE7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F1A2C3FBD8400832EE7 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; | ||||||
|  | 		B3EF9F202C3FBDFC00832EE7 /* ReactStubs.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ReactStubs.swift; sourceTree = "<group>"; }; | ||||||
| 		B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ResizeMode.swift; sourceTree = "<group>"; }; | 		B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ResizeMode.swift; sourceTree = "<group>"; }; | ||||||
| 		B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; }; | 		B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; }; | ||||||
| 		B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; }; | 		B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; }; | ||||||
| @@ -191,6 +277,13 @@ | |||||||
| 			); | 			); | ||||||
| 			runOnlyForDeploymentPostprocessing = 0; | 			runOnlyForDeploymentPostprocessing = 0; | ||||||
| 		}; | 		}; | ||||||
|  | 		B3EF9F072C3FBD8300832EE7 /* Frameworks */ = { | ||||||
|  | 			isa = PBXFrameworksBuildPhase; | ||||||
|  | 			buildActionMask = 2147483647; | ||||||
|  | 			files = ( | ||||||
|  | 			); | ||||||
|  | 			runOnlyForDeploymentPostprocessing = 0; | ||||||
|  | 		}; | ||||||
| /* End PBXFrameworksBuildPhase section */ | /* End PBXFrameworksBuildPhase section */ | ||||||
|  |  | ||||||
| /* Begin PBXGroup section */ | /* Begin PBXGroup section */ | ||||||
| @@ -221,10 +314,30 @@ | |||||||
| 				B887516125E0102000DB86D6 /* Extensions */, | 				B887516125E0102000DB86D6 /* Extensions */, | ||||||
| 				B887517225E0102000DB86D6 /* Parsers */, | 				B887517225E0102000DB86D6 /* Parsers */, | ||||||
| 				B887516D25E0102000DB86D6 /* React Utils */, | 				B887516D25E0102000DB86D6 /* React Utils */, | ||||||
|  | 				B3EF9F0B2C3FBD8300832EE7 /* TestRecorder */, | ||||||
| 				134814211AA4EA7D00B7C361 /* Products */, | 				134814211AA4EA7D00B7C361 /* Products */, | ||||||
|  | 				B3EF9F0A2C3FBD8300832EE7 /* TestRecorder.app */, | ||||||
| 			); | 			); | ||||||
| 			sourceTree = "<group>"; | 			sourceTree = "<group>"; | ||||||
| 		}; | 		}; | ||||||
|  | 		B3EF9F0B2C3FBD8300832EE7 /* TestRecorder */ = { | ||||||
|  | 			isa = PBXGroup; | ||||||
|  | 			children = ( | ||||||
|  | 				B3EF9F0C2C3FBD8300832EE7 /* AppDelegate.swift */, | ||||||
|  | 				B3EF9F0E2C3FBD8300832EE7 /* SceneDelegate.swift */, | ||||||
|  | 				B3EF9F102C3FBD8300832EE7 /* ViewController.swift */, | ||||||
|  | 				B3EF9F202C3FBDFC00832EE7 /* ReactStubs.swift */, | ||||||
|  | 				B3AF8E842C410FB700CC198C /* ReactStubs.h */, | ||||||
|  | 				B3AF8E852C410FB700CC198C /* ReactStubs.m */, | ||||||
|  | 				B3AF8E832C410FB600CC198C /* TestRecorder-Bridging-Header.h */, | ||||||
|  | 				B3EF9F122C3FBD8300832EE7 /* Main.storyboard */, | ||||||
|  | 				B3EF9F152C3FBD8400832EE7 /* Assets.xcassets */, | ||||||
|  | 				B3EF9F172C3FBD8400832EE7 /* LaunchScreen.storyboard */, | ||||||
|  | 				B3EF9F1A2C3FBD8400832EE7 /* Info.plist */, | ||||||
|  | 			); | ||||||
|  | 			path = TestRecorder; | ||||||
|  | 			sourceTree = "<group>"; | ||||||
|  | 		}; | ||||||
| 		B80175EA2ABDEBBB00E7DE90 /* Types */ = { | 		B80175EA2ABDEBBB00E7DE90 /* Types */ = { | ||||||
| 			isa = PBXGroup; | 			isa = PBXGroup; | ||||||
| 			children = ( | 			children = ( | ||||||
| @@ -257,6 +370,7 @@ | |||||||
| 				B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */, | 				B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */, | ||||||
| 				B83D5EE629377117000AFD2F /* PreviewView.swift */, | 				B83D5EE629377117000AFD2F /* PreviewView.swift */, | ||||||
| 				B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */, | 				B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */, | ||||||
|  | 				B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */, | ||||||
| 				B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */, | 				B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */, | ||||||
| 				B84760DE2608F57D004C3180 /* CameraQueues.swift */, | 				B84760DE2608F57D004C3180 /* CameraQueues.swift */, | ||||||
| 				B887518325E0102000DB86D6 /* CameraError.swift */, | 				B887518325E0102000DB86D6 /* CameraError.swift */, | ||||||
| @@ -366,18 +480,42 @@ | |||||||
| 			productReference = 134814201AA4EA6300B7C361 /* libVisionCamera.a */; | 			productReference = 134814201AA4EA6300B7C361 /* libVisionCamera.a */; | ||||||
| 			productType = "com.apple.product-type.library.static"; | 			productType = "com.apple.product-type.library.static"; | ||||||
| 		}; | 		}; | ||||||
|  | 		B3EF9F092C3FBD8300832EE7 /* TestRecorder */ = { | ||||||
|  | 			isa = PBXNativeTarget; | ||||||
|  | 			buildConfigurationList = B3EF9F1D2C3FBD8400832EE7 /* Build configuration list for PBXNativeTarget "TestRecorder" */; | ||||||
|  | 			buildPhases = ( | ||||||
|  | 				B3EF9F062C3FBD8300832EE7 /* Sources */, | ||||||
|  | 				B3EF9F072C3FBD8300832EE7 /* Frameworks */, | ||||||
|  | 				B3EF9F082C3FBD8300832EE7 /* Resources */, | ||||||
|  | 			); | ||||||
|  | 			buildRules = ( | ||||||
|  | 			); | ||||||
|  | 			dependencies = ( | ||||||
|  | 			); | ||||||
|  | 			name = TestRecorder; | ||||||
|  | 			productName = TestRecorder; | ||||||
|  | 			productReference = B3EF9F0A2C3FBD8300832EE7 /* TestRecorder.app */; | ||||||
|  | 			productType = "com.apple.product-type.application"; | ||||||
|  | 		}; | ||||||
| /* End PBXNativeTarget section */ | /* End PBXNativeTarget section */ | ||||||
|  |  | ||||||
| /* Begin PBXProject section */ | /* Begin PBXProject section */ | ||||||
| 		58B511D31A9E6C8500147676 /* Project object */ = { | 		58B511D31A9E6C8500147676 /* Project object */ = { | ||||||
| 			isa = PBXProject; | 			isa = PBXProject; | ||||||
| 			attributes = { | 			attributes = { | ||||||
|  | 				LastSwiftUpdateCheck = 1540; | ||||||
| 				LastUpgradeCheck = 1240; | 				LastUpgradeCheck = 1240; | ||||||
| 				ORGANIZATIONNAME = mrousavy; | 				ORGANIZATIONNAME = mrousavy; | ||||||
| 				TargetAttributes = { | 				TargetAttributes = { | ||||||
| 					58B511DA1A9E6C8500147676 = { | 					58B511DA1A9E6C8500147676 = { | ||||||
| 						CreatedOnToolsVersion = 6.1.1; | 						CreatedOnToolsVersion = 6.1.1; | ||||||
| 					}; | 					}; | ||||||
|  | 					B3EF9F092C3FBD8300832EE7 = { | ||||||
|  | 						CreatedOnToolsVersion = 15.4; | ||||||
|  | 						DevelopmentTeam = HP3AMBWJGS; | ||||||
|  | 						LastSwiftMigration = 1540; | ||||||
|  | 						ProvisioningStyle = Automatic; | ||||||
|  | 					}; | ||||||
| 				}; | 				}; | ||||||
| 			}; | 			}; | ||||||
| 			buildConfigurationList = 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "VisionCamera" */; | 			buildConfigurationList = 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "VisionCamera" */; | ||||||
| @@ -387,6 +525,7 @@ | |||||||
| 			knownRegions = ( | 			knownRegions = ( | ||||||
| 				English, | 				English, | ||||||
| 				en, | 				en, | ||||||
|  | 				Base, | ||||||
| 			); | 			); | ||||||
| 			mainGroup = 58B511D21A9E6C8500147676; | 			mainGroup = 58B511D21A9E6C8500147676; | ||||||
| 			productRefGroup = 58B511D21A9E6C8500147676; | 			productRefGroup = 58B511D21A9E6C8500147676; | ||||||
| @@ -394,10 +533,24 @@ | |||||||
| 			projectRoot = ""; | 			projectRoot = ""; | ||||||
| 			targets = ( | 			targets = ( | ||||||
| 				58B511DA1A9E6C8500147676 /* VisionCamera */, | 				58B511DA1A9E6C8500147676 /* VisionCamera */, | ||||||
|  | 				B3EF9F092C3FBD8300832EE7 /* TestRecorder */, | ||||||
| 			); | 			); | ||||||
| 		}; | 		}; | ||||||
| /* End PBXProject section */ | /* End PBXProject section */ | ||||||
|  |  | ||||||
|  | /* Begin PBXResourcesBuildPhase section */ | ||||||
|  | 		B3EF9F082C3FBD8300832EE7 /* Resources */ = { | ||||||
|  | 			isa = PBXResourcesBuildPhase; | ||||||
|  | 			buildActionMask = 2147483647; | ||||||
|  | 			files = ( | ||||||
|  | 				B3EF9F162C3FBD8400832EE7 /* Assets.xcassets in Resources */, | ||||||
|  | 				B3EF9F192C3FBD8400832EE7 /* Base in Resources */, | ||||||
|  | 				B3EF9F142C3FBD8300832EE7 /* Base in Resources */, | ||||||
|  | 			); | ||||||
|  | 			runOnlyForDeploymentPostprocessing = 0; | ||||||
|  | 		}; | ||||||
|  | /* End PBXResourcesBuildPhase section */ | ||||||
|  |  | ||||||
| /* Begin PBXShellScriptBuildPhase section */ | /* Begin PBXShellScriptBuildPhase section */ | ||||||
| 		B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */ = { | 		B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */ = { | ||||||
| 			isa = PBXShellScriptBuildPhase; | 			isa = PBXShellScriptBuildPhase; | ||||||
| @@ -490,6 +643,7 @@ | |||||||
| 				B88977BE2B556DBA0095C92C /* AVCaptureDevice+minFocusDistance.swift in Sources */, | 				B88977BE2B556DBA0095C92C /* AVCaptureDevice+minFocusDistance.swift in Sources */, | ||||||
| 				B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */, | 				B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */, | ||||||
| 				B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */, | 				B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */, | ||||||
|  | 				B3AF8E882C41159300CC198C /* ChunkedRecorder.swift in Sources */, | ||||||
| 				B88685ED2AD6A5E600E93869 /* CameraSession+CodeScanner.swift in Sources */, | 				B88685ED2AD6A5E600E93869 /* CameraSession+CodeScanner.swift in Sources */, | ||||||
| 				B8207AAD2B0E5DD70002990F /* AVCaptureSession+synchronizeBuffer.swift in Sources */, | 				B8207AAD2B0E5DD70002990F /* AVCaptureSession+synchronizeBuffer.swift in Sources */, | ||||||
| 				B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */, | 				B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */, | ||||||
| @@ -516,8 +670,103 @@ | |||||||
| 			); | 			); | ||||||
| 			runOnlyForDeploymentPostprocessing = 0; | 			runOnlyForDeploymentPostprocessing = 0; | ||||||
| 		}; | 		}; | ||||||
|  | 		B3EF9F062C3FBD8300832EE7 /* Sources */ = { | ||||||
|  | 			isa = PBXSourcesBuildPhase; | ||||||
|  | 			buildActionMask = 2147483647; | ||||||
|  | 			files = ( | ||||||
|  | 				B3EF9F372C3FC0CA00832EE7 /* CameraView+Zoom.swift in Sources */, | ||||||
|  | 				B3EF9F232C3FBE8B00832EE7 /* VideoStabilizationMode.swift in Sources */, | ||||||
|  | 				B3EF9F4A2C3FC31E00832EE7 /* AVFrameRateRange+includes.swift in Sources */, | ||||||
|  | 				B3EF9F6A2C3FC46900832EE7 /* Promise.swift in Sources */, | ||||||
|  | 				B3EF9F4B2C3FC31E00832EE7 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */, | ||||||
|  | 				B3EF9F5E2C3FC43000832EE7 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */, | ||||||
|  | 				B3AF8E892C41159300CC198C /* ChunkedRecorder.swift in Sources */, | ||||||
|  | 				B3EF9F5F2C3FC43000832EE7 /* AVAuthorizationStatus+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F602C3FC43000832EE7 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F612C3FC43000832EE7 /* AVFileType+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F622C3FC43000832EE7 /* AVVideoCodecType+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F632C3FC43000832EE7 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F642C3FC43000832EE7 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F4C2C3FC31E00832EE7 /* AVAudioSession+updateCategory.swift in Sources */, | ||||||
|  | 				B3EF9F4D2C3FC31E00832EE7 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */, | ||||||
|  | 				B3EF9F4E2C3FC31E00832EE7 /* AVCaptureOutput+mirror.swift in Sources */, | ||||||
|  | 				B3EF9F4F2C3FC31E00832EE7 /* Collection+safe.swift in Sources */, | ||||||
|  | 				B3EF9F502C3FC31E00832EE7 /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift in Sources */, | ||||||
|  | 				B3EF9F512C3FC31E00832EE7 /* AVCaptureDevice+minFocusDistance.swift in Sources */, | ||||||
|  | 				B3EF9F5B2C3FC33000832EE7 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */, | ||||||
|  | 				B31481792C46559700084A26 /* CameraView+Focus.swift in Sources */, | ||||||
|  | 				B31481772C46547B00084A26 /* CameraViewManager.swift in Sources */, | ||||||
|  | 				B3EF9F522C3FC31E00832EE7 /* AVCaptureDevice+physicalDevices.swift in Sources */, | ||||||
|  | 				B3EF9F532C3FC31E00832EE7 /* AVCaptureDevice+neutralZoom.swift in Sources */, | ||||||
|  | 				B3EF9F542C3FC31E00832EE7 /* AVCaptureDevice.Format+dimensions.swift in Sources */, | ||||||
|  | 				B3EF9F552C3FC31E00832EE7 /* AVCaptureVideoDataOutput+pixelFormat.swift in Sources */, | ||||||
|  | 				B3EF9F562C3FC31E00832EE7 /* AVCaptureSession+synchronizeBuffer.swift in Sources */, | ||||||
|  | 				B3EF9F572C3FC31E00832EE7 /* AVCaptureDevice+isMultiCam.swift in Sources */, | ||||||
|  | 				B3EF9F582C3FC31E00832EE7 /* AVCaptureDevice+toDictionary.swift in Sources */, | ||||||
|  | 				B3EF9F592C3FC31E00832EE7 /* AVCaptureDevice.Format+toDictionary.swift in Sources */, | ||||||
|  | 				B3EF9F5A2C3FC31E00832EE7 /* CMVideoDimensions+toCGSize.swift in Sources */, | ||||||
|  | 				B3EF9F212C3FBDFC00832EE7 /* ReactStubs.swift in Sources */, | ||||||
|  | 				B3EF9F5C2C3FC33E00832EE7 /* RecordVideoOptions.swift in Sources */, | ||||||
|  | 				B3EF9F6B2C3FD35600832EE7 /* CameraView+RecordVideo.swift in Sources */, | ||||||
|  | 				B3EF9F222C3FBE8200832EE7 /* CameraConfiguration.swift in Sources */, | ||||||
|  | 				B3EF9F282C3FBF1900832EE7 /* JSUnionValue.swift in Sources */, | ||||||
|  | 				B3EF9F332C3FC00900832EE7 /* CameraSession+Configuration.swift in Sources */, | ||||||
|  | 				B3EF9F362C3FC05600832EE7 /* ResizeMode.swift in Sources */, | ||||||
|  | 				B3EF9F312C3FBFD500832EE7 /* AVAssetWriter.Status+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F292C3FBF2500832EE7 /* Torch.swift in Sources */, | ||||||
|  | 				B31481782C46558C00084A26 /* CameraView+TakePhoto.swift in Sources */, | ||||||
|  | 				B3EF9F2C2C3FBF4A00832EE7 /* EnumParserError.swift in Sources */, | ||||||
|  | 				B3EF9F272C3FBEF800832EE7 /* PixelFormat.swift in Sources */, | ||||||
|  | 				B3EF9F652C3FC43C00832EE7 /* CameraSession+Audio.swift in Sources */, | ||||||
|  | 				B3EF9F382C3FC0D900832EE7 /* PreviewView.swift in Sources */, | ||||||
|  | 				B3EF9F3A2C3FC2EB00832EE7 /* AutoFocusSystem.swift in Sources */, | ||||||
|  | 				B3EF9F112C3FBD8300832EE7 /* ViewController.swift in Sources */, | ||||||
|  | 				B3EF9F5D2C3FC34600832EE7 /* Video.swift in Sources */, | ||||||
|  | 				B3EF9F2B2C3FBF4100832EE7 /* AVMetadataObject.ObjectType+descriptor.swift in Sources */, | ||||||
|  | 				B3AF8E862C410FB700CC198C /* ReactStubs.m in Sources */, | ||||||
|  | 				B3EF9F0D2C3FBD8300832EE7 /* AppDelegate.swift in Sources */, | ||||||
|  | 				B3EF9F2D2C3FBF9600832EE7 /* CameraSessionDelegate.swift in Sources */, | ||||||
|  | 				B3EF9F262C3FBEEA00832EE7 /* CameraDeviceFormat.swift in Sources */, | ||||||
|  | 				B3EF9F242C3FBEBC00832EE7 /* CameraError.swift in Sources */, | ||||||
|  | 				B3EF9F2E2C3FBFA600832EE7 /* CameraSession+CodeScanner.swift in Sources */, | ||||||
|  | 				B3EF9F252C3FBED900832EE7 /* Orientation.swift in Sources */, | ||||||
|  | 				B3EF9F662C3FC44B00832EE7 /* CameraSession+Video.swift in Sources */, | ||||||
|  | 				B3EF9F672C3FC44B00832EE7 /* CameraSession+Photo.swift in Sources */, | ||||||
|  | 				B3EF9F682C3FC44B00832EE7 /* CameraSession+Focus.swift in Sources */, | ||||||
|  | 				B3EF9F6C2C3FD36800832EE7 /* Callback.swift in Sources */, | ||||||
|  | 				B3EF9F692C3FC44B00832EE7 /* PhotoCaptureDelegate.swift in Sources */, | ||||||
|  | 				B3EF9F302C3FBFBB00832EE7 /* RecordingSession.swift in Sources */, | ||||||
|  | 				B3EF9F322C3FBFF100832EE7 /* CameraQueues.swift in Sources */, | ||||||
|  | 				B3EF9F2F2C3FBFB200832EE7 /* CameraSession.swift in Sources */, | ||||||
|  | 				B3EF9F2A2C3FBF3400832EE7 /* CodeScannerOptions.swift in Sources */, | ||||||
|  | 				B3EF9F0F2C3FBD8300832EE7 /* SceneDelegate.swift in Sources */, | ||||||
|  | 				B3EF9F1E2C3FBDCF00832EE7 /* CameraView.swift in Sources */, | ||||||
|  | 				B3EF9F3C2C3FC30D00832EE7 /* AVCaptureDevice.Position+descriptor.swift in Sources */, | ||||||
|  | 				B3EF9F1F2C3FBDDC00832EE7 /* ReactLogger.swift in Sources */, | ||||||
|  | 			); | ||||||
|  | 			runOnlyForDeploymentPostprocessing = 0; | ||||||
|  | 		}; | ||||||
| /* End PBXSourcesBuildPhase section */ | /* End PBXSourcesBuildPhase section */ | ||||||
|  |  | ||||||
|  | /* Begin PBXVariantGroup section */ | ||||||
|  | 		B3EF9F122C3FBD8300832EE7 /* Main.storyboard */ = { | ||||||
|  | 			isa = PBXVariantGroup; | ||||||
|  | 			children = ( | ||||||
|  | 				B3EF9F132C3FBD8300832EE7 /* Base */, | ||||||
|  | 			); | ||||||
|  | 			name = Main.storyboard; | ||||||
|  | 			sourceTree = "<group>"; | ||||||
|  | 		}; | ||||||
|  | 		B3EF9F172C3FBD8400832EE7 /* LaunchScreen.storyboard */ = { | ||||||
|  | 			isa = PBXVariantGroup; | ||||||
|  | 			children = ( | ||||||
|  | 				B3EF9F182C3FBD8400832EE7 /* Base */, | ||||||
|  | 			); | ||||||
|  | 			name = LaunchScreen.storyboard; | ||||||
|  | 			sourceTree = "<group>"; | ||||||
|  | 		}; | ||||||
|  | /* End PBXVariantGroup section */ | ||||||
|  |  | ||||||
| /* Begin XCBuildConfiguration section */ | /* Begin XCBuildConfiguration section */ | ||||||
| 		58B511ED1A9E6C8500147676 /* Debug */ = { | 		58B511ED1A9E6C8500147676 /* Debug */ = { | ||||||
| 			isa = XCBuildConfiguration; | 			isa = XCBuildConfiguration; | ||||||
| @@ -660,6 +909,94 @@ | |||||||
| 			}; | 			}; | ||||||
| 			name = Release; | 			name = Release; | ||||||
| 		}; | 		}; | ||||||
|  | 		B3EF9F1B2C3FBD8400832EE7 /* Debug */ = { | ||||||
|  | 			isa = XCBuildConfiguration; | ||||||
|  | 			buildSettings = { | ||||||
|  | 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; | ||||||
|  | 				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; | ||||||
|  | 				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; | ||||||
|  | 				CLANG_ANALYZER_NONNULL = YES; | ||||||
|  | 				CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; | ||||||
|  | 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; | ||||||
|  | 				CLANG_ENABLE_MODULES = YES; | ||||||
|  | 				CLANG_ENABLE_OBJC_WEAK = YES; | ||||||
|  | 				CLANG_WARN_DOCUMENTATION_COMMENTS = YES; | ||||||
|  | 				CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; | ||||||
|  | 				CODE_SIGN_STYLE = Automatic; | ||||||
|  | 				CURRENT_PROJECT_VERSION = 1; | ||||||
|  | 				DEBUG_INFORMATION_FORMAT = dwarf; | ||||||
|  | 				DEVELOPMENT_TEAM = HP3AMBWJGS; | ||||||
|  | 				ENABLE_USER_SCRIPT_SANDBOXING = YES; | ||||||
|  | 				GCC_C_LANGUAGE_STANDARD = gnu17; | ||||||
|  | 				GENERATE_INFOPLIST_FILE = YES; | ||||||
|  | 				INFOPLIST_FILE = TestRecorder/Info.plist; | ||||||
|  | 				INFOPLIST_KEY_NSCameraUsageDescription = "Record form camera"; | ||||||
|  | 				INFOPLIST_KEY_NSMicrophoneUsageDescription = "Record from microphone"; | ||||||
|  | 				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; | ||||||
|  | 				INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; | ||||||
|  | 				INFOPLIST_KEY_UIMainStoryboardFile = Main; | ||||||
|  | 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; | ||||||
|  | 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; | ||||||
|  | 				IPHONEOS_DEPLOYMENT_TARGET = 16.0; | ||||||
|  | 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; | ||||||
|  | 				LOCALIZATION_PREFERS_STRING_CATALOGS = YES; | ||||||
|  | 				MARKETING_VERSION = 1.0; | ||||||
|  | 				MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; | ||||||
|  | 				MTL_FAST_MATH = YES; | ||||||
|  | 				PRODUCT_BUNDLE_IDENTIFIER = camera.TestRecorder; | ||||||
|  | 				PRODUCT_NAME = "$(TARGET_NAME)"; | ||||||
|  | 				SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; | ||||||
|  | 				SWIFT_EMIT_LOC_STRINGS = YES; | ||||||
|  | 				SWIFT_OBJC_BRIDGING_HEADER = "TestRecorder/TestRecorder-Bridging-Header.h"; | ||||||
|  | 				SWIFT_OPTIMIZATION_LEVEL = "-Onone"; | ||||||
|  | 				SWIFT_VERSION = 5.0; | ||||||
|  | 				TARGETED_DEVICE_FAMILY = "1,2"; | ||||||
|  | 			}; | ||||||
|  | 			name = Debug; | ||||||
|  | 		}; | ||||||
|  | 		B3EF9F1C2C3FBD8400832EE7 /* Release */ = { | ||||||
|  | 			isa = XCBuildConfiguration; | ||||||
|  | 			buildSettings = { | ||||||
|  | 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; | ||||||
|  | 				ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; | ||||||
|  | 				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; | ||||||
|  | 				CLANG_ANALYZER_NONNULL = YES; | ||||||
|  | 				CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; | ||||||
|  | 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; | ||||||
|  | 				CLANG_ENABLE_MODULES = YES; | ||||||
|  | 				CLANG_ENABLE_OBJC_WEAK = YES; | ||||||
|  | 				CLANG_WARN_DOCUMENTATION_COMMENTS = YES; | ||||||
|  | 				CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; | ||||||
|  | 				CODE_SIGN_STYLE = Automatic; | ||||||
|  | 				COPY_PHASE_STRIP = NO; | ||||||
|  | 				CURRENT_PROJECT_VERSION = 1; | ||||||
|  | 				DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; | ||||||
|  | 				DEVELOPMENT_TEAM = HP3AMBWJGS; | ||||||
|  | 				ENABLE_USER_SCRIPT_SANDBOXING = YES; | ||||||
|  | 				GCC_C_LANGUAGE_STANDARD = gnu17; | ||||||
|  | 				GENERATE_INFOPLIST_FILE = YES; | ||||||
|  | 				INFOPLIST_FILE = TestRecorder/Info.plist; | ||||||
|  | 				INFOPLIST_KEY_NSCameraUsageDescription = "Record form camera"; | ||||||
|  | 				INFOPLIST_KEY_NSMicrophoneUsageDescription = "Record from microphone"; | ||||||
|  | 				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; | ||||||
|  | 				INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; | ||||||
|  | 				INFOPLIST_KEY_UIMainStoryboardFile = Main; | ||||||
|  | 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; | ||||||
|  | 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; | ||||||
|  | 				IPHONEOS_DEPLOYMENT_TARGET = 16.0; | ||||||
|  | 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; | ||||||
|  | 				LOCALIZATION_PREFERS_STRING_CATALOGS = YES; | ||||||
|  | 				MARKETING_VERSION = 1.0; | ||||||
|  | 				MTL_FAST_MATH = YES; | ||||||
|  | 				PRODUCT_BUNDLE_IDENTIFIER = camera.TestRecorder; | ||||||
|  | 				PRODUCT_NAME = "$(TARGET_NAME)"; | ||||||
|  | 				SWIFT_EMIT_LOC_STRINGS = YES; | ||||||
|  | 				SWIFT_OBJC_BRIDGING_HEADER = "TestRecorder/TestRecorder-Bridging-Header.h"; | ||||||
|  | 				SWIFT_VERSION = 5.0; | ||||||
|  | 				TARGETED_DEVICE_FAMILY = "1,2"; | ||||||
|  | 			}; | ||||||
|  | 			name = Release; | ||||||
|  | 		}; | ||||||
| /* End XCBuildConfiguration section */ | /* End XCBuildConfiguration section */ | ||||||
|  |  | ||||||
| /* Begin XCConfigurationList section */ | /* Begin XCConfigurationList section */ | ||||||
| @@ -681,6 +1018,15 @@ | |||||||
| 			defaultConfigurationIsVisible = 0; | 			defaultConfigurationIsVisible = 0; | ||||||
| 			defaultConfigurationName = Release; | 			defaultConfigurationName = Release; | ||||||
| 		}; | 		}; | ||||||
|  | 		B3EF9F1D2C3FBD8400832EE7 /* Build configuration list for PBXNativeTarget "TestRecorder" */ = { | ||||||
|  | 			isa = XCConfigurationList; | ||||||
|  | 			buildConfigurations = ( | ||||||
|  | 				B3EF9F1B2C3FBD8400832EE7 /* Debug */, | ||||||
|  | 				B3EF9F1C2C3FBD8400832EE7 /* Release */, | ||||||
|  | 			); | ||||||
|  | 			defaultConfigurationIsVisible = 0; | ||||||
|  | 			defaultConfigurationName = Release; | ||||||
|  | 		}; | ||||||
| /* End XCConfigurationList section */ | /* End XCConfigurationList section */ | ||||||
| 	}; | 	}; | ||||||
| 	rootObject = 58B511D31A9E6C8500147676 /* Project object */; | 	rootObject = 58B511D31A9E6C8500147676 /* Project object */; | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "name": "react-native-vision-camera", |   "name": "react-native-vision-camera", | ||||||
|   "version": "3.8.2", |   "version": "3.9.2", | ||||||
|   "description": "A powerful, high-performance React Native Camera library.", |   "description": "A powerful, high-performance React Native Camera library.", | ||||||
|   "main": "lib/commonjs/index", |   "main": "lib/commonjs/index", | ||||||
|   "module": "lib/module/index", |   "module": "lib/module/index", | ||||||
| @@ -49,26 +49,33 @@ | |||||||
|     "postpack": "rm ./README.md" |     "postpack": "rm ./README.md" | ||||||
|   }, |   }, | ||||||
|   "keywords": [ |   "keywords": [ | ||||||
|     "react-native", |  | ||||||
|     "ios", |  | ||||||
|     "android", |  | ||||||
|     "camera", |  | ||||||
|     "vision", |  | ||||||
|     "native", |  | ||||||
|     "module", |  | ||||||
|     "react", |     "react", | ||||||
|  |     "native", | ||||||
|  |     "camera", | ||||||
|  |     "react-native", | ||||||
|  |     "react-native-camera", | ||||||
|  |     "vision", | ||||||
|  |     "javascript", | ||||||
|  |     "typescript", | ||||||
|  |     "android", | ||||||
|  |     "ios", | ||||||
|  |     "library", | ||||||
|  |     "instagram", | ||||||
|  |     "snapchat", | ||||||
|     "ai", |     "ai", | ||||||
|     "ar", |  | ||||||
|     "qr", |  | ||||||
|     "qr-code", |  | ||||||
|     "barcode", |  | ||||||
|     "scanner", |     "scanner", | ||||||
|  |     "qrcode", | ||||||
|  |     "barcode", | ||||||
|  |     "qr-code", | ||||||
|  |     "jsi", | ||||||
|  |     "worklet", | ||||||
|  |     "module", | ||||||
|     "frame", |     "frame", | ||||||
|     "processing", |     "processing", | ||||||
|     "realtime" |     "realtime" | ||||||
|   ], |   ], | ||||||
|   "repository": "https://github.com/mrousavy/react-native-vision-camera", |   "repository": "https://github.com/mrousavy/react-native-vision-camera", | ||||||
|   "author": "Marc Rousavy <marcrousavy@hotmail.com> (https://github.com/mrousavy)", |   "author": "Marc Rousavy <me@mrousavy.com> (https://github.com/mrousavy)", | ||||||
|   "license": "MIT", |   "license": "MIT", | ||||||
|   "bugs": { |   "bugs": { | ||||||
|     "url": "https://github.com/mrousavy/react-native-vision-camera/issues" |     "url": "https://github.com/mrousavy/react-native-vision-camera/issues" | ||||||
| @@ -159,5 +166,6 @@ | |||||||
|         } |         } | ||||||
|       ] |       ] | ||||||
|     ] |     ] | ||||||
|   } |   }, | ||||||
|  |   "packageManager": "yarn@1.22.19+sha1.4ba7fc5c6e704fce2066ecbfb0b0d8976fe62447" | ||||||
| } | } | ||||||
|   | |||||||
| @@ -5,5 +5,6 @@ if which clang-format >/dev/null; then | |||||||
|     clang-format -style=file:./cpp/.clang-format -i "$file" |     clang-format -style=file:./cpp/.clang-format -i "$file" | ||||||
|   done |   done | ||||||
| else | else | ||||||
|   echo "warning: clang-format not installed, install with 'brew install clang-format' (or manually from https://clang.llvm.org/docs/ClangFormat.html)" |   echo "error: clang-format not installed, install with 'brew install clang-format' (or manually from https://clang.llvm.org/docs/ClangFormat.html)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -3,5 +3,6 @@ | |||||||
| if which ktlint >/dev/null; then | if which ktlint >/dev/null; then | ||||||
|   cd android && ktlint --color --relative --editorconfig=./.editorconfig -F ./**/*.kt* |   cd android && ktlint --color --relative --editorconfig=./.editorconfig -F ./**/*.kt* | ||||||
| else | else | ||||||
|   echo "warning: KTLint not installed, install with 'brew install ktlint' (or manually from https://github.com/pinterest/ktlint)" |   echo "error: KTLint not installed, install with 'brew install ktlint' (or manually from https://github.com/pinterest/ktlint)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -3,5 +3,6 @@ | |||||||
| if which swiftformat >/dev/null; then | if which swiftformat >/dev/null; then | ||||||
|   cd ios && swiftformat --quiet . |   cd ios && swiftformat --quiet . | ||||||
| else | else | ||||||
|   echo "warning: SwiftFormat not installed, install with 'brew install swiftformat' (or manually from https://github.com/nicklockwood/SwiftFormat)" |   echo "error: SwiftFormat not installed, install with 'brew install swiftformat' (or manually from https://github.com/nicklockwood/SwiftFormat)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
| @@ -3,5 +3,6 @@ | |||||||
| if which swiftlint >/dev/null; then | if which swiftlint >/dev/null; then | ||||||
|   cd ios && swiftlint --quiet --fix && swiftlint --quiet |   cd ios && swiftlint --quiet --fix && swiftlint --quiet | ||||||
| else | else | ||||||
|   echo "warning: SwiftLint not installed, install with 'brew install swiftlint' (or manually from https://github.com/realm/SwiftLint)" |   echo "error: SwiftLint not installed, install with 'brew install swiftlint' (or manually from https://github.com/realm/SwiftLint)" | ||||||
|  |   exit 1 | ||||||
| fi | fi | ||||||
|   | |||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user