From cd0b41370692af8c59ade95211f0e30628e76f5d Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 13 Oct 2023 18:33:20 +0200
Subject: [PATCH] feat: New `Core/` library (#1975)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Moves everything Camera-related into `core/` / `Core/` so that it is better encapsulated from React Native.

Benefits:

1. Code is much better organized. It should be easier for collaborators to navigate now, and it is a cleaner codebase for me.
2. Locking is fully atomic: you can now only configure the session through a lock/Mutex, and changes are batch-overridable in a single commit.
   * On iOS, this makes Camera startup time **MUCH** faster; I measured a speedup from **1.5 seconds** down to only **240 milliseconds**, since we now lock/commit only once! 🚀
   * On Android, this fixes a few out-of-sync/concurrency issues like "Capture Request contains unconfigured Input/Output Surface!", since configuration is now a single lock operation! 💪
3. It is easier to integrate VisionCamera outside of React Native (e.g. native iOS apps, NativeScript, Flutter, etc.).

With this PR, VisionCamera V3 is up to **7x** faster than V2.
---
 package/VisionCamera.podspec | 1 +
 .../com/mrousavy/camera/CameraView+Events.kt | 3 +
 .../mrousavy/camera/CameraView+RecordVideo.kt | 3 +
 .../java/com/mrousavy/camera/CameraView.kt | 3 +
 .../com/mrousavy/camera/CameraViewModule.kt | 2 +
 .../mrousavy/camera/{ => core}/CameraError.kt | 2 +-
 .../camera/{ => core}/CameraQueues.kt | 2 +-
 .../com/mrousavy/camera/core/CameraSession.kt | 8 -
 .../camera/core/CodeScannerPipeline.kt | 1 -
 .../mrousavy/camera/core/RecordingSession.kt | 1 -
 .../com/mrousavy/camera/core/VideoPipeline.kt | 2 -
 .../camera/core/outputs/CameraOutputs.kt | 2 +-
 .../CameraCaptureSession+capture.kt | 6 +-
 .../CameraDevice+createCaptureSession.kt | 4 +-
 .../extensions/CameraManager+openCamera.kt | 6 +-
 .../mrousavy/camera/frameprocessor/Frame.java | 4 +-
 .../frameprocessor/VisionCameraProxy.kt | 2 +-
 .../frameprocessor/VisionCameraScheduler.java | 4 +-
 .../mrousavy/camera/parsers/CodeScanner.kt | 2 +-
 .../com/mrousavy/camera/parsers/CodeType.kt | 4 +-
 .../java/com/mrousavy/camera/parsers/Flash.kt | 2 +-
 .../mrousavy/camera/parsers/PixelFormat.kt | 2 +-
 .../mrousavy/camera/parsers/VideoFileType.kt | 2 +-
 .../com/mrousavy/camera/utils/withPromise.kt | 4 +-
 package/example/ios/Podfile.lock | 2 +-
 package/example/src/CameraPage.tsx | 9 +-
 package/ios/CameraView+AVAudioSession.swift | 151 -------
 package/ios/CameraView+AVCaptureSession.swift | 369 ------------------
 package/ios/CameraView+CodeScanner.swift | 45 ---
 package/ios/CameraView+Focus.swift | 86 +---
 package/ios/CameraView+Orientation.swift | 45 ---
 package/ios/CameraView+RecordVideo.swift | 274 ++-----------
 package/ios/CameraView+TakePhoto.swift | 79 +---
 package/ios/CameraView+Torch.swift | 51 ---
 package/ios/CameraView+Zoom.swift | 44 +--
 package/ios/CameraView.swift | 313 ++++++++-------
 package/ios/CameraViewManager.m | 12 +-
 package/ios/CameraViewManager.swift | 4 +
 package/ios/Core/CameraConfiguration.swift | 231 +++++++++++
 package/ios/{ => Core}/CameraError.swift | 21 +-
 package/ios/{ => Core}/CameraQueues.swift | 2 +-
 package/ios/Core/CameraSession+Audio.swift | 93 +++++
 .../ios/Core/CameraSession+CodeScanner.swift | 80 ++++
 .../Core/CameraSession+Configuration.swift | 323 +++++++++++++++
 package/ios/Core/CameraSession+Focus.swift | 82 ++++
 package/ios/Core/CameraSession+Photo.swift | 107 +++++
 package/ios/Core/CameraSession+Video.swift | 221 +++++++++++
 package/ios/Core/CameraSession.swift | 260 ++++++++++++
 package/ios/Core/CameraSessionDelegate.swift | 32 ++
 .../ios/{ => Core}/PhotoCaptureDelegate.swift | 4 +-
 package/ios/{ => Core}/PreviewView.swift | 0
 package/ios/{ => Core}/RecordingSession.swift | 6 +-
 .../AVAudioSession+trySetAllowHaptics.swift | 23 --
 ...reConnection+setInterfaceOrientation.swift | 1 +
 .../AVCaptureDevice+toDictionary.swift | 8 +-
 .../AVCaptureDevice.Format+toDictionary.swift | 69 +---
 .../Extensions/AVCaptureOutput+mirror.swift | 52 +++
 .../AVCapturePhotoOutput+mirror.swift | 20 -
 ...AVCaptureVideoDataOutput+pixelFormat.swift | 26 ++
 .../UIInterfaceOrientation+descriptor.swift | 31 --
 package/ios/Types/AutoFocusSystem.swift | 41 ++
 package/ios/Types/CameraDeviceFormat.swift | 119 ++++++
 package/ios/Types/CodeScanner.swift | 2 +-
 package/ios/Types/JSUnionValue.swift | 15 +
 package/ios/Types/Orientation.swift | 83 ++++
 .../ios/{Parsers => Types}/PixelFormat.swift | 32 +-
 package/ios/Types/RecordVideoOptions.swift | 30 ++
 package/ios/Types/ResizeMode.swift | 21 +-
 package/ios/Types/Torch.swift | 45 +++
 package/ios/Types/Video.swift | 28 ++
 .../ios/Types/VideoStabilizationMode.swift | 47 +++
 .../VisionCamera.xcodeproj/project.pbxproj | 136 +++++--
 72 files changed, 2326 insertions(+), 1521 deletions(-)
 rename package/android/src/main/java/com/mrousavy/camera/{ => core}/CameraError.kt (99%)
 rename package/android/src/main/java/com/mrousavy/camera/{ => core}/CameraQueues.kt (96%)
 delete mode 100644 package/ios/CameraView+AVAudioSession.swift
 delete mode 100644 package/ios/CameraView+AVCaptureSession.swift
 delete mode 100644 package/ios/CameraView+CodeScanner.swift
 delete mode 100644 package/ios/CameraView+Orientation.swift
 delete mode 100644 package/ios/CameraView+Torch.swift
 create mode 100644 package/ios/Core/CameraConfiguration.swift
 rename package/ios/{ => Core}/CameraError.swift (94%)
 rename package/ios/{ => Core}/CameraQueues.swift (94%)
 create mode 100644 package/ios/Core/CameraSession+Audio.swift
 create mode 100644 package/ios/Core/CameraSession+CodeScanner.swift
 create mode 100644 package/ios/Core/CameraSession+Configuration.swift
 create mode 100644 package/ios/Core/CameraSession+Focus.swift
 create mode 100644 package/ios/Core/CameraSession+Photo.swift
 create mode 100644 package/ios/Core/CameraSession+Video.swift
 create mode 100644 package/ios/Core/CameraSession.swift
 create mode 100644 package/ios/Core/CameraSessionDelegate.swift
 rename package/ios/{ => Core}/PhotoCaptureDelegate.swift (93%)
 rename package/ios/{ => Core}/PreviewView.swift (100%)
 rename package/ios/{ => Core}/RecordingSession.swift (97%)
 delete mode 100644 package/ios/Extensions/AVAudioSession+trySetAllowHaptics.swift
 create mode 100644 package/ios/Extensions/AVCaptureOutput+mirror.swift
 delete mode 100644 package/ios/Extensions/AVCapturePhotoOutput+mirror.swift
 create mode 100644 package/ios/Extensions/AVCaptureVideoDataOutput+pixelFormat.swift
 delete mode 100644 package/ios/Parsers/UIInterfaceOrientation+descriptor.swift
 create mode 100644 package/ios/Types/AutoFocusSystem.swift
 create mode 100644 package/ios/Types/CameraDeviceFormat.swift
 create mode 100644 package/ios/Types/JSUnionValue.swift
 create mode 100644 package/ios/Types/Orientation.swift
 rename package/ios/{Parsers => Types}/PixelFormat.swift (66%)
 create mode 100644 package/ios/Types/RecordVideoOptions.swift
 create mode 100644 package/ios/Types/Torch.swift
 create mode 100644 package/ios/Types/Video.swift
 create mode 100644 package/ios/Types/VideoStabilizationMode.swift
diff --git a/package/VisionCamera.podspec
b/package/VisionCamera.podspec index 22ab832..435e579 100644 --- a/package/VisionCamera.podspec +++ b/package/VisionCamera.podspec @@ -41,6 +41,7 @@ Pod::Spec.new do |s| s.source_files = [ # Core "ios/*.{m,mm,swift}", + "ios/Core/*.{m,mm,swift}", "ios/Extensions/*.{m,mm,swift}", "ios/Parsers/*.{m,mm,swift}", "ios/React Utils/*.{m,mm,swift}", diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt b/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt index d242029..53ac75e 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraView+Events.kt @@ -6,6 +6,9 @@ import com.facebook.react.bridge.ReactContext import com.facebook.react.bridge.WritableMap import com.facebook.react.uimanager.events.RCTEventEmitter import com.google.mlkit.vision.barcode.common.Barcode +import com.mrousavy.camera.core.CameraError +import com.mrousavy.camera.core.UnknownCameraError +import com.mrousavy.camera.core.code import com.mrousavy.camera.parsers.CodeType fun CameraView.invokeOnInitialized() { diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraView+RecordVideo.kt b/package/android/src/main/java/com/mrousavy/camera/CameraView+RecordVideo.kt index 3459e45..1e3b979 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraView+RecordVideo.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraView+RecordVideo.kt @@ -5,7 +5,10 @@ import android.annotation.SuppressLint import android.content.pm.PackageManager import androidx.core.content.ContextCompat import com.facebook.react.bridge.* +import com.mrousavy.camera.core.MicrophonePermissionError +import com.mrousavy.camera.core.RecorderError import com.mrousavy.camera.core.RecordingSession +import com.mrousavy.camera.core.code import com.mrousavy.camera.parsers.Torch import com.mrousavy.camera.parsers.VideoCodec import com.mrousavy.camera.parsers.VideoFileType diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraView.kt b/package/android/src/main/java/com/mrousavy/camera/CameraView.kt index 2be256d..7627127 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraView.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraView.kt @@ -13,7 +13,10 @@ import android.view.Surface import android.widget.FrameLayout import androidx.core.content.ContextCompat import com.facebook.react.bridge.ReadableMap +import com.mrousavy.camera.core.CameraPermissionError +import com.mrousavy.camera.core.CameraQueues import com.mrousavy.camera.core.CameraSession +import com.mrousavy.camera.core.NoCameraDeviceError import com.mrousavy.camera.core.PreviewView import com.mrousavy.camera.core.outputs.CameraOutputs import com.mrousavy.camera.extensions.bigger diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraViewModule.kt b/package/android/src/main/java/com/mrousavy/camera/CameraViewModule.kt index a54c1cc..5d6bc97 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraViewModule.kt +++ b/package/android/src/main/java/com/mrousavy/camera/CameraViewModule.kt @@ -9,6 +9,8 @@ import com.facebook.react.module.annotations.ReactModule import com.facebook.react.modules.core.PermissionAwareActivity import com.facebook.react.modules.core.PermissionListener import com.facebook.react.uimanager.UIManagerHelper +import com.mrousavy.camera.core.CameraError +import com.mrousavy.camera.core.ViewNotFoundError import 
com.mrousavy.camera.frameprocessor.VisionCameraInstaller import com.mrousavy.camera.frameprocessor.VisionCameraProxy import com.mrousavy.camera.parsers.* diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraError.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt similarity index 99% rename from package/android/src/main/java/com/mrousavy/camera/CameraError.kt rename to package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt index 94fd335..637ef0c 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraError.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraError.kt @@ -1,4 +1,4 @@ -package com.mrousavy.camera +package com.mrousavy.camera.core import com.mrousavy.camera.core.outputs.CameraOutputs import com.mrousavy.camera.parsers.CameraDeviceError diff --git a/package/android/src/main/java/com/mrousavy/camera/CameraQueues.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraQueues.kt similarity index 96% rename from package/android/src/main/java/com/mrousavy/camera/CameraQueues.kt rename to package/android/src/main/java/com/mrousavy/camera/core/CameraQueues.kt index 571b8ca..beac060 100644 --- a/package/android/src/main/java/com/mrousavy/camera/CameraQueues.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraQueues.kt @@ -1,4 +1,4 @@ -package com.mrousavy.camera +package com.mrousavy.camera.core import android.os.Handler import android.os.HandlerThread diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt index 4b7d592..17705d2 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt @@ -16,15 +16,7 @@ import android.os.Build import android.util.Log import android.util.Range import android.util.Size -import com.mrousavy.camera.CameraNotReadyError -import com.mrousavy.camera.CameraQueues import com.mrousavy.camera.CameraView -import com.mrousavy.camera.CaptureAbortedError -import com.mrousavy.camera.NoRecordingInProgressError -import com.mrousavy.camera.PhotoNotEnabledError -import com.mrousavy.camera.RecorderError -import com.mrousavy.camera.RecordingInProgressError -import com.mrousavy.camera.VideoNotEnabledError import com.mrousavy.camera.core.outputs.CameraOutputs import com.mrousavy.camera.extensions.capture import com.mrousavy.camera.extensions.createCaptureSession diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt b/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt index 92432a2..599b28b 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CodeScannerPipeline.kt @@ -7,7 +7,6 @@ import com.google.mlkit.vision.barcode.BarcodeScanner import com.google.mlkit.vision.barcode.BarcodeScannerOptions import com.google.mlkit.vision.barcode.BarcodeScanning import com.google.mlkit.vision.common.InputImage -import com.mrousavy.camera.CameraQueues import com.mrousavy.camera.core.outputs.CameraOutputs import com.mrousavy.camera.parsers.Orientation import java.io.Closeable diff --git a/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt index 56e1b22..4c3963c 100644 --- 
a/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt @@ -7,7 +7,6 @@ import android.os.Build import android.util.Log import android.util.Size import android.view.Surface -import com.mrousavy.camera.RecorderError import com.mrousavy.camera.parsers.Orientation import com.mrousavy.camera.parsers.VideoCodec import com.mrousavy.camera.parsers.VideoFileType diff --git a/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt b/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt index c335a17..ddbec56 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt @@ -9,8 +9,6 @@ import android.os.Build import android.util.Log import android.view.Surface import com.facebook.jni.HybridData -import com.mrousavy.camera.CameraQueues -import com.mrousavy.camera.FrameProcessorsUnavailableError import com.mrousavy.camera.frameprocessor.Frame import com.mrousavy.camera.frameprocessor.FrameProcessor import com.mrousavy.camera.parsers.Orientation diff --git a/package/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt b/package/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt index 6762b78..d128a62 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt @@ -9,7 +9,7 @@ import android.util.Log import android.util.Size import android.view.Surface import com.google.mlkit.vision.barcode.common.Barcode -import com.mrousavy.camera.CameraQueues +import com.mrousavy.camera.core.CameraQueues import com.mrousavy.camera.core.CodeScannerPipeline import com.mrousavy.camera.core.VideoPipeline import com.mrousavy.camera.extensions.bigger diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt index 3977607..c5c4fc4 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraCaptureSession+capture.kt @@ -5,9 +5,9 @@ import android.hardware.camera2.CaptureFailure import android.hardware.camera2.CaptureRequest import android.hardware.camera2.TotalCaptureResult import android.media.MediaActionSound -import com.mrousavy.camera.CameraQueues -import com.mrousavy.camera.CaptureAbortedError -import com.mrousavy.camera.UnknownCaptureError +import com.mrousavy.camera.core.CameraQueues +import com.mrousavy.camera.core.CaptureAbortedError +import com.mrousavy.camera.core.UnknownCaptureError import kotlin.coroutines.resume import kotlin.coroutines.resumeWithException import kotlin.coroutines.suspendCoroutine diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createCaptureSession.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createCaptureSession.kt index 6083c28..397d603 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createCaptureSession.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createCaptureSession.kt @@ -8,8 +8,8 @@ import android.hardware.camera2.params.OutputConfiguration import 
android.hardware.camera2.params.SessionConfiguration import android.os.Build import android.util.Log -import com.mrousavy.camera.CameraQueues -import com.mrousavy.camera.CameraSessionCannotBeConfiguredError +import com.mrousavy.camera.core.CameraQueues +import com.mrousavy.camera.core.CameraSessionCannotBeConfiguredError import com.mrousavy.camera.core.outputs.CameraOutputs import kotlin.coroutines.resume import kotlin.coroutines.resumeWithException diff --git a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraManager+openCamera.kt b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraManager+openCamera.kt index f42c3dc..56430d3 100644 --- a/package/android/src/main/java/com/mrousavy/camera/extensions/CameraManager+openCamera.kt +++ b/package/android/src/main/java/com/mrousavy/camera/extensions/CameraManager+openCamera.kt @@ -5,9 +5,9 @@ import android.hardware.camera2.CameraDevice import android.hardware.camera2.CameraManager import android.os.Build import android.util.Log -import com.mrousavy.camera.CameraCannotBeOpenedError -import com.mrousavy.camera.CameraDisconnectedError -import com.mrousavy.camera.CameraQueues +import com.mrousavy.camera.core.CameraCannotBeOpenedError +import com.mrousavy.camera.core.CameraDisconnectedError +import com.mrousavy.camera.core.CameraQueues import com.mrousavy.camera.parsers.CameraDeviceError import kotlin.coroutines.resume import kotlin.coroutines.resumeWithException diff --git a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java index f92f2be..cbd7e12 100644 --- a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java +++ b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/Frame.java @@ -4,12 +4,10 @@ import android.hardware.HardwareBuffer; import android.media.Image; import android.os.Build; import com.facebook.proguard.annotations.DoNotStrip; -import com.mrousavy.camera.HardwareBuffersNotAvailableError; +import com.mrousavy.camera.core.HardwareBuffersNotAvailableError; import com.mrousavy.camera.parsers.PixelFormat; import com.mrousavy.camera.parsers.Orientation; -import java.nio.ByteBuffer; - public class Frame { private final Image image; private final boolean isMirrored; diff --git a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt index c686d7a..c5c511e 100644 --- a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt +++ b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraProxy.kt @@ -10,7 +10,7 @@ import com.facebook.react.bridge.UiThreadUtil import com.facebook.react.turbomodule.core.CallInvokerHolderImpl import com.facebook.react.uimanager.UIManagerHelper import com.mrousavy.camera.CameraView -import com.mrousavy.camera.ViewNotFoundError +import com.mrousavy.camera.core.ViewNotFoundError import java.lang.ref.WeakReference @Suppress("KotlinJniMissingFunction") // we use fbjni. 
diff --git a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraScheduler.java b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraScheduler.java index f7b82b2..4dffad2 100644 --- a/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraScheduler.java +++ b/package/android/src/main/java/com/mrousavy/camera/frameprocessor/VisionCameraScheduler.java @@ -2,9 +2,7 @@ package com.mrousavy.camera.frameprocessor; import com.facebook.jni.HybridData; import com.facebook.proguard.annotations.DoNotStrip; -import com.mrousavy.camera.CameraQueues; - -import java.util.concurrent.ExecutorService; +import com.mrousavy.camera.core.CameraQueues; @SuppressWarnings("JavaJniMissingFunction") // using fbjni here public class VisionCameraScheduler { diff --git a/package/android/src/main/java/com/mrousavy/camera/parsers/CodeScanner.kt b/package/android/src/main/java/com/mrousavy/camera/parsers/CodeScanner.kt index 2edb373..7e6706a 100644 --- a/package/android/src/main/java/com/mrousavy/camera/parsers/CodeScanner.kt +++ b/package/android/src/main/java/com/mrousavy/camera/parsers/CodeScanner.kt @@ -1,7 +1,7 @@ package com.mrousavy.camera.parsers import com.facebook.react.bridge.ReadableMap -import com.mrousavy.camera.InvalidTypeScriptUnionError +import com.mrousavy.camera.core.InvalidTypeScriptUnionError class CodeScanner(map: ReadableMap) { val codeTypes: List diff --git a/package/android/src/main/java/com/mrousavy/camera/parsers/CodeType.kt b/package/android/src/main/java/com/mrousavy/camera/parsers/CodeType.kt index 0ba2964..179d216 100644 --- a/package/android/src/main/java/com/mrousavy/camera/parsers/CodeType.kt +++ b/package/android/src/main/java/com/mrousavy/camera/parsers/CodeType.kt @@ -1,8 +1,8 @@ package com.mrousavy.camera.parsers import com.google.mlkit.vision.barcode.common.Barcode -import com.mrousavy.camera.CodeTypeNotSupportedError -import com.mrousavy.camera.InvalidTypeScriptUnionError +import com.mrousavy.camera.core.CodeTypeNotSupportedError +import com.mrousavy.camera.core.InvalidTypeScriptUnionError enum class CodeType(override val unionValue: String) : JSUnionValue { CODE_128("code-128"), diff --git a/package/android/src/main/java/com/mrousavy/camera/parsers/Flash.kt b/package/android/src/main/java/com/mrousavy/camera/parsers/Flash.kt index 7dcf787..196d640 100644 --- a/package/android/src/main/java/com/mrousavy/camera/parsers/Flash.kt +++ b/package/android/src/main/java/com/mrousavy/camera/parsers/Flash.kt @@ -1,6 +1,6 @@ package com.mrousavy.camera.parsers -import com.mrousavy.camera.InvalidTypeScriptUnionError +import com.mrousavy.camera.core.InvalidTypeScriptUnionError enum class Flash(override val unionValue: String) : JSUnionValue { OFF("off"), diff --git a/package/android/src/main/java/com/mrousavy/camera/parsers/PixelFormat.kt b/package/android/src/main/java/com/mrousavy/camera/parsers/PixelFormat.kt index 8c9c7b1..39f65f6 100644 --- a/package/android/src/main/java/com/mrousavy/camera/parsers/PixelFormat.kt +++ b/package/android/src/main/java/com/mrousavy/camera/parsers/PixelFormat.kt @@ -1,7 +1,7 @@ package com.mrousavy.camera.parsers import android.graphics.ImageFormat -import com.mrousavy.camera.PixelFormatNotSupportedError +import com.mrousavy.camera.core.PixelFormatNotSupportedError enum class PixelFormat(override val unionValue: String) : JSUnionValue { YUV("yuv"), diff --git a/package/android/src/main/java/com/mrousavy/camera/parsers/VideoFileType.kt 
b/package/android/src/main/java/com/mrousavy/camera/parsers/VideoFileType.kt index b2aec14..5998b4d 100644 --- a/package/android/src/main/java/com/mrousavy/camera/parsers/VideoFileType.kt +++ b/package/android/src/main/java/com/mrousavy/camera/parsers/VideoFileType.kt @@ -1,6 +1,6 @@ package com.mrousavy.camera.parsers -import com.mrousavy.camera.InvalidTypeScriptUnionError +import com.mrousavy.camera.core.InvalidTypeScriptUnionError enum class VideoFileType(override val unionValue: String) : JSUnionValue { MOV("mov"), diff --git a/package/android/src/main/java/com/mrousavy/camera/utils/withPromise.kt b/package/android/src/main/java/com/mrousavy/camera/utils/withPromise.kt index 5b9288d..7c107be 100644 --- a/package/android/src/main/java/com/mrousavy/camera/utils/withPromise.kt +++ b/package/android/src/main/java/com/mrousavy/camera/utils/withPromise.kt @@ -1,8 +1,8 @@ package com.mrousavy.camera.utils import com.facebook.react.bridge.Promise -import com.mrousavy.camera.CameraError -import com.mrousavy.camera.UnknownCameraError +import com.mrousavy.camera.core.CameraError +import com.mrousavy.camera.core.UnknownCameraError inline fun withPromise(promise: Promise, closure: () -> Any?) { try { diff --git a/package/example/ios/Podfile.lock b/package/example/ios/Podfile.lock index d1d051a..9e453ce 100644 --- a/package/example/ios/Podfile.lock +++ b/package/example/ios/Podfile.lock @@ -747,7 +747,7 @@ SPEC CHECKSUMS: SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 - VisionCamera: f649cd0c0fa6266f1cd5e0787a7c9583ca143b3a + VisionCamera: f386aee60abb07d979c506ea9e6d4831e596cafe Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb diff --git a/package/example/src/CameraPage.tsx b/package/example/src/CameraPage.tsx index 2ac1189..da1f850 100644 --- a/package/example/src/CameraPage.tsx +++ b/package/example/src/CameraPage.tsx @@ -47,7 +47,12 @@ export function CameraPage({ navigation }: Props): React.ReactElement { // camera device settings const [preferredDevice] = usePreferredCameraDevice() - const device = useCameraDevice(cameraPosition) + let device = useCameraDevice(cameraPosition) + + if (preferredDevice != null && preferredDevice.position === cameraPosition) { + // override default device with the one selected by the user in settings + device = preferredDevice + } const [targetFps, setTargetFps] = useState(60) @@ -172,7 +177,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement { OSType { - // as per documentation, the first value is always the most efficient format - var defaultFormat = videoOutput.availableVideoPixelFormatTypes.first! 
- if enableBufferCompression { - // use compressed format instead if we enabled buffer compression - if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange && - videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange) { - // YUV 4:2:0 8-bit (limited video colors; compressed) - defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange - } - if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange && - videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange) { - // YUV 4:2:0 8-bit (full video colors; compressed) - defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange - } - } - - // If the user enabled HDR, we can only use the YUV 4:2:0 10-bit pixel format. - if hdr == true { - guard pixelFormat == nil || pixelFormat == "yuv" else { - invokeOnError(.format(.incompatiblePixelFormatWithHDR)) - return defaultFormat - } - - var targetFormats = [kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, - kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange] - if enableBufferCompression { - // If we enable buffer compression, try to use a lossless compressed YUV format first, otherwise fall back to the others. - targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange, at: 0) - } - - // Find the best matching format - guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else { - invokeOnError(.format(.invalidHdr)) - return defaultFormat - } - // YUV 4:2:0 10-bit (compressed/uncompressed) - return format - } - - // If the user didn't specify a custom pixelFormat, just return the default one. - guard let pixelFormat = pixelFormat else { - return defaultFormat - } - - // If we don't use HDR, we can use any other custom pixel format. - switch pixelFormat { - case "yuv": - // YUV 4:2:0 8-bit (full/limited video colors; uncompressed) - var targetFormats = [kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, - kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] - if enableBufferCompression { - // YUV 4:2:0 8-bit (full/limited video colors; compressed) - targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, at: 0) - targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, at: 0) - } - guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else { - invokeOnError(.device(.pixelFormatNotSupported)) - return defaultFormat - } - return format - case "rgb": - // RGBA 8-bit (uncompressed) - var targetFormats = [kCVPixelFormatType_32BGRA] - if enableBufferCompression { - // RGBA 8-bit (compressed) - targetFormats.insert(kCVPixelFormatType_Lossless_32BGRA, at: 0) - } - guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else { - invokeOnError(.device(.pixelFormatNotSupported)) - return defaultFormat - } - return format - case "native": - return defaultFormat - default: - invokeOnError(.parameter(.invalid(unionName: "pixelFormat", receivedValue: pixelFormat as String))) - return defaultFormat - } - } - - // pragma MARK: Configure Device - - /** - Configures the Video Device with the given FPS and HDR modes. 
- */ - final func configureDevice() { - ReactLogger.log(level: .info, message: "Configuring Device...") - guard let device = videoDeviceInput?.device else { - invokeOnError(.session(.cameraNotReady)) - return - } - - do { - try device.lockForConfiguration() - - // Configure FPS - if let fps = fps?.int32Value { - let supportsGivenFps = device.activeFormat.videoSupportedFrameRateRanges.contains { range in - return range.includes(fps: Double(fps)) - } - if !supportsGivenFps { - invokeOnError(.format(.invalidFps(fps: Int(fps)))) - return - } - - let duration = CMTimeMake(value: 1, timescale: fps) - device.activeVideoMinFrameDuration = duration - device.activeVideoMaxFrameDuration = duration - } else { - device.activeVideoMinFrameDuration = CMTime.invalid - device.activeVideoMaxFrameDuration = CMTime.invalid - } - - // Configure Low-Light-Boost - if lowLightBoost != nil { - if lowLightBoost == true && !device.isLowLightBoostSupported { - invokeOnError(.device(.lowLightBoostNotSupported)) - return - } - device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue - } - - device.unlockForConfiguration() - ReactLogger.log(level: .info, message: "Device successfully configured!") - } catch let error as NSError { - invokeOnError(.device(.configureError), cause: error) - return - } - } - - // pragma MARK: Configure Format - - /** - Configures the Video Device to find the best matching Format. - */ - final func configureFormat() { - ReactLogger.log(level: .info, message: "Configuring Format...") - guard let jsFormat = format else { - // JS Format was null. Ignore it, use default. - return - } - guard let device = videoDeviceInput?.device else { - invokeOnError(.session(.cameraNotReady)) - return - } - - if device.activeFormat.isEqualTo(jsFormat: jsFormat) { - ReactLogger.log(level: .info, message: "Already selected active format.") - return - } - - // get matching format - let format = device.formats.first { $0.isEqualTo(jsFormat: jsFormat) } - guard let format else { - invokeOnError(.format(.invalidFormat)) - return - } - - do { - try device.lockForConfiguration() - defer { - device.unlockForConfiguration() - } - - let shouldReconfigurePhotoOutput = device.activeFormat.photoDimensions.toCGSize() != format.photoDimensions.toCGSize() - device.activeFormat = format - - // The Photo Output uses the smallest available Dimension by default. We need to configure it for the maximum here - if shouldReconfigurePhotoOutput, #available(iOS 16.0, *) { - if let photoOutput = photoOutput { - photoOutput.maxPhotoDimensions = format.photoDimensions - } - } - - ReactLogger.log(level: .info, message: "Format successfully configured!") - } catch let error as NSError { - invokeOnError(.device(.configureError), cause: error) - return - } - } - - // pragma MARK: Notifications/Interruptions - - @objc - func sessionRuntimeError(notification: Notification) { - ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!") - guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? 
AVError else { - return - } - - invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError) - - if isActive { - // restart capture session after an error occured - CameraQueues.cameraQueue.async { - self.captureSession.startRunning() - } - } - } -} diff --git a/package/ios/CameraView+CodeScanner.swift b/package/ios/CameraView+CodeScanner.swift deleted file mode 100644 index 9d17b03..0000000 --- a/package/ios/CameraView+CodeScanner.swift +++ /dev/null @@ -1,45 +0,0 @@ -// -// CameraView+CodeScanner.swift -// VisionCamera -// -// Created by Marc Rousavy on 03.10.23. -// Copyright © 2023 mrousavy. All rights reserved. -// - -import AVFoundation -import Foundation - -extension CameraView: AVCaptureMetadataOutputObjectsDelegate { - public func metadataOutput(_: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from _: AVCaptureConnection) { - guard let onCodeScanned = onCodeScanned else { - return - } - guard !metadataObjects.isEmpty else { - return - } - - // Map codes to JS values - let codes = metadataObjects.map { object in - var value: String? - if let code = object as? AVMetadataMachineReadableCodeObject { - value = code.stringValue - } - let frame = previewView.layerRectConverted(fromMetadataOutputRect: object.bounds) - - return [ - "type": object.type.descriptor, - "value": value as Any, - "frame": [ - "x": frame.origin.x, - "y": frame.origin.y, - "width": frame.size.width, - "height": frame.size.height, - ], - ] - } - // Call JS event - onCodeScanned([ - "codes": codes, - ]) - } -} diff --git a/package/ios/CameraView+Focus.swift b/package/ios/CameraView+Focus.swift index e171999..4424e03 100644 --- a/package/ios/CameraView+Focus.swift +++ b/package/ios/CameraView+Focus.swift @@ -1,93 +1,19 @@ // // CameraView+Focus.swift -// mrousavy +// VisionCamera // -// Created by Marc Rousavy on 19.02.21. -// Copyright © 2021 mrousavy. All rights reserved. +// Created by Marc Rousavy on 12.10.23. +// Copyright © 2023 mrousavy. All rights reserved. 
// +import AVFoundation import Foundation extension CameraView { - private func convertPreviewCoordinatesToCameraCoordinates(_ point: CGPoint) -> CGPoint { - return previewView.captureDevicePointConverted(fromLayerPoint: point) - } - func focus(point: CGPoint, promise: Promise) { withPromise(promise) { - guard let device = self.videoDeviceInput?.device else { - throw CameraError.session(SessionError.cameraNotReady) - } - if !device.isFocusPointOfInterestSupported { - throw CameraError.device(DeviceError.focusNotSupported) - } - - // in {0..1} system - let normalizedPoint = convertPreviewCoordinatesToCameraCoordinates(point) - - do { - try device.lockForConfiguration() - defer { - device.unlockForConfiguration() - } - - // Set Focus - if device.isFocusPointOfInterestSupported { - device.focusPointOfInterest = normalizedPoint - device.focusMode = .autoFocus - } - - // Set Exposure - if device.isExposurePointOfInterestSupported { - device.exposurePointOfInterest = normalizedPoint - device.exposureMode = .autoExpose - } - - // Remove any existing listeners - NotificationCenter.default.removeObserver(self, - name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, - object: nil) - - // Listen for focus completion - device.isSubjectAreaChangeMonitoringEnabled = true - NotificationCenter.default.addObserver(self, - selector: #selector(subjectAreaDidChange), - name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange, - object: nil) - return nil - } catch { - throw CameraError.device(DeviceError.configureError) - } - } - } - - @objc - func subjectAreaDidChange(notification _: NSNotification) { - guard let device = videoDeviceInput?.device else { - invokeOnError(.session(.cameraNotReady)) - return - } - - do { - try device.lockForConfiguration() - defer { - device.unlockForConfiguration() - } - - // Reset Focus to continuous/auto - if device.isFocusPointOfInterestSupported { - device.focusMode = .continuousAutoFocus - } - - // Reset Exposure to continuous/auto - if device.isExposurePointOfInterestSupported { - device.exposureMode = .continuousAutoExposure - } - - // Disable listeners - device.isSubjectAreaChangeMonitoringEnabled = false - } catch { - invokeOnError(.device(.configureError)) + try cameraSession.focus(point: point) + return nil } } } diff --git a/package/ios/CameraView+Orientation.swift b/package/ios/CameraView+Orientation.swift deleted file mode 100644 index 1eb0f9b..0000000 --- a/package/ios/CameraView+Orientation.swift +++ /dev/null @@ -1,45 +0,0 @@ -// -// CameraView+Orientation.swift -// VisionCamera -// -// Created by Marc Rousavy on 04.01.22. -// Copyright © 2022 mrousavy. All rights reserved. -// - -import Foundation -import UIKit - -extension CameraView { - /// Orientation of the input connection (preview) - private var inputOrientation: UIInterfaceOrientation { - return .portrait - } - - // Orientation of the output connections (photo, video, frame processor) - var outputOrientation: UIInterfaceOrientation { - if let userOrientation = orientation as String?, - let parsedOrientation = try? 
UIInterfaceOrientation(withString: userOrientation) { - // user is overriding output orientation - return parsedOrientation - } else { - // use same as input orientation - return inputOrientation - } - } - - func updateOrientation() { - // Updates the Orientation for all rotable - let isMirrored = videoDeviceInput?.device.position == .front - - let connectionOrientation = outputOrientation - captureSession.outputs.forEach { output in - output.connections.forEach { connection in - if connection.isVideoMirroringSupported { - connection.automaticallyAdjustsVideoMirroring = false - connection.isVideoMirrored = isMirrored - } - connection.setInterfaceOrientation(connectionOrientation) - } - } - } -} diff --git a/package/ios/CameraView+RecordVideo.swift b/package/ios/CameraView+RecordVideo.swift index 8f0df93..8ecad27 100644 --- a/package/ios/CameraView+RecordVideo.swift +++ b/package/ios/CameraView+RecordVideo.swift @@ -11,268 +11,42 @@ import AVFoundation // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - /** - Starts a video + audio recording with a custom Asset Writer. - */ - func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) { - CameraQueues.cameraQueue.async { - ReactLogger.log(level: .info, message: "Starting Video recording...") - let callback = Callback(jsCallbackFunc) + func startRecording(options: NSDictionary, callback jsCallback: @escaping RCTResponseSenderBlock) { + // Type-safety + let callback = Callback(jsCallback) - var fileType = AVFileType.mov - if let fileTypeOption = options["fileType"] as? String { - guard let parsed = try? AVFileType(withString: fileTypeOption) else { - callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption))) - return + do { + let options = try RecordVideoOptions(fromJSValue: options) + + // Start Recording with success and error callbacks + cameraSession.startRecording( + options: options, + onVideoRecorded: { video in + callback.resolve(video.toJSValue()) + }, + onError: { error in + callback.reject(error: error) } - fileType = parsed + ) + } catch { + // Some error occured while initializing VideoSettings + if let error = error as? CameraError { + callback.reject(error: error) + } else { + callback.reject(error: .capture(.unknown(message: error.localizedDescription)), cause: error as NSError) } - - let errorPointer = ErrorPointer(nilLiteral: ()) - let fileExtension = fileType.descriptor ?? "mov" - guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else { - callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee) - return - } - - ReactLogger.log(level: .info, message: "File path: \(tempFilePath)") - let tempURL = URL(string: "file://\(tempFilePath)")! - - if let flashMode = options["flash"] as? 
String { - // use the torch as the video's flash - self.setTorchMode(flashMode) - } - - guard let videoOutput = self.videoOutput else { - if self.video?.boolValue == true { - callback.reject(error: .session(.cameraNotReady)) - return - } else { - callback.reject(error: .capture(.videoNotEnabled)) - return - } - } - guard let videoInput = self.videoDeviceInput else { - callback.reject(error: .session(.cameraNotReady)) - return - } - - // TODO: The startRecording() func cannot be async because RN doesn't allow - // both a callback and a Promise in a single function. Wait for TurboModules? - // This means that any errors that occur in this function have to be delegated through - // the callback, but I'd prefer for them to throw for the original function instead. - - let enableAudio = self.audio?.boolValue == true - - let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in - defer { - if enableAudio { - CameraQueues.audioQueue.async { - self.deactivateAudioSession() - } - } - if options["flash"] != nil { - // Set torch mode back to what it was before if we used it for the video flash. - self.setTorchMode(self.torch) - } - } - - self.recordingSession = nil - self.isRecording = false - ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).") - - if let error = error as NSError? { - if error.domain == "capture/aborted" { - callback.reject(error: .capture(.aborted), cause: error) - } else { - callback.reject(error: .capture(.unknown(message: "An unknown recording error occured! \(error.description)")), cause: error) - } - } else { - if status == .completed { - callback.resolve([ - "path": recordingSession.url.absoluteString, - "duration": recordingSession.duration, - ]) - } else { - callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)")) - } - } - } - - let recordingSession: RecordingSession - do { - recordingSession = try RecordingSession(url: tempURL, - fileType: fileType, - completion: onFinish) - } catch let error as NSError { - callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error) - return - } - self.recordingSession = recordingSession - - var videoCodec: AVVideoCodecType? - if let codecString = options["videoCodec"] as? String { - videoCodec = AVVideoCodecType(withString: codecString) - } - - // Init Video - guard var videoSettings = self.recommendedVideoSettings(videoOutput: videoOutput, fileType: fileType, videoCodec: videoCodec), - !videoSettings.isEmpty else { - callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!"))) - return - } - - // Custom Video Bit Rate (Mbps -> bps) - if let videoBitRate = options["videoBitRate"] as? 
NSNumber { - let bitsPerSecond = videoBitRate.doubleValue * 1_000_000 - videoSettings[AVVideoCompressionPropertiesKey] = [ - AVVideoAverageBitRateKey: NSNumber(value: bitsPerSecond), - ] - } - - // get pixel format (420f, 420v, x420) - let pixelFormat = CMFormatDescriptionGetMediaSubType(videoInput.device.activeFormat.formatDescription) - recordingSession.initializeVideoWriter(withSettings: videoSettings, - pixelFormat: pixelFormat) - - // Init Audio (optional) - if enableAudio { - // Activate Audio Session asynchronously - CameraQueues.audioQueue.async { - self.activateAudioSession() - } - - if let audioOutput = self.audioOutput, - let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) { - recordingSession.initializeAudioWriter(withSettings: audioSettings) - } - } - - // start recording session with or without audio. - do { - try recordingSession.startAssetWriter() - } catch let error as NSError { - callback.reject(error: .capture(.createRecorderError(message: "RecordingSession failed to start asset writer.")), cause: error) - return - } - self.isRecording = true } } func stopRecording(promise: Promise) { - CameraQueues.cameraQueue.async { - self.isRecording = false - - withPromise(promise) { - guard let recordingSession = self.recordingSession else { - throw CameraError.capture(.noRecordingInProgress) - } - recordingSession.finish() - return nil - } - } + cameraSession.stopRecording(promise: promise) } func pauseRecording(promise: Promise) { - CameraQueues.cameraQueue.async { - withPromise(promise) { - guard self.recordingSession != nil else { - // there's no active recording! - throw CameraError.capture(.noRecordingInProgress) - } - self.isRecording = false - return nil - } - } + cameraSession.pauseRecording(promise: promise) } func resumeRecording(promise: Promise) { - CameraQueues.cameraQueue.async { - withPromise(promise) { - guard self.recordingSession != nil else { - // there's no active recording! - throw CameraError.capture(.noRecordingInProgress) - } - self.isRecording = true - return nil - } - } - } - - public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) { - #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS - if captureOutput is AVCaptureVideoDataOutput { - if let frameProcessor = frameProcessor { - // Call Frame Processor - let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation) - frameProcessor.call(frame) - } - } - #endif - - // Record Video Frame/Audio Sample to File - if isRecording { - guard let recordingSession = recordingSession else { - invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!"))) - return - } - - switch captureOutput { - case is AVCaptureVideoDataOutput: - recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) - case is AVCaptureAudioDataOutput: - let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), - from: audioCaptureSession.masterClock ?? CMClockGetHostTimeClock(), - to: captureSession.masterClock ?? 
CMClockGetHostTimeClock()) - recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp) - default: - break - } - } - - #if DEBUG - if captureOutput is AVCaptureVideoDataOutput { - // Update FPS Graph per Frame - if let fpsGraph = fpsGraph { - DispatchQueue.main.async { - fpsGraph.onTick(CACurrentMediaTime()) - } - } - } - #endif - } - - private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput, - fileType: AVFileType, - videoCodec: AVVideoCodecType?) -> [String: Any]? { - if videoCodec != nil { - return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec!, assetWriterOutputFileType: fileType) - } else { - return videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType) - } - } - - /** - Gets the orientation of the CameraView's images (CMSampleBuffers). - */ - private var bufferOrientation: UIImage.Orientation { - guard let cameraPosition = videoDeviceInput?.device.position else { - return .up - } - - switch outputOrientation { - case .portrait: - return cameraPosition == .front ? .leftMirrored : .right - case .landscapeLeft: - return cameraPosition == .front ? .downMirrored : .up - case .portraitUpsideDown: - return cameraPosition == .front ? .rightMirrored : .left - case .landscapeRight: - return cameraPosition == .front ? .upMirrored : .down - case .unknown: - return .up - @unknown default: - return .up - } + cameraSession.resumeRecording(promise: promise) } } diff --git a/package/ios/CameraView+TakePhoto.swift b/package/ios/CameraView+TakePhoto.swift index 3dc619e..63d7a71 100644 --- a/package/ios/CameraView+TakePhoto.swift +++ b/package/ios/CameraView+TakePhoto.swift @@ -10,83 +10,6 @@ import AVFoundation extension CameraView { func takePhoto(options: NSDictionary, promise: Promise) { - CameraQueues.cameraQueue.async { - guard let photoOutput = self.photoOutput, - let videoDeviceInput = self.videoDeviceInput else { - if self.photo?.boolValue == true { - promise.reject(error: .session(.cameraNotReady)) - return - } else { - promise.reject(error: .capture(.photoNotEnabled)) - return - } - } - - ReactLogger.log(level: .info, message: "Capturing photo...") - - // Create photo settings - let photoSettings = AVCapturePhotoSettings() - - // default, overridable settings if high quality capture was enabled - if self.enableHighQualityPhotos?.boolValue == true { - // TODO: On iOS 16+ this will be removed in favor of maxPhotoDimensions. - photoSettings.isHighResolutionPhotoEnabled = true - if #available(iOS 13.0, *) { - photoSettings.photoQualityPrioritization = .quality - } - } - - // flash - if videoDeviceInput.device.isFlashAvailable, let flash = options["flash"] as? String { - guard let flashMode = AVCaptureDevice.FlashMode(withString: flash) else { - promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flash))) - return - } - photoSettings.flashMode = flashMode - } - - // shutter sound - let enableShutterSound = options["enableShutterSound"] as? Bool ?? true - - // depth data - photoSettings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled - if #available(iOS 12.0, *) { - photoSettings.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliveryEnabled - } - - // quality prioritization - if #available(iOS 13.0, *), let qualityPrioritization = options["qualityPrioritization"] as? 
String { - guard let photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization(withString: qualityPrioritization) else { - promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: qualityPrioritization))) - return - } - photoSettings.photoQualityPrioritization = photoQualityPrioritization - } - - // photo size is always the one selected in the format - if #available(iOS 16.0, *) { - photoSettings.maxPhotoDimensions = photoOutput.maxPhotoDimensions - } - - // red-eye reduction - if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool { - photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction - } - - // stabilization - if let enableAutoStabilization = options["enableAutoStabilization"] as? Bool { - photoSettings.isAutoStillImageStabilizationEnabled = enableAutoStabilization - } - - // distortion correction - if #available(iOS 14.1, *), let enableAutoDistortionCorrection = options["enableAutoDistortionCorrection"] as? Bool { - photoSettings.isAutoContentAwareDistortionCorrectionEnabled = enableAutoDistortionCorrection - } - - photoOutput.capturePhoto(with: photoSettings, delegate: PhotoCaptureDelegate(promise: promise, enableShutterSound: enableShutterSound)) - - // Assume that `takePhoto` is always called with the same parameters, so prepare the next call too. - photoOutput.setPreparedPhotoSettingsArray([photoSettings], completionHandler: nil) - } + cameraSession.takePhoto(options: options, promise: promise) } } diff --git a/package/ios/CameraView+Torch.swift b/package/ios/CameraView+Torch.swift deleted file mode 100644 index 1e0fc42..0000000 --- a/package/ios/CameraView+Torch.swift +++ /dev/null @@ -1,51 +0,0 @@ -// -// CameraView+Torch.swift -// VisionCamera -// -// Created by Marc Rousavy on 20.07.23. -// Copyright © 2023 mrousavy. All rights reserved. -// - -import AVFoundation -import Foundation - -extension CameraView { - final func setTorchMode(_ torchMode: String) { - guard let device = videoDeviceInput?.device else { - invokeOnError(.session(.cameraNotReady)) - return - } - guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else { - invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch))) - return - } - if !captureSession.isRunning { - torchMode = .off - } - if device.torchMode == torchMode { - // no need to run the whole lock/unlock bs - return - } - if !device.hasTorch || !device.isTorchAvailable { - if torchMode == .off { - // ignore it, when it's off and not supported, it's off. - return - } else { - // torch mode is .auto or .on, but no torch is available. - invokeOnError(.device(.flashUnavailable)) - return - } - } - do { - try device.lockForConfiguration() - device.torchMode = torchMode - if torchMode == .on { - try device.setTorchModeOn(level: 1.0) - } - device.unlockForConfiguration() - } catch let error as NSError { - invokeOnError(.device(.configureError), cause: error) - return - } - } -} diff --git a/package/ios/CameraView+Zoom.swift b/package/ios/CameraView+Zoom.swift index 08df6cc..e7ecabd 100644 --- a/package/ios/CameraView+Zoom.swift +++ b/package/ios/CameraView+Zoom.swift @@ -7,34 +7,20 @@ // import Foundation +import UIKit extension CameraView { - var minAvailableZoom: CGFloat { - return videoDeviceInput?.device.minAvailableVideoZoomFactor ?? 1 - } - - var maxAvailableZoom: CGFloat { - return videoDeviceInput?.device.activeFormat.videoMaxZoomFactor ?? 
1 - } - @objc final func onPinch(_ gesture: UIPinchGestureRecognizer) { - guard let device = videoDeviceInput?.device else { - return - } - - let scale = max(min(gesture.scale * pinchScaleOffset, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0)) + let scale = max(min(gesture.scale * pinchScaleOffset, cameraSession.maxZoom), CGFloat(1.0)) if gesture.state == .ended { pinchScaleOffset = scale return } - do { - try device.lockForConfiguration() - device.videoZoomFactor = scale - device.unlockForConfiguration() - } catch { - invokeOnError(.device(.configureError)) + // Update zoom on Camera + cameraSession.configure { configuration in + configuration.zoom = scale } } @@ -50,24 +36,4 @@ extension CameraView { self.pinchGestureRecognizer = nil } } - - @objc - final func zoom(factor: CGFloat, animated: Bool) { - guard let device = videoDeviceInput?.device else { - return - } - - do { - try device.lockForConfiguration() - let clamped = max(min(factor, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0)) - if animated { - device.ramp(toVideoZoomFactor: clamped, withRate: 1) - } else { - device.videoZoomFactor = clamped - } - device.unlockForConfiguration() - } catch { - invokeOnError(.device(.configureError)) - } - } } diff --git a/package/ios/CameraView.swift b/package/ios/CameraView.swift index 4340276..1603ede 100644 --- a/package/ios/CameraView.swift +++ b/package/ios/CameraView.swift @@ -10,50 +10,36 @@ import AVFoundation import Foundation import UIKit -// // TODOs for the CameraView which are currently too hard to implement either because of AVFoundation's limitations, or my brain capacity // // CameraView+RecordVideo // TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI) - +// // CameraView+TakePhoto // TODO: Photo HDR -private let propsThatRequireReconfiguration = ["cameraId", - "enableDepthData", - "enableHighQualityPhotos", - "enablePortraitEffectsMatteDelivery", - "photo", - "video", - "enableFrameProcessor", - "hdr", - "pixelFormat", - "codeScannerOptions"] -private let propsThatRequireDeviceReconfiguration = ["fps", - "lowLightBoost"] - // MARK: - CameraView -public final class CameraView: UIView { +public final class CameraView: UIView, CameraSessionDelegate { // pragma MARK: React Properties // props that require reconfiguring @objc var cameraId: NSString? @objc var enableDepthData = false - @objc var enableHighQualityPhotos: NSNumber? // nullable bool + @objc var enableHighQualityPhotos = false @objc var enablePortraitEffectsMatteDelivery = false @objc var enableBufferCompression = false // use cases - @objc var photo: NSNumber? // nullable bool - @objc var video: NSNumber? // nullable bool - @objc var audio: NSNumber? // nullable bool + @objc var photo = false + @objc var video = false + @objc var audio = false @objc var enableFrameProcessor = false @objc var codeScannerOptions: NSDictionary? @objc var pixelFormat: NSString? // props that require format reconfiguring @objc var format: NSDictionary? @objc var fps: NSNumber? - @objc var hdr: NSNumber? // nullable bool - @objc var lowLightBoost: NSNumber? // nullable bool + @objc var hdr = false + @objc var lowLightBoost = false @objc var orientation: NSString? // other props @objc var isActive = false @@ -63,7 +49,8 @@ public final class CameraView: UIView { @objc var videoStabilizationMode: NSString? @objc var resizeMode: NSString = "cover" { didSet { - previewView.resizeMode = ResizeMode(fromTypeScriptUnion: resizeMode as String) + let parsed = try? 
ResizeMode(jsValue: resizeMode as String) + previewView.resizeMode = parsed ?? .cover } } @@ -84,20 +71,9 @@ public final class CameraView: UIView { } // pragma MARK: Internal Properties + var cameraSession: CameraSession var isMounted = false var isReady = false - // Capture Session - let captureSession = AVCaptureSession() - let audioCaptureSession = AVCaptureSession() - // Inputs & Outputs - var videoDeviceInput: AVCaptureDeviceInput? - var audioDeviceInput: AVCaptureDeviceInput? - var photoOutput: AVCapturePhotoOutput? - var videoOutput: AVCaptureVideoDataOutput? - var audioOutput: AVCaptureAudioDataOutput? - // CameraView+RecordView (+ Frame Processor) - var isRecording = false - var recordingSession: RecordingSession? #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS @objc public var frameProcessor: FrameProcessor? #endif @@ -110,30 +86,16 @@ public final class CameraView: UIView { var fpsGraph: RCTFPSGraph? #endif - /// Returns whether the AVCaptureSession is currently running (reflected by isActive) - var isRunning: Bool { - return captureSession.isRunning - } - // pragma MARK: Setup + override public init(frame: CGRect) { - previewView = PreviewView(frame: frame, session: captureSession) + // Create CameraSession + cameraSession = CameraSession() + previewView = cameraSession.createPreviewView(frame: frame) super.init(frame: frame) + cameraSession.delegate = self addSubview(previewView) - - NotificationCenter.default.addObserver(self, - selector: #selector(sessionRuntimeError), - name: .AVCaptureSessionRuntimeError, - object: captureSession) - NotificationCenter.default.addObserver(self, - selector: #selector(sessionRuntimeError), - name: .AVCaptureSessionRuntimeError, - object: audioCaptureSession) - NotificationCenter.default.addObserver(self, - selector: #selector(audioSessionInterrupted), - name: AVAudioSession.interruptionNotification, - object: AVAudioSession.sharedInstance) } @available(*, unavailable) @@ -141,18 +103,6 @@ public final class CameraView: UIView { fatalError("init(coder:) is not implemented.") } - deinit { - NotificationCenter.default.removeObserver(self, - name: .AVCaptureSessionRuntimeError, - object: captureSession) - NotificationCenter.default.removeObserver(self, - name: .AVCaptureSessionRuntimeError, - object: audioCaptureSession) - NotificationCenter.default.removeObserver(self, - name: AVAudioSession.interruptionNotification, - object: AVAudioSession.sharedInstance) - } - override public func willMove(toSuperview newSuperview: UIView?) { super.willMove(toSuperview: newSuperview) @@ -169,89 +119,111 @@ public final class CameraView: UIView { previewView.bounds = bounds } + func getPixelFormat() -> PixelFormat { + // TODO: Use ObjC RCT enum parser for this + if let pixelFormat = pixelFormat as? String { + do { + return try PixelFormat(jsValue: pixelFormat) + } catch { + if let error = error as? CameraError { + onError(error) + } else { + onError(.unknown(message: error.localizedDescription, cause: error as NSError)) + } + } + } + return .native + } + + func getTorch() -> Torch { + // TODO: Use ObjC RCT enum parser for this + if let torch = try? Torch(jsValue: torch) { + return torch + } + return .off + } + // pragma MARK: Props updating override public final func didSetProps(_ changedProps: [String]!) 
{ - ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...") - let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) } - let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format") - let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) } - let shouldReconfigureAudioSession = changedProps.contains("audio") + ReactLogger.log(level: .info, message: "Updating \(changedProps.count) props: [\(changedProps.joined(separator: ", "))]") - let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice + cameraSession.configure { config in + // Input Camera Device + config.cameraId = cameraId as? String - let shouldCheckActive = willReconfigure || changedProps.contains("isActive") || captureSession.isRunning != isActive - let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive - let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive - let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode") - let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation") + // Photo + if photo { + config.photo = .enabled(config: CameraConfiguration.Photo(enableHighQualityPhotos: enableHighQualityPhotos, + enableDepthData: enableDepthData, + enablePortraitEffectsMatte: enablePortraitEffectsMatteDelivery)) + } else { + config.photo = .disabled + } + // Video/Frame Processor + if video || enableFrameProcessor { + config.video = .enabled(config: CameraConfiguration.Video(pixelFormat: getPixelFormat(), + enableBufferCompression: enableBufferCompression, + enableHdr: hdr, + enableFrameProcessor: enableFrameProcessor)) + } else { + config.video = .disabled + } + + // Audio + if audio { + config.audio = .enabled(config: CameraConfiguration.Audio()) + } else { + config.audio = .disabled + } + + // Code Scanner + if let codeScannerOptions { + let codeScanner = try CodeScanner(fromJsValue: codeScannerOptions) + config.codeScanner = .enabled(config: codeScanner) + } else { + config.codeScanner = .disabled + } + + // Orientation + if let jsOrientation = orientation as? 
String { + let orientation = try Orientation(jsValue: jsOrientation) + config.orientation = orientation + } else { + config.orientation = .portrait + } + + // Format + if let jsFormat = format { + let format = try CameraDeviceFormat(jsValue: jsFormat) + config.format = format + } else { + config.format = nil + } + + // Side-Props + config.fps = fps?.int32Value + config.enableLowLightBoost = lowLightBoost + config.torch = getTorch() + + // Zoom + config.zoom = zoom.doubleValue + + // isActive + config.isActive = isActive + } + + // Store `zoom` offset for native pinch-gesture + if changedProps.contains("zoom") { + pinchScaleOffset = zoom.doubleValue + } + + // Set up Debug FPS Graph if changedProps.contains("enableFpsGraph") { DispatchQueue.main.async { self.setupFpsGraph() } } - - if shouldReconfigure || - shouldReconfigureAudioSession || - shouldCheckActive || - shouldUpdateTorch || - shouldUpdateZoom || - shouldReconfigureFormat || - shouldReconfigureDevice || - shouldUpdateVideoStabilization || - shouldUpdateOrientation { - CameraQueues.cameraQueue.async { - // Video Configuration - if shouldReconfigure { - self.configureCaptureSession() - } - if shouldReconfigureFormat { - self.configureFormat() - } - if shouldReconfigureDevice { - self.configureDevice() - } - if shouldUpdateVideoStabilization, let videoStabilizationMode = self.videoStabilizationMode as String? { - self.captureSession.setVideoStabilizationMode(videoStabilizationMode) - } - - if shouldUpdateZoom { - let zoomClamped = max(min(CGFloat(self.zoom.doubleValue), self.maxAvailableZoom), self.minAvailableZoom) - self.zoom(factor: zoomClamped, animated: false) - self.pinchScaleOffset = zoomClamped - } - - if shouldCheckActive && self.captureSession.isRunning != self.isActive { - if self.isActive { - ReactLogger.log(level: .info, message: "Starting Session...") - self.captureSession.startRunning() - ReactLogger.log(level: .info, message: "Started Session!") - } else { - ReactLogger.log(level: .info, message: "Stopping Session...") - self.captureSession.stopRunning() - ReactLogger.log(level: .info, message: "Stopped Session!") - } - } - - if shouldUpdateOrientation { - self.updateOrientation() - } - - // This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch. - if shouldUpdateTorch { - CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) { - self.setTorchMode(self.torch) - } - } - } - - // Audio Configuration - if shouldReconfigureAudioSession { - CameraQueues.audioQueue.async { - self.configureAudioSession() - } - } - } } func setupFpsGraph() { @@ -269,12 +241,16 @@ public final class CameraView: UIView { } // pragma MARK: Event Invokers - final func invokeOnError(_ error: CameraError, cause: NSError? = nil) { + + func onError(_ error: CameraError) { ReactLogger.log(level: .error, message: "Invoking onError(): \(error.message)") - guard let onError = onError else { return } + guard let onError = onError else { + return + } var causeDictionary: [String: Any]? 
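+    // Only .unknown errors carry an underlying NSError cause that can be serialized for JS.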
- if let cause = cause { + if case let .unknown(_, cause) = error, + let cause = cause { causeDictionary = [ "code": cause.code, "domain": cause.domain, @@ -289,9 +265,58 @@ public final class CameraView: UIView { ]) } - final func invokeOnInitialized() { + func onSessionInitialized() { ReactLogger.log(level: .info, message: "Camera initialized!") - guard let onInitialized = onInitialized else { return } + guard let onInitialized = onInitialized else { + return + } onInitialized([String: Any]()) } + + func onFrame(sampleBuffer: CMSampleBuffer) { + #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS + if let frameProcessor = frameProcessor { + // Call Frame Processor + let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation) + frameProcessor.call(frame) + } + #endif + + #if DEBUG + if let fpsGraph { + fpsGraph.onTick(CACurrentMediaTime()) + } + #endif + } + + func onCodeScanned(codes: [CameraSession.Code]) { + guard let onCodeScanned = onCodeScanned else { + return + } + onCodeScanned([ + "codes": codes.map { $0.toJSValue() }, + ]) + } + + /** + Gets the orientation of the CameraView's images (CMSampleBuffers). + */ + private var bufferOrientation: UIImage.Orientation { + guard let cameraPosition = cameraSession.videoDeviceInput?.device.position else { + return .up + } + let orientation = cameraSession.configuration?.orientation ?? .portrait + + // TODO: I think this is wrong. + switch orientation { + case .portrait: + return cameraPosition == .front ? .leftMirrored : .right + case .landscapeLeft: + return cameraPosition == .front ? .downMirrored : .up + case .portraitUpsideDown: + return cameraPosition == .front ? .rightMirrored : .left + case .landscapeRight: + return cameraPosition == .front ? .upMirrored : .down + } + } } diff --git a/package/ios/CameraViewManager.m b/package/ios/CameraViewManager.m index 4cee979..d01dda5 100644 --- a/package/ios/CameraViewManager.m +++ b/package/ios/CameraViewManager.m @@ -25,19 +25,19 @@ RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(installFrameProcessorBindings); RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL); RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString); RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL); -RCT_EXPORT_VIEW_PROPERTY(enableHighQualityPhotos, NSNumber); // nullable bool +RCT_EXPORT_VIEW_PROPERTY(enableHighQualityPhotos, BOOL); RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL); RCT_EXPORT_VIEW_PROPERTY(enableBufferCompression, BOOL); // use cases -RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool -RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool -RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool +RCT_EXPORT_VIEW_PROPERTY(photo, BOOL); +RCT_EXPORT_VIEW_PROPERTY(video, BOOL); +RCT_EXPORT_VIEW_PROPERTY(audio, BOOL); RCT_EXPORT_VIEW_PROPERTY(enableFrameProcessor, BOOL); // device format RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary); RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber); -RCT_EXPORT_VIEW_PROPERTY(hdr, NSNumber); // nullable bool -RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool +RCT_EXPORT_VIEW_PROPERTY(hdr, BOOL); +RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, BOOL); RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString); RCT_EXPORT_VIEW_PROPERTY(pixelFormat, NSString); // other props diff --git a/package/ios/CameraViewManager.swift b/package/ios/CameraViewManager.swift index 11044ea..2739a0e 100644 --- a/package/ios/CameraViewManager.swift +++ b/package/ios/CameraViewManager.swift @@ -38,6 +38,10 @@ final class CameraViewManager: RCTViewManager { #endif } + // TODO: The 
startRecording() func cannot be async because RN doesn't allow
+  // both a callback and a Promise in a single function. Wait for TurboModules?
+  // This means that any errors that occur in this function have to be delegated through
+  // the callback, but I'd prefer them to throw from the original function instead.
   @objc
   final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
     let component = getCameraView(withTag: node)
diff --git a/package/ios/Core/CameraConfiguration.swift b/package/ios/Core/CameraConfiguration.swift
new file mode 100644
index 0000000..6471d30
--- /dev/null
+++ b/package/ios/Core/CameraConfiguration.swift
@@ -0,0 +1,231 @@
+//
+//  CameraConfiguration.swift
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 11.10.23.
+//  Copyright © 2023 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+// MARK: - CameraConfiguration
+
+class CameraConfiguration {
+  // pragma MARK: Configuration Props
+
+  // Input
+  var cameraId: String?
+
+  // Outputs
+  var photo: OutputConfiguration = .disabled
+  var video: OutputConfiguration