feat: New Core/ library (#1975)
Moves everything Camera-related into `core/` / `Core/` so that it is better encapsulated from React Native. Benefits:

1. Code is much better organized. It should be easier for collaborators to navigate now, and a cleaner codebase for me.
2. Locking is fully atomic: you can now only configure the session through a lock/Mutex, and changes are batch-overridable (a sketch of this pattern follows below).
   * On iOS, this makes Camera startup time **MUCH** faster; I measured speedups from **1.5 seconds** down to only **240 milliseconds**, since we only lock/commit once! 🚀
   * On Android, this fixes a few out-of-sync/concurrency issues like "Capture Request contains unconfigured Input/Output Surface!", since it is now a single lock operation! 💪
3. It is easier to integrate VisionCamera outside of React Native (e.g. native iOS apps, NativeScript, Flutter, etc.).

With this PR, VisionCamera V3 is up to **7x** faster than V2.
parent 54871022f4
commit cd0b413706
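The batched locking described in benefit 2 is visible at the call sites in this diff: instead of locking the device once per changed property, callers mutate a configuration object inside a single closure and the session commits everything at once (see the `cameraSession.configure { configuration in configuration.zoom = scale }` call in the pinch-to-zoom hunk near the end). A minimal sketch of that pattern, assuming a hypothetical `CameraConfiguration` shape — these are not the library's actual types:

```swift
import Foundation

// Hypothetical configuration object; the fields are illustrative only.
final class CameraConfiguration {
  var zoom: CGFloat = 1.0
  var fps: Int32?

  func copy() -> CameraConfiguration {
    let copied = CameraConfiguration()
    copied.zoom = zoom
    copied.fps = fps
    return copied
  }
}

final class CameraSession {
  private let mutex = NSLock()
  private var configuration = CameraConfiguration()

  // Callers batch all property changes into one closure; the session applies
  // them under a single lock and commits once, instead of locking per property.
  func configure(_ lambda: (CameraConfiguration) throws -> Void) rethrows {
    mutex.lock()
    defer { mutex.unlock() }

    let config = configuration.copy() // batch-overridable working copy
    try lambda(config)                // caller mutates zoom, fps, ...
    // ...diff `config` against the previous configuration and apply it to the
    // underlying capture session in one beginConfiguration()/commitConfiguration() pass...
    configuration = config
  }
}
```

Collapsing N property writes into one lock acquisition and one commit is the mechanism behind the startup numbers quoted above, and it also removes the window in which Android could observe a half-configured session.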
@@ -41,6 +41,7 @@ Pod::Spec.new do |s|
   s.source_files = [
     # Core
     "ios/*.{m,mm,swift}",
+    "ios/Core/*.{m,mm,swift}",
     "ios/Extensions/*.{m,mm,swift}",
     "ios/Parsers/*.{m,mm,swift}",
     "ios/React Utils/*.{m,mm,swift}",
@@ -6,6 +6,9 @@ import com.facebook.react.bridge.ReactContext
 import com.facebook.react.bridge.WritableMap
 import com.facebook.react.uimanager.events.RCTEventEmitter
 import com.google.mlkit.vision.barcode.common.Barcode
+import com.mrousavy.camera.core.CameraError
+import com.mrousavy.camera.core.UnknownCameraError
+import com.mrousavy.camera.core.code
 import com.mrousavy.camera.parsers.CodeType
 
 fun CameraView.invokeOnInitialized() {
@@ -5,7 +5,10 @@ import android.annotation.SuppressLint
 import android.content.pm.PackageManager
 import androidx.core.content.ContextCompat
 import com.facebook.react.bridge.*
+import com.mrousavy.camera.core.MicrophonePermissionError
+import com.mrousavy.camera.core.RecorderError
+import com.mrousavy.camera.core.RecordingSession
+import com.mrousavy.camera.core.code
 import com.mrousavy.camera.parsers.Torch
 import com.mrousavy.camera.parsers.VideoCodec
 import com.mrousavy.camera.parsers.VideoFileType
@@ -13,7 +13,10 @@ import android.view.Surface
 import android.widget.FrameLayout
 import androidx.core.content.ContextCompat
 import com.facebook.react.bridge.ReadableMap
+import com.mrousavy.camera.core.CameraPermissionError
+import com.mrousavy.camera.core.CameraQueues
+import com.mrousavy.camera.core.CameraSession
+import com.mrousavy.camera.core.NoCameraDeviceError
+import com.mrousavy.camera.core.PreviewView
 import com.mrousavy.camera.core.outputs.CameraOutputs
 import com.mrousavy.camera.extensions.bigger
@@ -9,6 +9,8 @@ import com.facebook.react.module.annotations.ReactModule
 import com.facebook.react.modules.core.PermissionAwareActivity
 import com.facebook.react.modules.core.PermissionListener
 import com.facebook.react.uimanager.UIManagerHelper
+import com.mrousavy.camera.core.CameraError
+import com.mrousavy.camera.core.ViewNotFoundError
 import com.mrousavy.camera.frameprocessor.VisionCameraInstaller
 import com.mrousavy.camera.frameprocessor.VisionCameraProxy
 import com.mrousavy.camera.parsers.*
@@ -1,4 +1,4 @@
-package com.mrousavy.camera
+package com.mrousavy.camera.core
 
 import com.mrousavy.camera.core.outputs.CameraOutputs
 import com.mrousavy.camera.parsers.CameraDeviceError
@@ -1,4 +1,4 @@
-package com.mrousavy.camera
+package com.mrousavy.camera.core
 
 import android.os.Handler
 import android.os.HandlerThread
@@ -16,15 +16,7 @@ import android.os.Build
 import android.util.Log
 import android.util.Range
 import android.util.Size
-import com.mrousavy.camera.CameraNotReadyError
-import com.mrousavy.camera.CameraQueues
 import com.mrousavy.camera.CameraView
-import com.mrousavy.camera.CaptureAbortedError
-import com.mrousavy.camera.NoRecordingInProgressError
-import com.mrousavy.camera.PhotoNotEnabledError
-import com.mrousavy.camera.RecorderError
-import com.mrousavy.camera.RecordingInProgressError
-import com.mrousavy.camera.VideoNotEnabledError
 import com.mrousavy.camera.core.outputs.CameraOutputs
 import com.mrousavy.camera.extensions.capture
 import com.mrousavy.camera.extensions.createCaptureSession
@@ -7,7 +7,6 @@ import com.google.mlkit.vision.barcode.BarcodeScanner
 import com.google.mlkit.vision.barcode.BarcodeScannerOptions
 import com.google.mlkit.vision.barcode.BarcodeScanning
 import com.google.mlkit.vision.common.InputImage
-import com.mrousavy.camera.CameraQueues
 import com.mrousavy.camera.core.outputs.CameraOutputs
 import com.mrousavy.camera.parsers.Orientation
 import java.io.Closeable
@@ -7,7 +7,6 @@ import android.os.Build
 import android.util.Log
 import android.util.Size
 import android.view.Surface
-import com.mrousavy.camera.RecorderError
 import com.mrousavy.camera.parsers.Orientation
 import com.mrousavy.camera.parsers.VideoCodec
 import com.mrousavy.camera.parsers.VideoFileType
@@ -9,8 +9,6 @@ import android.os.Build
 import android.util.Log
 import android.view.Surface
 import com.facebook.jni.HybridData
-import com.mrousavy.camera.CameraQueues
-import com.mrousavy.camera.FrameProcessorsUnavailableError
 import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
 import com.mrousavy.camera.parsers.Orientation
@@ -9,7 +9,7 @@ import android.util.Log
 import android.util.Size
 import android.view.Surface
 import com.google.mlkit.vision.barcode.common.Barcode
-import com.mrousavy.camera.CameraQueues
+import com.mrousavy.camera.core.CameraQueues
 import com.mrousavy.camera.core.CodeScannerPipeline
 import com.mrousavy.camera.core.VideoPipeline
 import com.mrousavy.camera.extensions.bigger
@@ -5,9 +5,9 @@ import android.hardware.camera2.CaptureFailure
 import android.hardware.camera2.CaptureRequest
 import android.hardware.camera2.TotalCaptureResult
 import android.media.MediaActionSound
-import com.mrousavy.camera.CameraQueues
-import com.mrousavy.camera.CaptureAbortedError
-import com.mrousavy.camera.UnknownCaptureError
+import com.mrousavy.camera.core.CameraQueues
+import com.mrousavy.camera.core.CaptureAbortedError
+import com.mrousavy.camera.core.UnknownCaptureError
 import kotlin.coroutines.resume
 import kotlin.coroutines.resumeWithException
 import kotlin.coroutines.suspendCoroutine
@@ -8,8 +8,8 @@ import android.hardware.camera2.params.OutputConfiguration
 import android.hardware.camera2.params.SessionConfiguration
 import android.os.Build
 import android.util.Log
-import com.mrousavy.camera.CameraQueues
-import com.mrousavy.camera.CameraSessionCannotBeConfiguredError
+import com.mrousavy.camera.core.CameraQueues
+import com.mrousavy.camera.core.CameraSessionCannotBeConfiguredError
 import com.mrousavy.camera.core.outputs.CameraOutputs
 import kotlin.coroutines.resume
 import kotlin.coroutines.resumeWithException
@@ -5,9 +5,9 @@ import android.hardware.camera2.CameraDevice
 import android.hardware.camera2.CameraManager
 import android.os.Build
 import android.util.Log
-import com.mrousavy.camera.CameraCannotBeOpenedError
-import com.mrousavy.camera.CameraDisconnectedError
-import com.mrousavy.camera.CameraQueues
+import com.mrousavy.camera.core.CameraCannotBeOpenedError
+import com.mrousavy.camera.core.CameraDisconnectedError
+import com.mrousavy.camera.core.CameraQueues
 import com.mrousavy.camera.parsers.CameraDeviceError
 import kotlin.coroutines.resume
 import kotlin.coroutines.resumeWithException
@@ -4,12 +4,10 @@ import android.hardware.HardwareBuffer;
 import android.media.Image;
 import android.os.Build;
 import com.facebook.proguard.annotations.DoNotStrip;
-import com.mrousavy.camera.HardwareBuffersNotAvailableError;
+import com.mrousavy.camera.core.HardwareBuffersNotAvailableError;
 import com.mrousavy.camera.parsers.PixelFormat;
 import com.mrousavy.camera.parsers.Orientation;
 
 import java.nio.ByteBuffer;
 
 public class Frame {
   private final Image image;
   private final boolean isMirrored;
@@ -10,7 +10,7 @@ import com.facebook.react.bridge.UiThreadUtil
 import com.facebook.react.turbomodule.core.CallInvokerHolderImpl
 import com.facebook.react.uimanager.UIManagerHelper
 import com.mrousavy.camera.CameraView
-import com.mrousavy.camera.ViewNotFoundError
+import com.mrousavy.camera.core.ViewNotFoundError
 import java.lang.ref.WeakReference
 
 @Suppress("KotlinJniMissingFunction") // we use fbjni.
@@ -2,9 +2,7 @@ package com.mrousavy.camera.frameprocessor;
 
 import com.facebook.jni.HybridData;
 import com.facebook.proguard.annotations.DoNotStrip;
-import com.mrousavy.camera.CameraQueues;
-
 import java.util.concurrent.ExecutorService;
+import com.mrousavy.camera.core.CameraQueues;
 
 @SuppressWarnings("JavaJniMissingFunction") // using fbjni here
 public class VisionCameraScheduler {
@@ -1,7 +1,7 @@
 package com.mrousavy.camera.parsers
 
 import com.facebook.react.bridge.ReadableMap
-import com.mrousavy.camera.InvalidTypeScriptUnionError
+import com.mrousavy.camera.core.InvalidTypeScriptUnionError
 
 class CodeScanner(map: ReadableMap) {
   val codeTypes: List<CodeType>
@@ -1,8 +1,8 @@
 package com.mrousavy.camera.parsers
 
 import com.google.mlkit.vision.barcode.common.Barcode
-import com.mrousavy.camera.CodeTypeNotSupportedError
-import com.mrousavy.camera.InvalidTypeScriptUnionError
+import com.mrousavy.camera.core.CodeTypeNotSupportedError
+import com.mrousavy.camera.core.InvalidTypeScriptUnionError
 
 enum class CodeType(override val unionValue: String) : JSUnionValue {
   CODE_128("code-128"),
@@ -1,6 +1,6 @@
 package com.mrousavy.camera.parsers
 
-import com.mrousavy.camera.InvalidTypeScriptUnionError
+import com.mrousavy.camera.core.InvalidTypeScriptUnionError
 
 enum class Flash(override val unionValue: String) : JSUnionValue {
   OFF("off"),
@@ -1,7 +1,7 @@
 package com.mrousavy.camera.parsers
 
 import android.graphics.ImageFormat
-import com.mrousavy.camera.PixelFormatNotSupportedError
+import com.mrousavy.camera.core.PixelFormatNotSupportedError
 
 enum class PixelFormat(override val unionValue: String) : JSUnionValue {
   YUV("yuv"),
@@ -1,6 +1,6 @@
 package com.mrousavy.camera.parsers
 
-import com.mrousavy.camera.InvalidTypeScriptUnionError
+import com.mrousavy.camera.core.InvalidTypeScriptUnionError
 
 enum class VideoFileType(override val unionValue: String) : JSUnionValue {
   MOV("mov"),
@@ -1,8 +1,8 @@
 package com.mrousavy.camera.utils
 
 import com.facebook.react.bridge.Promise
-import com.mrousavy.camera.CameraError
-import com.mrousavy.camera.UnknownCameraError
+import com.mrousavy.camera.core.CameraError
+import com.mrousavy.camera.core.UnknownCameraError
 
 inline fun withPromise(promise: Promise, closure: () -> Any?) {
   try {
@@ -747,7 +747,7 @@ SPEC CHECKSUMS:
   SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d
   SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d
   SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
-  VisionCamera: f649cd0c0fa6266f1cd5e0787a7c9583ca143b3a
+  VisionCamera: f386aee60abb07d979c506ea9e6d4831e596cafe
   Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce
 
 PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
@@ -47,7 +47,12 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
 
   // camera device settings
   const [preferredDevice] = usePreferredCameraDevice()
-  const device = useCameraDevice(cameraPosition)
+  let device = useCameraDevice(cameraPosition)
+
+  if (preferredDevice != null && preferredDevice.position === cameraPosition) {
+    // override default device with the one selected by the user in settings
+    device = preferredDevice
+  }
 
   const [targetFps, setTargetFps] = useState(60)
@@ -172,7 +177,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
       <ReanimatedCamera
         ref={camera}
         style={StyleSheet.absoluteFill}
-        device={preferredDevice ?? device}
+        device={device}
         format={format}
         fps={fps}
         hdr={enableHdr}
@@ -1,151 +0,0 @@
-//
-// CameraView+AVAudioSession.swift
-// VisionCamera
-//
-// Created by Marc Rousavy on 26.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-/**
- Extension for CameraView that sets up the AVAudioSession.
- */
-extension CameraView {
-  /**
-   Configures the Audio Capture Session with an audio input and audio data output.
-   */
-  final func configureAudioSession() {
-    ReactLogger.log(level: .info, message: "Configuring Audio Session...")
-
-    audioCaptureSession.beginConfiguration()
-    defer {
-      audioCaptureSession.commitConfiguration()
-    }
-
-    audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
-    let enableAudio = audio?.boolValue == true
-
-    // check microphone permission
-    if enableAudio {
-      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
-      if audioPermissionStatus != .authorized {
-        invokeOnError(.permission(.microphone))
-        return
-      }
-    }
-
-    // Audio Input
-    do {
-      if let audioDeviceInput = audioDeviceInput {
-        audioCaptureSession.removeInput(audioDeviceInput)
-        self.audioDeviceInput = nil
-      }
-      if enableAudio {
-        ReactLogger.log(level: .info, message: "Adding Audio input...")
-        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
-          invokeOnError(.device(.microphoneUnavailable))
-          return
-        }
-        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
-        guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
-          invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
-          return
-        }
-        audioCaptureSession.addInput(audioDeviceInput!)
-      }
-    } catch let error as NSError {
-      invokeOnError(.device(.microphoneUnavailable), cause: error)
-      return
-    }
-
-    // Audio Output
-    if let audioOutput = audioOutput {
-      audioCaptureSession.removeOutput(audioOutput)
-      self.audioOutput = nil
-    }
-    if enableAudio {
-      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
-      audioOutput = AVCaptureAudioDataOutput()
-      guard audioCaptureSession.canAddOutput(audioOutput!) else {
-        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
-        return
-      }
-      audioOutput!.setSampleBufferDelegate(self, queue: CameraQueues.audioQueue)
-      audioCaptureSession.addOutput(audioOutput!)
-    }
-  }
-
-  /**
-   Configures the Audio session and activates it. If the session was active it will shortly be deactivated before configuration.
-
-   The Audio Session will be configured to allow background music, haptics (vibrations) and system sound playback while recording.
-   Background audio is allowed to play on speakers or bluetooth speakers.
-   */
-  final func activateAudioSession() {
-    ReactLogger.log(level: .info, message: "Activating Audio Session...")
-
-    do {
-      try AVAudioSession.sharedInstance().updateCategory(AVAudioSession.Category.playAndRecord,
-                                                         options: [.mixWithOthers,
-                                                                   .allowBluetoothA2DP,
-                                                                   .defaultToSpeaker,
-                                                                   .allowAirPlay])
-
-      if #available(iOS 14.5, *) {
-        // prevents the audio session from being interrupted by a phone call
-        try AVAudioSession.sharedInstance().setPrefersNoInterruptionsFromSystemAlerts(true)
-      }
-
-      audioCaptureSession.startRunning()
-    } catch let error as NSError {
-      switch error.code {
-      case 561_017_449:
-        self.invokeOnError(.session(.audioInUseByOtherApp), cause: error)
-      default:
-        self.invokeOnError(.session(.audioSessionFailedToActivate), cause: error)
-      }
-    }
-  }
-
-  final func deactivateAudioSession() {
-    ReactLogger.log(level: .info, message: "Deactivating Audio Session...")
-
-    audioCaptureSession.stopRunning()
-  }
-
-  @objc
-  func audioSessionInterrupted(notification: Notification) {
-    ReactLogger.log(level: .error, message: "Audio Session Interruption Notification!")
-    guard let userInfo = notification.userInfo,
-          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
-          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
-      return
-    }
-
-    // TODO: Add JS-Event for Audio Session interruptions?
-    switch type {
-    case .began:
-      // Something interrupted our Audio Session, stop recording audio.
-      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
-    case .ended:
-      ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
-      guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
-      let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
-      if options.contains(.shouldResume) {
-        if isRecording {
-          CameraQueues.audioQueue.async {
-            ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
-            // restart audio session because interruption is over
-            self.activateAudioSession()
-          }
-        }
-      } else {
-        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
-      }
-    @unknown default:
-      ()
-    }
-  }
-}
@@ -1,369 +0,0 @@
-//
-// CameraView+AVCaptureSession.swift
-// VisionCamera
-//
-// Created by Marc Rousavy on 26.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-/**
- Extension for CameraView that sets up the AVCaptureSession, Device and Format.
- */
-extension CameraView {
-  // pragma MARK: Configure Capture Session
-
-  /**
-   Configures the Capture Session.
-   */
-  final func configureCaptureSession() {
-    ReactLogger.log(level: .info, message: "Configuring Session...")
-    isReady = false
-
-    #if targetEnvironment(simulator)
-      invokeOnError(.device(.notAvailableOnSimulator))
-      return
-    #endif
-
-    guard cameraId != nil else {
-      invokeOnError(.device(.noDevice))
-      return
-    }
-    let cameraId = self.cameraId! as String
-
-    ReactLogger.log(level: .info, message: "Initializing Camera with device \(cameraId)...")
-    captureSession.beginConfiguration()
-    defer {
-      captureSession.commitConfiguration()
-    }
-
-    // pragma MARK: Capture Session Inputs
-    // Video Input
-    do {
-      if let videoDeviceInput = videoDeviceInput {
-        captureSession.removeInput(videoDeviceInput)
-        self.videoDeviceInput = nil
-      }
-      ReactLogger.log(level: .info, message: "Adding Video input...")
-      guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
-        invokeOnError(.device(.invalid))
-        return
-      }
-      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
-      guard captureSession.canAddInput(videoDeviceInput!) else {
-        invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "video-input")))
-        return
-      }
-      captureSession.addInput(videoDeviceInput!)
-    } catch {
-      invokeOnError(.device(.invalid))
-      return
-    }
-
-    // pragma MARK: Capture Session Outputs
-
-    // Photo Output
-    if let photoOutput = photoOutput {
-      captureSession.removeOutput(photoOutput)
-      self.photoOutput = nil
-    }
-    if photo?.boolValue == true {
-      ReactLogger.log(level: .info, message: "Adding Photo output...")
-      photoOutput = AVCapturePhotoOutput()
-
-      if enableHighQualityPhotos?.boolValue == true {
-        // TODO: In iOS 16 this will be removed in favor of maxPhotoDimensions.
-        photoOutput!.isHighResolutionCaptureEnabled = true
-        if #available(iOS 13.0, *) {
-          // TODO: Test if this actually does any fusion or if this just calls the captureOutput twice. If the latter, remove it.
-          photoOutput!.isVirtualDeviceConstituentPhotoDeliveryEnabled = photoOutput!.isVirtualDeviceConstituentPhotoDeliverySupported
-          photoOutput!.maxPhotoQualityPrioritization = .quality
-        } else {
-          photoOutput!.isDualCameraDualPhotoDeliveryEnabled = photoOutput!.isDualCameraDualPhotoDeliverySupported
-        }
-      }
-      // TODO: Enable isResponsiveCaptureEnabled? (iOS 17+)
-      // TODO: Enable isFastCapturePrioritizationEnabled? (iOS 17+)
-      if enableDepthData {
-        photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported
-      }
-      if #available(iOS 12.0, *), enablePortraitEffectsMatteDelivery {
-        photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported
-      }
-      guard captureSession.canAddOutput(photoOutput!) else {
-        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
-        return
-      }
-      captureSession.addOutput(photoOutput!)
-      if videoDeviceInput!.device.position == .front {
-        photoOutput!.mirror()
-      }
-    }
-
-    // Video Output + Frame Processor
-    if let videoOutput = videoOutput {
-      captureSession.removeOutput(videoOutput)
-      self.videoOutput = nil
-    }
-    if video?.boolValue == true || enableFrameProcessor {
-      ReactLogger.log(level: .info, message: "Adding Video Data output...")
-      videoOutput = AVCaptureVideoDataOutput()
-      guard captureSession.canAddOutput(videoOutput!) else {
-        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
-        return
-      }
-      videoOutput!.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
-      videoOutput!.alwaysDiscardsLateVideoFrames = false
-
-      let pixelFormatType = getPixelFormat(videoOutput: videoOutput!)
-      videoOutput!.videoSettings = [
-        String(kCVPixelBufferPixelFormatTypeKey): pixelFormatType,
-      ]
-      captureSession.addOutput(videoOutput!)
-    }
-
-    // Code Scanner
-    if let codeScannerOptions = codeScannerOptions {
-      guard let codeScanner = try? CodeScanner(fromJsValue: codeScannerOptions) else {
-        invokeOnError(.parameter(.invalid(unionName: "codeScanner", receivedValue: codeScannerOptions.description)))
-        return
-      }
-      let metadataOutput = AVCaptureMetadataOutput()
-      guard captureSession.canAddOutput(metadataOutput) else {
-        invokeOnError(.codeScanner(.notCompatibleWithOutputs))
-        return
-      }
-      captureSession.addOutput(metadataOutput)
-
-      for codeType in codeScanner.codeTypes {
-        // swiftlint:disable:next for_where
-        if !metadataOutput.availableMetadataObjectTypes.contains(codeType) {
-          invokeOnError(.codeScanner(.codeTypeNotSupported(codeType: codeType.descriptor)))
-          return
-        }
-      }
-
-      metadataOutput.setMetadataObjectsDelegate(self, queue: CameraQueues.codeScannerQueue)
-      metadataOutput.metadataObjectTypes = codeScanner.codeTypes
-      if let rectOfInterest = codeScanner.regionOfInterest {
-        metadataOutput.rectOfInterest = rectOfInterest
-      }
-    }
-
-    if outputOrientation != .portrait {
-      updateOrientation()
-    }
-
-    invokeOnInitialized()
-    isReady = true
-    ReactLogger.log(level: .info, message: "Session successfully configured!")
-  }
-
-  /**
-   Returns the pixel format that should be used for the AVCaptureVideoDataOutput.
-   If HDR is enabled, this will return YUV 4:2:0 10-bit.
-   If HDR is disabled, this will return whatever the user specified as a pixelFormat, or the most efficient format as a fallback.
-   */
-  private func getPixelFormat(videoOutput: AVCaptureVideoDataOutput) -> OSType {
-    // as per documentation, the first value is always the most efficient format
-    var defaultFormat = videoOutput.availableVideoPixelFormatTypes.first!
-    if enableBufferCompression {
-      // use compressed format instead if we enabled buffer compression
-      if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange &&
-        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange) {
-        // YUV 4:2:0 8-bit (limited video colors; compressed)
-        defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange
-      }
-      if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange &&
-        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange) {
-        // YUV 4:2:0 8-bit (full video colors; compressed)
-        defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange
-      }
-    }
-
-    // If the user enabled HDR, we can only use the YUV 4:2:0 10-bit pixel format.
-    if hdr == true {
-      guard pixelFormat == nil || pixelFormat == "yuv" else {
-        invokeOnError(.format(.incompatiblePixelFormatWithHDR))
-        return defaultFormat
-      }
-
-      var targetFormats = [kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
-                           kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange]
-      if enableBufferCompression {
-        // If we enable buffer compression, try to use a lossless compressed YUV format first, otherwise fall back to the others.
-        targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange, at: 0)
-      }
-
-      // Find the best matching format
-      guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
-        invokeOnError(.format(.invalidHdr))
-        return defaultFormat
-      }
-      // YUV 4:2:0 10-bit (compressed/uncompressed)
-      return format
-    }
-
-    // If the user didn't specify a custom pixelFormat, just return the default one.
-    guard let pixelFormat = pixelFormat else {
-      return defaultFormat
-    }
-
-    // If we don't use HDR, we can use any other custom pixel format.
-    switch pixelFormat {
-    case "yuv":
-      // YUV 4:2:0 8-bit (full/limited video colors; uncompressed)
-      var targetFormats = [kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
-                           kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
-      if enableBufferCompression {
-        // YUV 4:2:0 8-bit (full/limited video colors; compressed)
-        targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, at: 0)
-        targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, at: 0)
-      }
-      guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
-        invokeOnError(.device(.pixelFormatNotSupported))
-        return defaultFormat
-      }
-      return format
-    case "rgb":
-      // RGBA 8-bit (uncompressed)
-      var targetFormats = [kCVPixelFormatType_32BGRA]
-      if enableBufferCompression {
-        // RGBA 8-bit (compressed)
-        targetFormats.insert(kCVPixelFormatType_Lossless_32BGRA, at: 0)
-      }
-      guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
-        invokeOnError(.device(.pixelFormatNotSupported))
-        return defaultFormat
-      }
-      return format
-    case "native":
-      return defaultFormat
-    default:
-      invokeOnError(.parameter(.invalid(unionName: "pixelFormat", receivedValue: pixelFormat as String)))
-      return defaultFormat
-    }
-  }
-
-  // pragma MARK: Configure Device
-
-  /**
-   Configures the Video Device with the given FPS and HDR modes.
-   */
-  final func configureDevice() {
-    ReactLogger.log(level: .info, message: "Configuring Device...")
-    guard let device = videoDeviceInput?.device else {
-      invokeOnError(.session(.cameraNotReady))
-      return
-    }
-
-    do {
-      try device.lockForConfiguration()
-
-      // Configure FPS
-      if let fps = fps?.int32Value {
-        let supportsGivenFps = device.activeFormat.videoSupportedFrameRateRanges.contains { range in
-          return range.includes(fps: Double(fps))
-        }
-        if !supportsGivenFps {
-          invokeOnError(.format(.invalidFps(fps: Int(fps))))
-          return
-        }
-
-        let duration = CMTimeMake(value: 1, timescale: fps)
-        device.activeVideoMinFrameDuration = duration
-        device.activeVideoMaxFrameDuration = duration
-      } else {
-        device.activeVideoMinFrameDuration = CMTime.invalid
-        device.activeVideoMaxFrameDuration = CMTime.invalid
-      }
-
-      // Configure Low-Light-Boost
-      if lowLightBoost != nil {
-        if lowLightBoost == true && !device.isLowLightBoostSupported {
-          invokeOnError(.device(.lowLightBoostNotSupported))
-          return
-        }
-        device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue
-      }
-
-      device.unlockForConfiguration()
-      ReactLogger.log(level: .info, message: "Device successfully configured!")
-    } catch let error as NSError {
-      invokeOnError(.device(.configureError), cause: error)
-      return
-    }
-  }
-
-  // pragma MARK: Configure Format
-
-  /**
-   Configures the Video Device to find the best matching Format.
-   */
-  final func configureFormat() {
-    ReactLogger.log(level: .info, message: "Configuring Format...")
-    guard let jsFormat = format else {
-      // JS Format was null. Ignore it, use default.
-      return
-    }
-    guard let device = videoDeviceInput?.device else {
-      invokeOnError(.session(.cameraNotReady))
-      return
-    }
-
-    if device.activeFormat.isEqualTo(jsFormat: jsFormat) {
-      ReactLogger.log(level: .info, message: "Already selected active format.")
-      return
-    }
-
-    // get matching format
-    let format = device.formats.first { $0.isEqualTo(jsFormat: jsFormat) }
-    guard let format else {
-      invokeOnError(.format(.invalidFormat))
-      return
-    }
-
-    do {
-      try device.lockForConfiguration()
-      defer {
-        device.unlockForConfiguration()
-      }
-
-      let shouldReconfigurePhotoOutput = device.activeFormat.photoDimensions.toCGSize() != format.photoDimensions.toCGSize()
-      device.activeFormat = format
-
-      // The Photo Output uses the smallest available Dimension by default. We need to configure it for the maximum here
-      if shouldReconfigurePhotoOutput, #available(iOS 16.0, *) {
-        if let photoOutput = photoOutput {
-          photoOutput.maxPhotoDimensions = format.photoDimensions
-        }
-      }
-
-      ReactLogger.log(level: .info, message: "Format successfully configured!")
-    } catch let error as NSError {
-      invokeOnError(.device(.configureError), cause: error)
-      return
-    }
-  }
-
-  // pragma MARK: Notifications/Interruptions
-
-  @objc
-  func sessionRuntimeError(notification: Notification) {
-    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!")
-    guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
-      return
-    }
-
-    invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)
-
-    if isActive {
-      // restart capture session after an error occured
-      CameraQueues.cameraQueue.async {
-        self.captureSession.startRunning()
-      }
-    }
-  }
-}
@@ -1,45 +0,0 @@
-//
-// CameraView+CodeScanner.swift
-// VisionCamera
-//
-// Created by Marc Rousavy on 03.10.23.
-// Copyright © 2023 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-extension CameraView: AVCaptureMetadataOutputObjectsDelegate {
-  public func metadataOutput(_: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from _: AVCaptureConnection) {
-    guard let onCodeScanned = onCodeScanned else {
-      return
-    }
-    guard !metadataObjects.isEmpty else {
-      return
-    }
-
-    // Map codes to JS values
-    let codes = metadataObjects.map { object in
-      var value: String?
-      if let code = object as? AVMetadataMachineReadableCodeObject {
-        value = code.stringValue
-      }
-      let frame = previewView.layerRectConverted(fromMetadataOutputRect: object.bounds)
-
-      return [
-        "type": object.type.descriptor,
-        "value": value as Any,
-        "frame": [
-          "x": frame.origin.x,
-          "y": frame.origin.y,
-          "width": frame.size.width,
-          "height": frame.size.height,
-        ],
-      ]
-    }
-    // Call JS event
-    onCodeScanned([
-      "codes": codes,
-    ])
-  }
-}
@@ -1,93 +1,19 @@
 //
 // CameraView+Focus.swift
-// mrousavy
+// VisionCamera
 //
-// Created by Marc Rousavy on 19.02.21.
-// Copyright © 2021 mrousavy. All rights reserved.
+// Created by Marc Rousavy on 12.10.23.
+// Copyright © 2023 mrousavy. All rights reserved.
 //
 
 import AVFoundation
 import Foundation
 
 extension CameraView {
-  private func convertPreviewCoordinatesToCameraCoordinates(_ point: CGPoint) -> CGPoint {
-    return previewView.captureDevicePointConverted(fromLayerPoint: point)
-  }
-
   func focus(point: CGPoint, promise: Promise) {
     withPromise(promise) {
-      guard let device = self.videoDeviceInput?.device else {
-        throw CameraError.session(SessionError.cameraNotReady)
-      }
-      if !device.isFocusPointOfInterestSupported {
-        throw CameraError.device(DeviceError.focusNotSupported)
-      }
-
-      // in {0..1} system
-      let normalizedPoint = convertPreviewCoordinatesToCameraCoordinates(point)
-
-      do {
-        try device.lockForConfiguration()
-        defer {
-          device.unlockForConfiguration()
-        }
-
-        // Set Focus
-        if device.isFocusPointOfInterestSupported {
-          device.focusPointOfInterest = normalizedPoint
-          device.focusMode = .autoFocus
-        }
-
-        // Set Exposure
-        if device.isExposurePointOfInterestSupported {
-          device.exposurePointOfInterest = normalizedPoint
-          device.exposureMode = .autoExpose
-        }
-
-        // Remove any existing listeners
-        NotificationCenter.default.removeObserver(self,
-                                                  name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
-                                                  object: nil)
-
-        // Listen for focus completion
-        device.isSubjectAreaChangeMonitoringEnabled = true
-        NotificationCenter.default.addObserver(self,
-                                               selector: #selector(subjectAreaDidChange),
-                                               name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
-                                               object: nil)
-        return nil
-      } catch {
-        throw CameraError.device(DeviceError.configureError)
-      }
+      try cameraSession.focus(point: point)
+      return nil
     }
   }
-
-  @objc
-  func subjectAreaDidChange(notification _: NSNotification) {
-    guard let device = videoDeviceInput?.device else {
-      invokeOnError(.session(.cameraNotReady))
-      return
-    }
-
-    do {
-      try device.lockForConfiguration()
-      defer {
-        device.unlockForConfiguration()
-      }
-
-      // Reset Focus to continuous/auto
-      if device.isFocusPointOfInterestSupported {
-        device.focusMode = .continuousAutoFocus
-      }
-
-      // Reset Exposure to continuous/auto
-      if device.isExposurePointOfInterestSupported {
-        device.exposureMode = .continuousAutoExposure
-      }
-
-      // Disable listeners
-      device.isSubjectAreaChangeMonitoringEnabled = false
-    } catch {
-      invokeOnError(.device(.configureError))
-    }
-  }
 }
@@ -1,45 +0,0 @@
-//
-// CameraView+Orientation.swift
-// VisionCamera
-//
-// Created by Marc Rousavy on 04.01.22.
-// Copyright © 2022 mrousavy. All rights reserved.
-//
-
-import Foundation
-import UIKit
-
-extension CameraView {
-  /// Orientation of the input connection (preview)
-  private var inputOrientation: UIInterfaceOrientation {
-    return .portrait
-  }
-
-  // Orientation of the output connections (photo, video, frame processor)
-  var outputOrientation: UIInterfaceOrientation {
-    if let userOrientation = orientation as String?,
-       let parsedOrientation = try? UIInterfaceOrientation(withString: userOrientation) {
-      // user is overriding output orientation
-      return parsedOrientation
-    } else {
-      // use same as input orientation
-      return inputOrientation
-    }
-  }
-
-  func updateOrientation() {
-    // Updates the Orientation for all rotable
-    let isMirrored = videoDeviceInput?.device.position == .front
-
-    let connectionOrientation = outputOrientation
-    captureSession.outputs.forEach { output in
-      output.connections.forEach { connection in
-        if connection.isVideoMirroringSupported {
-          connection.automaticallyAdjustsVideoMirroring = false
-          connection.isVideoMirrored = isMirrored
-        }
-        connection.setInterfaceOrientation(connectionOrientation)
-      }
-    }
-  }
-}
@@ -11,268 +11,42 @@ import AVFoundation
 // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
 
 extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
-  /**
-   Starts a video + audio recording with a custom Asset Writer.
-   */
-  func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
-    CameraQueues.cameraQueue.async {
-      ReactLogger.log(level: .info, message: "Starting Video recording...")
-      let callback = Callback(jsCallbackFunc)
-
-      var fileType = AVFileType.mov
-      if let fileTypeOption = options["fileType"] as? String {
-        guard let parsed = try? AVFileType(withString: fileTypeOption) else {
-          callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption)))
-          return
-        }
-        fileType = parsed
-      }
-
-      let errorPointer = ErrorPointer(nilLiteral: ())
-      let fileExtension = fileType.descriptor ?? "mov"
-      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
-        callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee)
-        return
-      }
-
-      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
-      let tempURL = URL(string: "file://\(tempFilePath)")!
-
-      if let flashMode = options["flash"] as? String {
-        // use the torch as the video's flash
-        self.setTorchMode(flashMode)
-      }
-
-      guard let videoOutput = self.videoOutput else {
-        if self.video?.boolValue == true {
-          callback.reject(error: .session(.cameraNotReady))
-          return
-        } else {
-          callback.reject(error: .capture(.videoNotEnabled))
-          return
-        }
-      }
-      guard let videoInput = self.videoDeviceInput else {
-        callback.reject(error: .session(.cameraNotReady))
-        return
-      }
-
-      // TODO: The startRecording() func cannot be async because RN doesn't allow
-      // both a callback and a Promise in a single function. Wait for TurboModules?
-      // This means that any errors that occur in this function have to be delegated through
-      // the callback, but I'd prefer for them to throw for the original function instead.
-
-      let enableAudio = self.audio?.boolValue == true
-
-      let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
-        defer {
-          if enableAudio {
-            CameraQueues.audioQueue.async {
-              self.deactivateAudioSession()
-            }
-          }
-          if options["flash"] != nil {
-            // Set torch mode back to what it was before if we used it for the video flash.
-            self.setTorchMode(self.torch)
-          }
-        }
-
-        self.recordingSession = nil
-        self.isRecording = false
-        ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
-
-        if let error = error as NSError? {
-          if error.domain == "capture/aborted" {
-            callback.reject(error: .capture(.aborted), cause: error)
-          } else {
-            callback.reject(error: .capture(.unknown(message: "An unknown recording error occured! \(error.description)")), cause: error)
-          }
-        } else {
-          if status == .completed {
-            callback.resolve([
-              "path": recordingSession.url.absoluteString,
-              "duration": recordingSession.duration,
-            ])
-          } else {
-            callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
-          }
-        }
-      }
-
-      let recordingSession: RecordingSession
-      do {
-        recordingSession = try RecordingSession(url: tempURL,
-                                                fileType: fileType,
-                                                completion: onFinish)
-      } catch let error as NSError {
-        callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error)
-        return
-      }
-      self.recordingSession = recordingSession
-
-      var videoCodec: AVVideoCodecType?
-      if let codecString = options["videoCodec"] as? String {
-        videoCodec = AVVideoCodecType(withString: codecString)
-      }
-
-      // Init Video
-      guard var videoSettings = self.recommendedVideoSettings(videoOutput: videoOutput, fileType: fileType, videoCodec: videoCodec),
-            !videoSettings.isEmpty else {
-        callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
-        return
-      }
-
-      // Custom Video Bit Rate (Mbps -> bps)
-      if let videoBitRate = options["videoBitRate"] as? NSNumber {
-        let bitsPerSecond = videoBitRate.doubleValue * 1_000_000
-        videoSettings[AVVideoCompressionPropertiesKey] = [
-          AVVideoAverageBitRateKey: NSNumber(value: bitsPerSecond),
-        ]
-      }
-
-      // get pixel format (420f, 420v, x420)
-      let pixelFormat = CMFormatDescriptionGetMediaSubType(videoInput.device.activeFormat.formatDescription)
-      recordingSession.initializeVideoWriter(withSettings: videoSettings,
-                                             pixelFormat: pixelFormat)
-
-      // Init Audio (optional)
-      if enableAudio {
-        // Activate Audio Session asynchronously
-        CameraQueues.audioQueue.async {
-          self.activateAudioSession()
-        }
-
-        if let audioOutput = self.audioOutput,
-           let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) {
-          recordingSession.initializeAudioWriter(withSettings: audioSettings)
-        }
-      }
-
-      // start recording session with or without audio.
-      do {
-        try recordingSession.startAssetWriter()
-      } catch let error as NSError {
-        callback.reject(error: .capture(.createRecorderError(message: "RecordingSession failed to start asset writer.")), cause: error)
-        return
-      }
-      self.isRecording = true
-    }
-  }
+  func startRecording(options: NSDictionary, callback jsCallback: @escaping RCTResponseSenderBlock) {
+    // Type-safety
+    let callback = Callback(jsCallback)
+
+    do {
+      let options = try RecordVideoOptions(fromJSValue: options)
+
+      // Start Recording with success and error callbacks
+      cameraSession.startRecording(
+        options: options,
+        onVideoRecorded: { video in
+          callback.resolve(video.toJSValue())
+        },
+        onError: { error in
+          callback.reject(error: error)
+        }
+      )
+    } catch {
+      // Some error occured while initializing VideoSettings
+      if let error = error as? CameraError {
+        callback.reject(error: error)
+      } else {
+        callback.reject(error: .capture(.unknown(message: error.localizedDescription)), cause: error as NSError)
+      }
+    }
+  }
 
   func stopRecording(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      self.isRecording = false
-
-      withPromise(promise) {
-        guard let recordingSession = self.recordingSession else {
-          throw CameraError.capture(.noRecordingInProgress)
-        }
-        recordingSession.finish()
-        return nil
-      }
-    }
+    cameraSession.stopRecording(promise: promise)
   }
 
   func pauseRecording(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard self.recordingSession != nil else {
-          // there's no active recording!
-          throw CameraError.capture(.noRecordingInProgress)
-        }
-        self.isRecording = false
-        return nil
-      }
-    }
+    cameraSession.pauseRecording(promise: promise)
  }
 
   func resumeRecording(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard self.recordingSession != nil else {
-          // there's no active recording!
-          throw CameraError.capture(.noRecordingInProgress)
-        }
-        self.isRecording = true
-        return nil
-      }
-    }
+    cameraSession.resumeRecording(promise: promise)
   }
-
-  public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
-    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
-      if captureOutput is AVCaptureVideoDataOutput {
-        if let frameProcessor = frameProcessor {
-          // Call Frame Processor
-          let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
-          frameProcessor.call(frame)
-        }
-      }
-    #endif
-
-    // Record Video Frame/Audio Sample to File
-    if isRecording {
-      guard let recordingSession = recordingSession else {
-        invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
-        return
-      }
-
-      switch captureOutput {
-      case is AVCaptureVideoDataOutput:
-        recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
-      case is AVCaptureAudioDataOutput:
-        let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
-                                          from: audioCaptureSession.masterClock ?? CMClockGetHostTimeClock(),
-                                          to: captureSession.masterClock ?? CMClockGetHostTimeClock())
-        recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
-      default:
-        break
-      }
-    }
-
-    #if DEBUG
-      if captureOutput is AVCaptureVideoDataOutput {
-        // Update FPS Graph per Frame
-        if let fpsGraph = fpsGraph {
-          DispatchQueue.main.async {
-            fpsGraph.onTick(CACurrentMediaTime())
-          }
-        }
-      }
-    #endif
-  }
-
-  private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput,
-                                        fileType: AVFileType,
-                                        videoCodec: AVVideoCodecType?) -> [String: Any]? {
-    if videoCodec != nil {
-      return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec!, assetWriterOutputFileType: fileType)
-    } else {
-      return videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
-    }
-  }
-
-  /**
-   Gets the orientation of the CameraView's images (CMSampleBuffers).
-   */
-  private var bufferOrientation: UIImage.Orientation {
-    guard let cameraPosition = videoDeviceInput?.device.position else {
-      return .up
-    }
-
-    switch outputOrientation {
-    case .portrait:
-      return cameraPosition == .front ? .leftMirrored : .right
-    case .landscapeLeft:
-      return cameraPosition == .front ? .downMirrored : .up
-    case .portraitUpsideDown:
-      return cameraPosition == .front ? .rightMirrored : .left
-    case .landscapeRight:
-      return cameraPosition == .front ? .upMirrored : .down
-    case .unknown:
-      return .up
-    @unknown default:
-      return .up
-    }
-  }
 }
@@ -10,83 +10,6 @@ import AVFoundation
 
 extension CameraView {
   func takePhoto(options: NSDictionary, promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      guard let photoOutput = self.photoOutput,
-            let videoDeviceInput = self.videoDeviceInput else {
-        if self.photo?.boolValue == true {
-          promise.reject(error: .session(.cameraNotReady))
-          return
-        } else {
-          promise.reject(error: .capture(.photoNotEnabled))
-          return
-        }
-      }
-
-      ReactLogger.log(level: .info, message: "Capturing photo...")
-
-      // Create photo settings
-      let photoSettings = AVCapturePhotoSettings()
-
-      // default, overridable settings if high quality capture was enabled
-      if self.enableHighQualityPhotos?.boolValue == true {
-        // TODO: On iOS 16+ this will be removed in favor of maxPhotoDimensions.
-        photoSettings.isHighResolutionPhotoEnabled = true
-        if #available(iOS 13.0, *) {
-          photoSettings.photoQualityPrioritization = .quality
-        }
-      }
-
-      // flash
-      if videoDeviceInput.device.isFlashAvailable, let flash = options["flash"] as? String {
-        guard let flashMode = AVCaptureDevice.FlashMode(withString: flash) else {
-          promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flash)))
-          return
-        }
-        photoSettings.flashMode = flashMode
-      }
-
-      // shutter sound
-      let enableShutterSound = options["enableShutterSound"] as? Bool ?? true
-
-      // depth data
-      photoSettings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled
-      if #available(iOS 12.0, *) {
-        photoSettings.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliveryEnabled
-      }
-
-      // quality prioritization
-      if #available(iOS 13.0, *), let qualityPrioritization = options["qualityPrioritization"] as? String {
-        guard let photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization(withString: qualityPrioritization) else {
-          promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: qualityPrioritization)))
-          return
-        }
-        photoSettings.photoQualityPrioritization = photoQualityPrioritization
-      }
-
-      // photo size is always the one selected in the format
-      if #available(iOS 16.0, *) {
-        photoSettings.maxPhotoDimensions = photoOutput.maxPhotoDimensions
-      }
-
-      // red-eye reduction
-      if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
-        photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction
-      }
-
-      // stabilization
-      if let enableAutoStabilization = options["enableAutoStabilization"] as? Bool {
-        photoSettings.isAutoStillImageStabilizationEnabled = enableAutoStabilization
-      }
-
-      // distortion correction
-      if #available(iOS 14.1, *), let enableAutoDistortionCorrection = options["enableAutoDistortionCorrection"] as? Bool {
-        photoSettings.isAutoContentAwareDistortionCorrectionEnabled = enableAutoDistortionCorrection
-      }
-
-      photoOutput.capturePhoto(with: photoSettings, delegate: PhotoCaptureDelegate(promise: promise, enableShutterSound: enableShutterSound))
-
-      // Assume that `takePhoto` is always called with the same parameters, so prepare the next call too.
-      photoOutput.setPreparedPhotoSettingsArray([photoSettings], completionHandler: nil)
-    }
+    cameraSession.takePhoto(options: options, promise: promise)
   }
 }
@@ -1,51 +0,0 @@
-//
-// CameraView+Torch.swift
-// VisionCamera
-//
-// Created by Marc Rousavy on 20.07.23.
-// Copyright © 2023 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-extension CameraView {
-  final func setTorchMode(_ torchMode: String) {
-    guard let device = videoDeviceInput?.device else {
-      invokeOnError(.session(.cameraNotReady))
-      return
-    }
-    guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
-      invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
-      return
-    }
-    if !captureSession.isRunning {
-      torchMode = .off
-    }
-    if device.torchMode == torchMode {
-      // no need to run the whole lock/unlock bs
-      return
-    }
-    if !device.hasTorch || !device.isTorchAvailable {
-      if torchMode == .off {
-        // ignore it, when it's off and not supported, it's off.
-        return
-      } else {
-        // torch mode is .auto or .on, but no torch is available.
-        invokeOnError(.device(.flashUnavailable))
-        return
-      }
-    }
-    do {
-      try device.lockForConfiguration()
-      device.torchMode = torchMode
-      if torchMode == .on {
-        try device.setTorchModeOn(level: 1.0)
-      }
-      device.unlockForConfiguration()
-    } catch let error as NSError {
-      invokeOnError(.device(.configureError), cause: error)
-      return
-    }
-  }
-}
@@ -7,34 +7,20 @@
 //
 
 import Foundation
 import UIKit
 
 extension CameraView {
-  var minAvailableZoom: CGFloat {
-    return videoDeviceInput?.device.minAvailableVideoZoomFactor ?? 1
-  }
-
-  var maxAvailableZoom: CGFloat {
-    return videoDeviceInput?.device.activeFormat.videoMaxZoomFactor ?? 1
-  }
-
   @objc
   final func onPinch(_ gesture: UIPinchGestureRecognizer) {
-    guard let device = videoDeviceInput?.device else {
-      return
-    }
-
-    let scale = max(min(gesture.scale * pinchScaleOffset, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0))
+    let scale = max(min(gesture.scale * pinchScaleOffset, cameraSession.maxZoom), CGFloat(1.0))
     if gesture.state == .ended {
      pinchScaleOffset = scale
       return
     }
 
-    do {
-      try device.lockForConfiguration()
-      device.videoZoomFactor = scale
-      device.unlockForConfiguration()
-    } catch {
-      invokeOnError(.device(.configureError))
+    // Update zoom on Camera
+    cameraSession.configure { configuration in
+      configuration.zoom = scale
     }
   }
@@ -50,24 +36,4 @@ extension CameraView {
       self.pinchGestureRecognizer = nil
     }
   }
-
-  @objc
-  final func zoom(factor: CGFloat, animated: Bool) {
-    guard let device = videoDeviceInput?.device else {
-      return
-    }
-
-    do {
-      try device.lockForConfiguration()
-      let clamped = max(min(factor, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0))
-      if animated {
-        device.ramp(toVideoZoomFactor: clamped, withRate: 1)
-      } else {
-        device.videoZoomFactor = clamped
-      }
-      device.unlockForConfiguration()
-    } catch {
-      invokeOnError(.device(.configureError))
-    }
-  }
 }
@ -10,50 +10,36 @@ import AVFoundation
import Foundation
import UIKit

//
// TODOs for the CameraView which are currently too hard to implement either because of AVFoundation's limitations, or my brain capacity
//
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)

//
// CameraView+TakePhoto
// TODO: Photo HDR

private let propsThatRequireReconfiguration = ["cameraId",
                                               "enableDepthData",
                                               "enableHighQualityPhotos",
                                               "enablePortraitEffectsMatteDelivery",
                                               "photo",
                                               "video",
                                               "enableFrameProcessor",
                                               "hdr",
                                               "pixelFormat",
                                               "codeScannerOptions"]
private let propsThatRequireDeviceReconfiguration = ["fps",
                                                     "lowLightBoost"]

// MARK: - CameraView

public final class CameraView: UIView {
public final class CameraView: UIView, CameraSessionDelegate {
  // pragma MARK: React Properties
  // props that require reconfiguring
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enableHighQualityPhotos: NSNumber? // nullable bool
  @objc var enableHighQualityPhotos = false
  @objc var enablePortraitEffectsMatteDelivery = false
  @objc var enableBufferCompression = false
  // use cases
  @objc var photo: NSNumber? // nullable bool
  @objc var video: NSNumber? // nullable bool
  @objc var audio: NSNumber? // nullable bool
  @objc var photo = false
  @objc var video = false
  @objc var audio = false
  @objc var enableFrameProcessor = false
  @objc var codeScannerOptions: NSDictionary?
  @objc var pixelFormat: NSString?
  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var fps: NSNumber?
  @objc var hdr: NSNumber? // nullable bool
  @objc var lowLightBoost: NSNumber? // nullable bool
  @objc var hdr = false
  @objc var lowLightBoost = false
  @objc var orientation: NSString?
  // other props
  @objc var isActive = false
@ -63,7 +49,8 @@ public final class CameraView: UIView {
  @objc var videoStabilizationMode: NSString?
  @objc var resizeMode: NSString = "cover" {
    didSet {
      previewView.resizeMode = ResizeMode(fromTypeScriptUnion: resizeMode as String)
      let parsed = try? ResizeMode(jsValue: resizeMode as String)
      previewView.resizeMode = parsed ?? .cover
    }
  }
@ -84,20 +71,9 @@ public final class CameraView: UIView {
  }

  // pragma MARK: Internal Properties
  var cameraSession: CameraSession
  var isMounted = false
  var isReady = false
  // Capture Session
  let captureSession = AVCaptureSession()
  let audioCaptureSession = AVCaptureSession()
  // Inputs & Outputs
  var videoDeviceInput: AVCaptureDeviceInput?
  var audioDeviceInput: AVCaptureDeviceInput?
  var photoOutput: AVCapturePhotoOutput?
  var videoOutput: AVCaptureVideoDataOutput?
  var audioOutput: AVCaptureAudioDataOutput?
  // CameraView+RecordView (+ Frame Processor)
  var isRecording = false
  var recordingSession: RecordingSession?
  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
    @objc public var frameProcessor: FrameProcessor?
  #endif
@ -110,30 +86,16 @@ public final class CameraView: UIView {
  var fpsGraph: RCTFPSGraph?
  #endif

  /// Returns whether the AVCaptureSession is currently running (reflected by isActive)
  var isRunning: Bool {
    return captureSession.isRunning
  }

  // pragma MARK: Setup

  override public init(frame: CGRect) {
    previewView = PreviewView(frame: frame, session: captureSession)
    // Create CameraSession
    cameraSession = CameraSession()
    previewView = cameraSession.createPreviewView(frame: frame)
    super.init(frame: frame)
    cameraSession.delegate = self

    addSubview(previewView)

    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: captureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: audioCaptureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(audioSessionInterrupted),
                                           name: AVAudioSession.interruptionNotification,
                                           object: AVAudioSession.sharedInstance)
  }

  @available(*, unavailable)
@ -141,18 +103,6 @@ public final class CameraView: UIView {
    fatalError("init(coder:) is not implemented.")
  }

  deinit {
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: captureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: audioCaptureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: AVAudioSession.interruptionNotification,
                                              object: AVAudioSession.sharedInstance)
  }

  override public func willMove(toSuperview newSuperview: UIView?) {
    super.willMove(toSuperview: newSuperview)
@ -169,89 +119,111 @@ public final class CameraView: UIView {
    previewView.bounds = bounds
  }

  func getPixelFormat() -> PixelFormat {
    // TODO: Use ObjC RCT enum parser for this
    if let pixelFormat = pixelFormat as? String {
      do {
        return try PixelFormat(jsValue: pixelFormat)
      } catch {
        if let error = error as? CameraError {
          onError(error)
        } else {
          onError(.unknown(message: error.localizedDescription, cause: error as NSError))
        }
      }
    }
    return .native
  }

  func getTorch() -> Torch {
    // TODO: Use ObjC RCT enum parser for this
    if let torch = try? Torch(jsValue: torch) {
      return torch
    }
    return .off
  }

  // pragma MARK: Props updating
  override public final func didSetProps(_ changedProps: [String]!) {
    ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
    let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
    let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
    let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }
    let shouldReconfigureAudioSession = changedProps.contains("audio")
    ReactLogger.log(level: .info, message: "Updating \(changedProps.count) props: [\(changedProps.joined(separator: ", "))]")

    let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice
    cameraSession.configure { config in
      // Input Camera Device
      config.cameraId = cameraId as? String

      let shouldCheckActive = willReconfigure || changedProps.contains("isActive") || captureSession.isRunning != isActive
      let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
      let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
      let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
      let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")
      // Photo
      if photo {
        config.photo = .enabled(config: CameraConfiguration.Photo(enableHighQualityPhotos: enableHighQualityPhotos,
                                                                  enableDepthData: enableDepthData,
                                                                  enablePortraitEffectsMatte: enablePortraitEffectsMatteDelivery))
      } else {
        config.photo = .disabled
      }

      // Video/Frame Processor
      if video || enableFrameProcessor {
        config.video = .enabled(config: CameraConfiguration.Video(pixelFormat: getPixelFormat(),
                                                                  enableBufferCompression: enableBufferCompression,
                                                                  enableHdr: hdr,
                                                                  enableFrameProcessor: enableFrameProcessor))
      } else {
        config.video = .disabled
      }

      // Audio
      if audio {
        config.audio = .enabled(config: CameraConfiguration.Audio())
      } else {
        config.audio = .disabled
      }

      // Code Scanner
      if let codeScannerOptions {
        let codeScanner = try CodeScanner(fromJsValue: codeScannerOptions)
        config.codeScanner = .enabled(config: codeScanner)
      } else {
        config.codeScanner = .disabled
      }

      // Orientation
      if let jsOrientation = orientation as? String {
        let orientation = try Orientation(jsValue: jsOrientation)
        config.orientation = orientation
      } else {
        config.orientation = .portrait
      }

      // Format
      if let jsFormat = format {
        let format = try CameraDeviceFormat(jsValue: jsFormat)
        config.format = format
      } else {
        config.format = nil
      }

      // Side-Props
      config.fps = fps?.int32Value
      config.enableLowLightBoost = lowLightBoost
      config.torch = getTorch()

      // Zoom
      config.zoom = zoom.doubleValue

      // isActive
      config.isActive = isActive
    }

    // Store `zoom` offset for native pinch-gesture
    if changedProps.contains("zoom") {
      pinchScaleOffset = zoom.doubleValue
    }

    // Set up Debug FPS Graph
    if changedProps.contains("enableFpsGraph") {
      DispatchQueue.main.async {
        self.setupFpsGraph()
      }
    }

    if shouldReconfigure ||
      shouldReconfigureAudioSession ||
      shouldCheckActive ||
      shouldUpdateTorch ||
      shouldUpdateZoom ||
      shouldReconfigureFormat ||
      shouldReconfigureDevice ||
      shouldUpdateVideoStabilization ||
      shouldUpdateOrientation {
      CameraQueues.cameraQueue.async {
        // Video Configuration
        if shouldReconfigure {
          self.configureCaptureSession()
        }
        if shouldReconfigureFormat {
          self.configureFormat()
        }
        if shouldReconfigureDevice {
          self.configureDevice()
        }
        if shouldUpdateVideoStabilization, let videoStabilizationMode = self.videoStabilizationMode as String? {
          self.captureSession.setVideoStabilizationMode(videoStabilizationMode)
        }

        if shouldUpdateZoom {
          let zoomClamped = max(min(CGFloat(self.zoom.doubleValue), self.maxAvailableZoom), self.minAvailableZoom)
          self.zoom(factor: zoomClamped, animated: false)
          self.pinchScaleOffset = zoomClamped
        }

        if shouldCheckActive && self.captureSession.isRunning != self.isActive {
          if self.isActive {
            ReactLogger.log(level: .info, message: "Starting Session...")
            self.captureSession.startRunning()
            ReactLogger.log(level: .info, message: "Started Session!")
          } else {
            ReactLogger.log(level: .info, message: "Stopping Session...")
            self.captureSession.stopRunning()
            ReactLogger.log(level: .info, message: "Stopped Session!")
          }
        }

        if shouldUpdateOrientation {
          self.updateOrientation()
        }

        // This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
        if shouldUpdateTorch {
          CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
            self.setTorchMode(self.torch)
          }
        }
      }

      // Audio Configuration
      if shouldReconfigureAudioSession {
        CameraQueues.audioQueue.async {
          self.configureAudioSession()
        }
      }
    }
  }
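Everything above now funnels into a single cameraSession.configure { } call, which is also what makes the session usable without React Native (benefit 3 in the PR description). A hedged sketch of a plain native caller follows; the device lookup is an assumption, and only CameraSession, configure and the CameraConfiguration fields actually appear in this diff:

import AVFoundation

// Hypothetical native (non-React-Native) integration, sketched
// against the CameraSession API introduced in this PR.
let session = CameraSession()

// Assumption: picking the default wide-angle camera; the PR itself
// only shows cameraId being passed through from JS props.
let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)

session.configure { config in
  config.cameraId = device?.uniqueID
  config.photo = .enabled(config: CameraConfiguration.Photo())
  config.zoom = 1.0
  config.isActive = true
  // All of the above is applied in one batched lock/commit,
  // which is where the 1.5 s -> 240 ms startup win comes from.
}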
  func setupFpsGraph() {
@ -269,12 +241,16 @@ public final class CameraView: UIView {
  }

  // pragma MARK: Event Invokers
  final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {

  func onError(_ error: CameraError) {
    ReactLogger.log(level: .error, message: "Invoking onError(): \(error.message)")
    guard let onError = onError else { return }
    guard let onError = onError else {
      return
    }

    var causeDictionary: [String: Any]?
    if let cause = cause {
    if case let .unknown(_, cause) = error,
       let cause = cause {
      causeDictionary = [
        "code": cause.code,
        "domain": cause.domain,
@ -289,9 +265,58 @@ public final class CameraView: UIView {
      ])
  }

  final func invokeOnInitialized() {
  func onSessionInitialized() {
    ReactLogger.log(level: .info, message: "Camera initialized!")
    guard let onInitialized = onInitialized else { return }
    guard let onInitialized = onInitialized else {
      return
    }
    onInitialized([String: Any]())
  }

  func onFrame(sampleBuffer: CMSampleBuffer) {
    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
      if let frameProcessor = frameProcessor {
        // Call Frame Processor
        let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
        frameProcessor.call(frame)
      }
    #endif

    #if DEBUG
      if let fpsGraph {
        fpsGraph.onTick(CACurrentMediaTime())
      }
    #endif
  }

  func onCodeScanned(codes: [CameraSession.Code]) {
    guard let onCodeScanned = onCodeScanned else {
      return
    }
    onCodeScanned([
      "codes": codes.map { $0.toJSValue() },
    ])
  }

  /**
   Gets the orientation of the CameraView's images (CMSampleBuffers).
   */
  private var bufferOrientation: UIImage.Orientation {
    guard let cameraPosition = cameraSession.videoDeviceInput?.device.position else {
      return .up
    }
    let orientation = cameraSession.configuration?.orientation ?? .portrait

    // TODO: I think this is wrong.
    switch orientation {
    case .portrait:
      return cameraPosition == .front ? .leftMirrored : .right
    case .landscapeLeft:
      return cameraPosition == .front ? .downMirrored : .up
    case .portraitUpsideDown:
      return cameraPosition == .front ? .rightMirrored : .left
    case .landscapeRight:
      return cameraPosition == .front ? .upMirrored : .down
    }
  }
}
@ -25,19 +25,19 @@ RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(installFrameProcessorBindings);
RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL);
RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString);
RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableHighQualityPhotos, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enableHighQualityPhotos, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableBufferCompression, BOOL);
// use cases
RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(photo, BOOL);
RCT_EXPORT_VIEW_PROPERTY(video, BOOL);
RCT_EXPORT_VIEW_PROPERTY(audio, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableFrameProcessor, BOOL);
// device format
RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(hdr, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(hdr, BOOL);
RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, BOOL);
RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(pixelFormat, NSString);
// other props
@ -38,6 +38,10 @@ final class CameraViewManager: RCTViewManager {
  #endif
  }

  // TODO: The startRecording() func cannot be async because RN doesn't allow
  // both a callback and a Promise in a single function. Wait for TurboModules?
  // This means that any errors that occur in this function have to be delegated through
  // the callback, but I'd prefer for them to throw for the original function instead.
  @objc
  final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
    let component = getCameraView(withTag: node)
231 package/ios/Core/CameraConfiguration.swift Normal file
@ -0,0 +1,231 @@
//
// CameraConfiguration.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

// MARK: - CameraConfiguration

class CameraConfiguration {
  // pragma MARK: Configuration Props

  // Input
  var cameraId: String?

  // Outputs
  var photo: OutputConfiguration<Photo> = .disabled
  var video: OutputConfiguration<Video> = .disabled
  var codeScanner: OutputConfiguration<CodeScanner> = .disabled

  // Orientation
  var orientation: Orientation = .portrait

  // Format
  var format: CameraDeviceFormat?

  // Side-Props
  var fps: Int32?
  var enableLowLightBoost = false
  var torch: Torch = .off

  // Zoom
  var zoom: CGFloat?

  // isActive (Start/Stop)
  var isActive = false

  // Audio Session
  var audio: OutputConfiguration<Audio> = .disabled

  init(copyOf other: CameraConfiguration?) {
    if let other {
      // copy over all values
      cameraId = other.cameraId
      photo = other.photo
      video = other.video
      codeScanner = other.codeScanner
      orientation = other.orientation
      format = other.format
      fps = other.fps
      enableLowLightBoost = other.enableLowLightBoost
      torch = other.torch
      zoom = other.zoom
      isActive = other.isActive
      audio = other.audio
    } else {
      // self will just be initialized with the default values.
    }
  }

  // pragma MARK: Types

  struct Difference {
    let inputChanged: Bool
    let outputsChanged: Bool
    let orientationChanged: Bool
    let formatChanged: Bool
    let sidePropsChanged: Bool
    let zoomChanged: Bool

    let audioSessionChanged: Bool

    /**
     Returns `true` when props that affect the AVCaptureSession configuration (i.e. props that require beginConfiguration()) have changed.
     [`inputChanged`, `outputsChanged`, `orientationChanged`]
     */
    var isSessionConfigurationDirty: Bool {
      return inputChanged || outputsChanged || orientationChanged
    }

    /**
     Returns `true` when props that affect the AVCaptureDevice configuration (i.e. props that require lockForConfiguration()) have changed.
     [`formatChanged`, `sidePropsChanged`, `zoomChanged`]
     */
    var isDeviceConfigurationDirty: Bool {
      return isSessionConfigurationDirty || formatChanged || sidePropsChanged || zoomChanged
    }

    init(between left: CameraConfiguration?, and right: CameraConfiguration) {
      // cameraId
      inputChanged = left?.cameraId != right.cameraId
      // photo, video, codeScanner
      outputsChanged = inputChanged || left?.photo != right.photo || left?.video != right.video || left?.codeScanner != right.codeScanner
      // orientation
      orientationChanged = outputsChanged || left?.orientation != right.orientation
      // format (depends on cameraId)
      formatChanged = inputChanged || left?.format != right.format
      // side-props (depends on format)
      sidePropsChanged = formatChanged || left?.fps != right.fps || left?.enableLowLightBoost != right.enableLowLightBoost || left?.torch != right.torch
      // zoom (depends on format)
      zoomChanged = formatChanged || left?.zoom != right.zoom

      // audio session
      audioSessionChanged = left?.audio != right.audio
    }
  }
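A small illustration of the Difference semantics, with hypothetical values; only the types and flags above are from the diff:

// Hypothetical illustration of Difference semantics.
let before = CameraConfiguration(copyOf: nil)
before.zoom = 1.0

let after = CameraConfiguration(copyOf: before)
after.zoom = 2.0 // only a device-level prop changes

let diff = CameraConfiguration.Difference(between: before, and: after)
// diff.zoomChanged == true and diff.isDeviceConfigurationDirty == true,
// but diff.isSessionConfigurationDirty == false, so no expensive
// beginConfiguration()/commitConfiguration() pass is needed.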
  enum OutputConfiguration<T: Equatable>: Equatable {
    case disabled
    case enabled(config: T)

    public static func == (lhs: OutputConfiguration, rhs: OutputConfiguration) -> Bool {
      switch (lhs, rhs) {
      case (.disabled, .disabled):
        return true
      case let (.enabled(a), .enabled(b)):
        return a == b
      default:
        return false
      }
    }
  }

  /**
   A Photo Output configuration
   */
  struct Photo: Equatable {
    var enableHighQualityPhotos = false
    var enableDepthData = false
    var enablePortraitEffectsMatte = false
  }

  /**
   A Video Output configuration
   */
  struct Video: Equatable {
    var pixelFormat: PixelFormat = .native
    var enableBufferCompression = false
    var enableHdr = false
    var enableFrameProcessor = false
  }

  /**
   An Audio Output configuration
   */
  struct Audio: Equatable {
    // no props for audio at the moment
  }
}

extension CameraConfiguration.Video {
  /**
   Returns the pixel format that should be used for the given AVCaptureVideoDataOutput.
   If HDR is enabled, this will return YUV 4:2:0 10-bit.
   If HDR is disabled, this will return whatever the user specified as a pixelFormat, or the most efficient format as a fallback.
   */
  func getPixelFormat(for videoOutput: AVCaptureVideoDataOutput) throws -> OSType {
    // as per documentation, the first value is always the most efficient format
    var defaultFormat = videoOutput.availableVideoPixelFormatTypes.first!
    if enableBufferCompression {
      // use compressed format instead if we enabled buffer compression
      if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange &&
        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange) {
        // YUV 4:2:0 8-bit (limited video colors; compressed)
        defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange
      }
      if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange &&
        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange) {
        // YUV 4:2:0 8-bit (full video colors; compressed)
        defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange
      }
    }

    // If the user enabled HDR, we can only use the YUV 4:2:0 10-bit pixel format.
    if enableHdr == true {
      guard pixelFormat == .native || pixelFormat == .yuv else {
        throw CameraError.format(.incompatiblePixelFormatWithHDR)
      }

      var targetFormats = [kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
                           kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange]
      if enableBufferCompression {
        // If we enable buffer compression, try to use a lossless compressed YUV format first, otherwise fall back to the others.
        targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange, at: 0)
      }

      // Find the best matching format
      guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
        throw CameraError.format(.invalidHdr)
      }
      // YUV 4:2:0 10-bit (compressed/uncompressed)
      return format
    }

    // If we don't use HDR, we can use any other custom pixel format.
    switch pixelFormat {
    case .yuv:
      // YUV 4:2:0 8-bit (full/limited video colors; uncompressed)
      var targetFormats = [kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                           kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
      if enableBufferCompression {
        // YUV 4:2:0 8-bit (full/limited video colors; compressed)
        targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, at: 0)
        targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, at: 0)
      }
      guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
        throw CameraError.device(.pixelFormatNotSupported)
      }
      return format
    case .rgb:
      // RGBA 8-bit (uncompressed)
      var targetFormats = [kCVPixelFormatType_32BGRA]
      if enableBufferCompression {
        // RGBA 8-bit (compressed)
        targetFormats.insert(kCVPixelFormatType_Lossless_32BGRA, at: 0)
      }
      guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
        throw CameraError.device(.pixelFormatNotSupported)
      }
      return format
    case .native:
      return defaultFormat
    case .unknown:
      throw CameraError.parameter(.invalid(unionName: "pixelFormat", receivedValue: "unknown"))
    }
  }
}
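For context, this helper is consumed when the video data output is wired up; the selected OSType goes straight into videoSettings. A condensed sketch of that call site (it appears in full in CameraSession+Configuration.swift below):

// Condensed from the configureOutputs() call site later in this diff.
func makeVideoOutput(for video: CameraConfiguration.Video) throws -> AVCaptureVideoDataOutput {
  let videoOutput = AVCaptureVideoDataOutput()
  // getPixelFormat(for:) picks the HDR/compressed/user-requested format as documented above.
  let pixelFormatType = try video.getPixelFormat(for: videoOutput)
  videoOutput.videoSettings = [
    String(kCVPixelBufferPixelFormatTypeKey): pixelFormatType,
  ]
  return videoOutput
}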
@ -175,7 +175,7 @@ enum CaptureError {
  case recordingInProgress
  case noRecordingInProgress
  case fileError
  case createTempFileError
  case createTempFileError(message: String? = nil)
  case createRecorderError(message: String? = nil)
  case videoNotEnabled
  case photoNotEnabled
@ -213,8 +213,8 @@
      return "There was no active video recording in progress! Did you call stopRecording() twice?"
    case .fileError:
      return "An unexpected File IO error occurred!"
    case .createTempFileError:
      return "Failed to create a temporary file!"
    case let .createTempFileError(message: message):
      return "Failed to create a temporary file! \(message ?? "(no additional message)")"
    case let .createRecorderError(message: message):
      return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")"
    case .videoNotEnabled:
@ -264,7 +264,7 @@ enum CameraError: Error {
  case session(_ id: SessionError)
  case capture(_ id: CaptureError)
  case codeScanner(_ id: CodeScannerError)
  case unknown(message: String? = nil)
  case unknown(message: String? = nil, cause: NSError? = nil)

  var code: String {
    switch self {
@ -303,8 +303,17 @@ enum CameraError: Error {
      return id.message
    case let .codeScanner(id: id):
      return id.message
    case let .unknown(message: message):
      return message ?? "An unexpected error occurred."
    case let .unknown(message: message, cause: cause):
      return message ?? cause?.description ?? "An unexpected error occurred."
    }
  }

  var cause: NSError? {
    switch self {
    case let .unknown(message: _, cause: cause):
      return cause
    default:
      return nil
    }
  }
}
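The new cause: parameter is what feeds the causeDictionary that onError() serializes for JS. A short, hypothetical example of wrapping an NSError (the error values here are made up):

// Hypothetical: wrapping an NSError into the new unknown(message:cause:) case.
let nsError = NSError(domain: "AVFoundationErrorDomain", code: -11800, userInfo: nil)
let error = CameraError.unknown(message: nil, cause: nsError)

print(error.message)       // falls back to cause.description
print(error.cause?.domain) // "AVFoundationErrorDomain"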
@ -10,7 +10,7 @@ import Foundation
@objc
public class CameraQueues: NSObject {
  /// The serial execution queue for the camera preview layer (input stream) as well as output processing of photos.
  /// The serial execution queue for camera configuration and setup.
  @objc public static let cameraQueue = DispatchQueue(label: "mrousavy/VisionCamera.main",
                                                      qos: .userInteractive,
                                                      attributes: [],
93 package/ios/Core/CameraSession+Audio.swift Normal file
@ -0,0 +1,93 @@
//
// CameraSession+Audio.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CameraSession {
  /**
   Configures the Audio session and activates it. If the session was active, it will briefly be deactivated before configuration.

   The Audio Session will be configured to allow background music, haptics (vibrations) and system sound playback while recording.
   Background audio is allowed to play on speakers or Bluetooth speakers.
   */
  final func activateAudioSession() throws {
    ReactLogger.log(level: .info, message: "Activating Audio Session...")

    do {
      let audioSession = AVAudioSession.sharedInstance()

      try audioSession.updateCategory(AVAudioSession.Category.playAndRecord,
                                      options: [.mixWithOthers,
                                                .allowBluetoothA2DP,
                                                .defaultToSpeaker,
                                                .allowAirPlay])

      if #available(iOS 14.5, *) {
        // prevents the audio session from being interrupted by a phone call
        try audioSession.setPrefersNoInterruptionsFromSystemAlerts(true)
      }

      if #available(iOS 13.0, *) {
        // allow system sounds (notifications, calls, music) to play while recording
        try audioSession.setAllowHapticsAndSystemSoundsDuringRecording(true)
      }

      audioCaptureSession.startRunning()
    } catch let error as NSError {
      ReactLogger.log(level: .error, message: "Failed to activate audio session! Error \(error.code): \(error.description)")
      switch error.code {
      case 561_017_449:
        throw CameraError.session(.audioInUseByOtherApp)
      default:
        throw CameraError.session(.audioSessionFailedToActivate)
      }
    }
  }

  final func deactivateAudioSession() {
    ReactLogger.log(level: .info, message: "Deactivating Audio Session...")

    audioCaptureSession.stopRunning()
  }

  @objc
  func audioSessionInterrupted(notification: Notification) {
    ReactLogger.log(level: .error, message: "Audio Session Interruption Notification!")
    guard let userInfo = notification.userInfo,
          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
      return
    }

    // TODO: Add JS-Event for Audio Session interruptions?
    switch type {
    case .began:
      // Something interrupted our Audio Session, stop recording audio.
      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
    case .ended:
      ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
      guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
      let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
      if options.contains(.shouldResume) {
        // Try resuming if possible
        if isRecording {
          CameraQueues.audioQueue.async {
            ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
            // restart audio session because interruption is over
            try? self.activateAudioSession()
          }
        }
      } else {
        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
      }
    @unknown default:
      ()
    }
  }
}
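Note that activateAudioSession() is deliberately kept off the camera queue; the recording path later in this diff hops to audioQueue first, so audio setup never blocks camera startup. In condensed, sketch form:

// Condensed from the startRecording() flow later in this diff: audio is
// activated lazily, on the audio queue, only when a recording needs it.
extension CameraSession {
  func activateAudioForRecording() {
    CameraQueues.audioQueue.async {
      do {
        try self.activateAudioSession()
      } catch {
        // startRecording() routes failures through onConfigureError(error)
        ReactLogger.log(level: .error, message: "Audio activation failed: \(error)")
      }
    }
  }
}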
80 package/ios/Core/CameraSession+CodeScanner.swift Normal file
@ -0,0 +1,80 @@
//
// CameraSession+CodeScanner.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CameraSession: AVCaptureMetadataOutputObjectsDelegate {
  public func metadataOutput(_: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from _: AVCaptureConnection) {
    guard let onCodeScanned = delegate?.onCodeScanned else {
      // No delegate callback
      return
    }
    guard !metadataObjects.isEmpty else {
      // No codes detected
      return
    }
    guard let device = videoDeviceInput?.device else {
      // No cameraId set
      return
    }
    let size = device.activeFormat.videoDimensions

    // Map codes to JS values
    let codes = metadataObjects.map { object in
      var value: String?
      if let code = object as? AVMetadataMachineReadableCodeObject {
        value = code.stringValue
      }
      let x = object.bounds.origin.x * Double(size.width)
      let y = object.bounds.origin.y * Double(size.height)
      let w = object.bounds.width * Double(size.width)
      let h = object.bounds.height * Double(size.height)
      let frame = CGRect(x: x, y: y, width: w, height: h)

      return Code(type: object.type, value: value, frame: frame)
    }

    // Call delegate (JS) event
    onCodeScanned(codes)
  }

  /**
   A scanned QR/Barcode.
   */
  struct Code {
    /**
     Type of the scanned Code
     */
    let type: AVMetadataObject.ObjectType
    /**
     Decoded value of the code
     */
    let value: String?
    /**
     Location of the code on-screen, relative to the video output layer
     */
    let frame: CGRect

    /**
     Converts this Code to a JS Object (Dictionary)
     */
    func toJSValue() -> [String: AnyHashable] {
      return [
        "type": type.descriptor,
        "value": value,
        "frame": [
          "x": frame.origin.x,
          "y": frame.origin.y,
          "width": frame.size.width,
          "height": frame.size.height,
        ],
      ]
    }
  }
}
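On the receiving end, CameraView.onCodeScanned (shown earlier) serializes these structs via toJSValue() for JS. A native consumer could handle them directly; a hypothetical handler (only the Code struct is from this diff):

// Hypothetical handler for scanned codes on the native side; the
// Code struct and its fields are from this diff, the logging is not.
func handleScannedCodes(_ codes: [CameraSession.Code]) {
  for code in codes {
    // value is nil for metadata objects that aren't machine-readable codes
    print("Scanned \(code.type.rawValue): \(code.value ?? "<no value>") at \(code.frame)")
  }
}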
323 package/ios/Core/CameraSession+Configuration.swift Normal file
@ -0,0 +1,323 @@
//
// CameraSession+Configuration.swift
// VisionCamera
//
// Created by Marc Rousavy on 12.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CameraSession {
  // pragma MARK: Input Device

  /**
   Configures the Input Device (`cameraId`)
   */
  func configureDevice(configuration: CameraConfiguration) throws {
    ReactLogger.log(level: .info, message: "Configuring Input Device...")

    // Remove all inputs
    captureSession.inputs.forEach { input in
      captureSession.removeInput(input)
    }
    videoDeviceInput = nil

    #if targetEnvironment(simulator)
      // iOS Simulators don't have Cameras
      throw CameraError.device(.notAvailableOnSimulator)
    #endif

    guard let cameraId = configuration.cameraId else {
      throw CameraError.device(.noDevice)
    }

    ReactLogger.log(level: .info, message: "Configuring Camera \(cameraId)...")
    // Video Input (Camera Device/Sensor)
    guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
      throw CameraError.device(.invalid)
    }
    let input = try AVCaptureDeviceInput(device: videoDevice)
    guard captureSession.canAddInput(input) else {
      throw CameraError.parameter(.unsupportedInput(inputDescriptor: "video-input"))
    }
    captureSession.addInput(input)
    videoDeviceInput = input

    ReactLogger.log(level: .info, message: "Successfully configured Input Device!")
  }

  // pragma MARK: Outputs

  /**
   Configures all outputs (`photo` + `video` + `codeScanner`)
   */
  func configureOutputs(configuration: CameraConfiguration) throws {
    ReactLogger.log(level: .info, message: "Configuring Outputs...")

    // Remove all outputs
    captureSession.outputs.forEach { output in
      captureSession.removeOutput(output)
    }
    photoOutput = nil
    videoOutput = nil
    audioOutput = nil
    codeScannerOutput = nil

    // Photo Output
    if case let .enabled(photo) = configuration.photo {
      ReactLogger.log(level: .info, message: "Adding Photo output...")
      let photoOutput = AVCapturePhotoOutput()

      // 1. Configure
      if photo.enableHighQualityPhotos {
        // TODO: In iOS 16 this will be removed in favor of maxPhotoDimensions.
        photoOutput.isHighResolutionCaptureEnabled = true
        if #available(iOS 13.0, *) {
          // TODO: Test if this actually does any fusion or if this just calls the captureOutput twice. If the latter, remove it.
          photoOutput.isVirtualDeviceConstituentPhotoDeliveryEnabled = photoOutput.isVirtualDeviceConstituentPhotoDeliverySupported
          photoOutput.maxPhotoQualityPrioritization = .quality
        } else {
          photoOutput.isDualCameraDualPhotoDeliveryEnabled = photoOutput.isDualCameraDualPhotoDeliverySupported
        }
      }
      // TODO: Enable isResponsiveCaptureEnabled? (iOS 17+)
      // TODO: Enable isFastCapturePrioritizationEnabled? (iOS 17+)
      if photo.enableDepthData {
        photoOutput.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliverySupported
      }
      if #available(iOS 12.0, *), photo.enablePortraitEffectsMatte {
        photoOutput.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliverySupported
      }

      // 2. Add
      guard captureSession.canAddOutput(photoOutput) else {
        throw CameraError.parameter(.unsupportedOutput(outputDescriptor: "photo-output"))
      }
      captureSession.addOutput(photoOutput)
      self.photoOutput = photoOutput
    }

    // Video Output + Frame Processor
    if case let .enabled(video) = configuration.video {
      ReactLogger.log(level: .info, message: "Adding Video Data output...")
      let videoOutput = AVCaptureVideoDataOutput()

      // 1. Configure
      videoOutput.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
      videoOutput.alwaysDiscardsLateVideoFrames = true
      let pixelFormatType = try video.getPixelFormat(for: videoOutput)
      videoOutput.videoSettings = [
        String(kCVPixelBufferPixelFormatTypeKey): pixelFormatType,
      ]

      // 2. Add
      guard captureSession.canAddOutput(videoOutput) else {
        throw CameraError.parameter(.unsupportedOutput(outputDescriptor: "video-output"))
      }
      captureSession.addOutput(videoOutput)
      self.videoOutput = videoOutput
    }

    // Code Scanner
    if case let .enabled(codeScanner) = configuration.codeScanner {
      ReactLogger.log(level: .info, message: "Adding Code Scanner output...")
      let codeScannerOutput = AVCaptureMetadataOutput()

      // 1. Configure
      try codeScanner.codeTypes.forEach { type in
        if !codeScannerOutput.availableMetadataObjectTypes.contains(type) {
          throw CameraError.codeScanner(.codeTypeNotSupported(codeType: type.descriptor))
        }
      }
      codeScannerOutput.setMetadataObjectsDelegate(self, queue: CameraQueues.codeScannerQueue)
      codeScannerOutput.metadataObjectTypes = codeScanner.codeTypes
      if let rectOfInterest = codeScanner.regionOfInterest {
        codeScannerOutput.rectOfInterest = rectOfInterest
      }

      // 2. Add
      guard captureSession.canAddOutput(codeScannerOutput) else {
        throw CameraError.codeScanner(.notCompatibleWithOutputs)
      }
      captureSession.addOutput(codeScannerOutput)
      self.codeScannerOutput = codeScannerOutput
    }

    // Done!
    ReactLogger.log(level: .info, message: "Successfully configured all outputs!")
    delegate?.onSessionInitialized()
  }

  // pragma MARK: Orientation

  func configureOrientation(configuration: CameraConfiguration) {
    // Set up orientation and mirroring for all outputs.
    // Note: Photos are only rotated through EXIF tags, and Preview through view transforms
    let isMirrored = videoDeviceInput?.device.position == .front
    captureSession.outputs.forEach { output in
      if isMirrored {
        output.mirror()
      }
      output.setOrientation(configuration.orientation)
    }
  }

  // pragma MARK: Format

  /**
   Configures the active format (`format`)
   */
  func configureFormat(configuration: CameraConfiguration) throws {
    guard let targetFormat = configuration.format else {
      // No format was set, just use the default.
      return
    }

    ReactLogger.log(level: .info, message: "Configuring Format (\(targetFormat))...")
    guard let device = videoDeviceInput?.device else {
      throw CameraError.session(.cameraNotReady)
    }

    let currentFormat = CameraDeviceFormat(fromFormat: device.activeFormat)
    if currentFormat == targetFormat {
      ReactLogger.log(level: .info, message: "Already selected active format, no need to configure.")
      return
    }

    // Find matching format (JS Dictionary -> strongly typed Swift class)
    let format = device.formats.first { targetFormat.isEqualTo(format: $0) }
    guard let format else {
      throw CameraError.format(.invalidFormat)
    }

    // Set new device Format
    device.activeFormat = format

    ReactLogger.log(level: .info, message: "Successfully configured Format!")
  }

  // pragma MARK: Side-Props

  /**
   Configures format-dependent "side-props" (`fps`, `lowLightBoost`, `torch`)
   */
  func configureSideProps(configuration: CameraConfiguration) throws {
    guard let device = videoDeviceInput?.device else {
      throw CameraError.session(.cameraNotReady)
    }

    // Configure FPS
    if let fps = configuration.fps {
      let supportsGivenFps = device.activeFormat.videoSupportedFrameRateRanges.contains { range in
        return range.includes(fps: Double(fps))
      }
      if !supportsGivenFps {
        throw CameraError.format(.invalidFps(fps: Int(fps)))
      }

      let duration = CMTimeMake(value: 1, timescale: fps)
      device.activeVideoMinFrameDuration = duration
      device.activeVideoMaxFrameDuration = duration
    } else {
      device.activeVideoMinFrameDuration = CMTime.invalid
      device.activeVideoMaxFrameDuration = CMTime.invalid
    }

    // Configure Low-Light-Boost
    if configuration.enableLowLightBoost {
      let isDifferent = configuration.enableLowLightBoost != device.automaticallyEnablesLowLightBoostWhenAvailable
      if isDifferent && !device.isLowLightBoostSupported {
        throw CameraError.device(.lowLightBoostNotSupported)
      }
      device.automaticallyEnablesLowLightBoostWhenAvailable = configuration.enableLowLightBoost
    }

    // Configure Torch
    if configuration.torch != .off {
      guard device.hasTorch else {
        throw CameraError.device(.flashUnavailable)
      }

      device.torchMode = configuration.torch.toTorchMode()
      try device.setTorchModeOn(level: 1.0)
    }
  }

  // pragma MARK: Zoom

  /**
   Configures zoom (`zoom`)
   */
  func configureZoom(configuration: CameraConfiguration) throws {
    guard let device = videoDeviceInput?.device else {
      throw CameraError.session(.cameraNotReady)
    }
    guard let zoom = configuration.zoom else {
      return
    }

    let clamped = max(min(zoom, device.activeFormat.videoMaxZoomFactor), device.minAvailableVideoZoomFactor)
    device.videoZoomFactor = clamped
  }

  // pragma MARK: Audio

  /**
   Configures the Audio Capture Session with an audio input and audio data output.
   */
  func configureAudioSession(configuration: CameraConfiguration) throws {
    ReactLogger.log(level: .info, message: "Configuring Audio Session...")

    // Prevent iOS from automatically configuring the Audio Session for us
    audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
    let enableAudio = configuration.audio != .disabled

    // Check microphone permission
    if enableAudio {
      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
      if audioPermissionStatus != .authorized {
        throw CameraError.permission(.microphone)
      }
    }

    // Remove all current inputs
    audioCaptureSession.inputs.forEach { input in
      audioCaptureSession.removeInput(input)
    }
    audioDeviceInput = nil

    // Audio Input (Microphone)
    if enableAudio {
      ReactLogger.log(level: .info, message: "Adding Audio input...")
      guard let microphone = AVCaptureDevice.default(for: .audio) else {
        throw CameraError.device(.microphoneUnavailable)
      }
      let input = try AVCaptureDeviceInput(device: microphone)
      guard audioCaptureSession.canAddInput(input) else {
        throw CameraError.parameter(.unsupportedInput(inputDescriptor: "audio-input"))
      }
      audioCaptureSession.addInput(input)
      audioDeviceInput = input
    }

    // Remove all current outputs
    audioCaptureSession.outputs.forEach { output in
      audioCaptureSession.removeOutput(output)
    }
    audioOutput = nil

    // Audio Output
    if enableAudio {
      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
      let output = AVCaptureAudioDataOutput()
      guard audioCaptureSession.canAddOutput(output) else {
        throw CameraError.parameter(.unsupportedOutput(outputDescriptor: "audio-output"))
      }
      output.setSampleBufferDelegate(self, queue: CameraQueues.audioQueue)
      audioCaptureSession.addOutput(output)
      audioOutput = output
    }
  }
}
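These configure* steps are exactly what the Difference flags from CameraConfiguration.swift gate. The commit loop itself lives in CameraSession.swift, which is not part of this excerpt, so the following is only a sketch of its presumable shape:

// Sketch (assumption): roughly how CameraSession applies a new configuration
// in one atomic pass. The real commit loop is in CameraSession.swift,
// which this excerpt does not show.
extension CameraSession {
  func applyConfiguration(difference: CameraConfiguration.Difference,
                          config: CameraConfiguration) throws {
    if difference.isSessionConfigurationDirty {
      // Session-level changes are batched into a single begin/commit pair.
      captureSession.beginConfiguration()
      if difference.inputChanged { try configureDevice(configuration: config) }
      if difference.outputsChanged { try configureOutputs(configuration: config) }
      if difference.orientationChanged { configureOrientation(configuration: config) }
      captureSession.commitConfiguration()
    }

    if difference.isDeviceConfigurationDirty, let device = videoDeviceInput?.device {
      // One lock covers format, side-props and zoom - the single
      // lock/commit batching the PR description credits for the
      // 1.5 s -> 240 ms startup improvement.
      try device.lockForConfiguration()
      defer { device.unlockForConfiguration() }
      if difference.formatChanged { try configureFormat(configuration: config) }
      if difference.sidePropsChanged { try configureSideProps(configuration: config) }
      if difference.zoomChanged { try configureZoom(configuration: config) }
    }

    if difference.audioSessionChanged {
      try configureAudioSession(configuration: config)
    }
  }
}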
82 package/ios/Core/CameraSession+Focus.swift Normal file
@ -0,0 +1,82 @@
//
// CameraSession+Focus.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CameraSession {
  /**
   Focuses the Camera to the specified point. The point must be in the Camera coordinate system, so {0...1} on both axes.
   */
  func focus(point: CGPoint) throws {
    guard let device = videoDeviceInput?.device else {
      throw CameraError.session(SessionError.cameraNotReady)
    }
    if !device.isFocusPointOfInterestSupported {
      throw CameraError.device(DeviceError.focusNotSupported)
    }

    do {
      try device.lockForConfiguration()
      defer {
        device.unlockForConfiguration()
      }

      // Set Focus
      if device.isFocusPointOfInterestSupported {
        device.focusPointOfInterest = point
        device.focusMode = .autoFocus
      }

      // Set Exposure
      if device.isExposurePointOfInterestSupported {
        device.exposurePointOfInterest = point
        device.exposureMode = .autoExpose
      }

      // Remove any existing listeners
      NotificationCenter.default.removeObserver(self,
                                                name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
                                                object: nil)

      // Listen for focus completion
      device.isSubjectAreaChangeMonitoringEnabled = true
      NotificationCenter.default.addObserver(self,
                                             selector: #selector(subjectAreaDidChange),
                                             name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
                                             object: nil)
    } catch {
      throw CameraError.device(DeviceError.configureError)
    }
  }

  @objc
  func subjectAreaDidChange(notification _: NSNotification) {
    guard let device = videoDeviceInput?.device else {
      return
    }

    try? device.lockForConfiguration()
    defer {
      device.unlockForConfiguration()
    }

    // Reset Focus to continuous/auto
    if device.isFocusPointOfInterestSupported {
      device.focusMode = .continuousAutoFocus
    }

    // Reset Exposure to continuous/auto
    if device.isExposurePointOfInterestSupported {
      device.exposureMode = .continuousAutoExposure
    }

    // Disable listeners
    device.isSubjectAreaChangeMonitoringEnabled = false
  }
}
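focus(point:) expects normalized {0...1} device coordinates, so a caller starting from a tap location has to convert first. A hypothetical helper using AVFoundation's standard preview-layer conversion (this diff only shows PreviewView being created by CameraSession, so the preview-layer reference here is an assumption):

// Hypothetical helper - the previewLayer parameter is an assumed
// AVCaptureVideoPreviewLayer reference, not part of this diff.
extension CameraSession {
  func focus(atLayerPoint layerPoint: CGPoint, in previewLayer: AVCaptureVideoPreviewLayer) {
    // AVFoundation maps layer points into the normalized {0...1}
    // capture-device space that focus(point:) expects.
    let devicePoint = previewLayer.captureDevicePointConverted(fromLayerPoint: layerPoint)
    do {
      try focus(point: devicePoint)
    } catch {
      ReactLogger.log(level: .error, message: "Failed to focus: \(error)")
    }
  }
}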
107 package/ios/Core/CameraSession+Photo.swift Normal file
@ -0,0 +1,107 @@
//
// CameraSession+Photo.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CameraSession {
  /**
   Takes a photo.
   `takePhoto` is only available if `photo={true}`.
   */
  func takePhoto(options: NSDictionary, promise: Promise) {
    // Run on Camera Queue
    CameraQueues.cameraQueue.async {
      // Get Photo Output configuration
      guard let configuration = self.configuration else {
        promise.reject(error: .session(.cameraNotReady))
        return
      }
      guard case let .enabled(config: photo) = configuration.photo else {
        // User needs to enable photo={true}
        promise.reject(error: .capture(.photoNotEnabled))
        return
      }

      // Check if Photo Output is available
      guard let photoOutput = self.photoOutput,
            let videoDeviceInput = self.videoDeviceInput else {
        // Camera is not yet ready
        promise.reject(error: .session(.cameraNotReady))
        return
      }

      ReactLogger.log(level: .info, message: "Capturing photo...")

      // Create photo settings
      let photoSettings = AVCapturePhotoSettings()

      // default, overridable settings if high quality capture was enabled
      if photo.enableHighQualityPhotos {
        // TODO: On iOS 16+ this will be removed in favor of maxPhotoDimensions.
        photoSettings.isHighResolutionPhotoEnabled = true
        if #available(iOS 13.0, *) {
          photoSettings.photoQualityPrioritization = .quality
        }
      }

      // flash
      if videoDeviceInput.device.isFlashAvailable, let flash = options["flash"] as? String {
        guard let flashMode = AVCaptureDevice.FlashMode(withString: flash) else {
          promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flash)))
          return
        }
        photoSettings.flashMode = flashMode
      }

      // shutter sound
      let enableShutterSound = options["enableShutterSound"] as? Bool ?? true

      // depth data
      photoSettings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled
      if #available(iOS 12.0, *) {
        photoSettings.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliveryEnabled
      }

      // quality prioritization
      if #available(iOS 13.0, *), let qualityPrioritization = options["qualityPrioritization"] as? String {
        guard let photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization(withString: qualityPrioritization) else {
          promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: qualityPrioritization)))
          return
        }
        photoSettings.photoQualityPrioritization = photoQualityPrioritization
      }

      // photo size is always the one selected in the format
      if #available(iOS 16.0, *) {
        photoSettings.maxPhotoDimensions = photoOutput.maxPhotoDimensions
      }

      // red-eye reduction
      if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
        photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction
      }

      // stabilization
      if let enableAutoStabilization = options["enableAutoStabilization"] as? Bool {
        photoSettings.isAutoStillImageStabilizationEnabled = enableAutoStabilization
      }

      // distortion correction
      if #available(iOS 14.1, *), let enableAutoDistortionCorrection = options["enableAutoDistortionCorrection"] as? Bool {
        photoSettings.isAutoContentAwareDistortionCorrectionEnabled = enableAutoDistortionCorrection
      }

      // Actually do the capture!
      photoOutput.capturePhoto(with: photoSettings, delegate: PhotoCaptureDelegate(promise: promise, enableShutterSound: enableShutterSound))

      // Assume that `takePhoto` is always called with the same parameters, so prepare the next call too.
      photoOutput.setPreparedPhotoSettingsArray([photoSettings], completionHandler: nil)
    }
  }
}
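The options dictionary mirrors the JS TakePhotoOptions keys parsed above. A hypothetical native call site (the Promise construction is an assumption about VisionCamera's internal Promise helper):

// Hypothetical call site - the option keys are the ones parsed above,
// the `promise` value is an assumption about VisionCamera's internal
// Promise helper.
let options: NSDictionary = [
  "flash": "auto",
  "qualityPrioritization": "balanced",
  "enableShutterSound": false,
]
cameraSession.takePhoto(options: options, promise: promise)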
221 package/ios/Core/CameraSession+Video.swift Normal file
@ -0,0 +1,221 @@
//
// CameraSession+Video.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation
import UIKit

extension CameraSession {
  /**
   Starts a video + audio recording with a custom Asset Writer.
   */
  func startRecording(options: RecordVideoOptions,
                      onVideoRecorded: @escaping (_ video: Video) -> Void,
                      onError: @escaping (_ error: CameraError) -> Void) {
    // Run on Camera Queue
    CameraQueues.cameraQueue.async {
      ReactLogger.log(level: .info, message: "Starting Video recording...")

      if options.flash != .off {
        // use the torch as the video's flash
        self.configure { config in
          config.torch = options.flash
        }
      }

      // Get Video Output
      guard let videoOutput = self.videoOutput else {
        if self.configuration?.video == .disabled {
          onError(.capture(.videoNotEnabled))
        } else {
          onError(.session(.cameraNotReady))
        }
        return
      }

      let enableAudio = self.configuration?.audio != .disabled

      // Callback for when the recording ends
      let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
        defer {
          // Disable Audio Session again
          if enableAudio {
            CameraQueues.audioQueue.async {
              self.deactivateAudioSession()
            }
          }
          // Reset flash
          if options.flash != .off {
            // Set torch mode back to what it was before if we used it for the video flash.
            self.configure { config in
              let torch = self.configuration?.torch ?? .off
              config.torch = torch
            }
          }
        }

        self.recordingSession = nil
        self.isRecording = false
        ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")

        if let error = error as NSError? {
          ReactLogger.log(level: .error, message: "RecordingSession Error \(error.code): \(error.description)")
          // Something went wrong, we have an error
          if error.domain == "capture/aborted" {
            onError(.capture(.aborted))
          } else {
            onError(.capture(.unknown(message: "An unknown recording error occurred! \(error.code) \(error.description)")))
          }
        } else {
          if status == .completed {
            // Recording was successfully saved
            let video = Video(path: recordingSession.url.absoluteString,
                              duration: recordingSession.duration)
            onVideoRecorded(video)
          } else {
            // Recording wasn't saved and we don't have an error either.
            onError(.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
          }
        }
      }

      // Create temporary file
      let errorPointer = ErrorPointer(nilLiteral: ())
      let fileExtension = options.fileType.descriptor ?? "mov"
      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
        let message = errorPointer?.pointee?.description
        onError(.capture(.createTempFileError(message: message)))
        return
      }

      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
      let tempURL = URL(string: "file://\(tempFilePath)")!

      let recordingSession: RecordingSession
      do {
        recordingSession = try RecordingSession(url: tempURL,
                                                fileType: options.fileType,
                                                completion: onFinish)
      } catch let error as NSError {
        onError(.capture(.createRecorderError(message: error.description)))
        return
      }
      self.recordingSession = recordingSession

      // Init Video
      guard var videoSettings = self.recommendedVideoSettings(videoOutput: videoOutput,
                                                              fileType: options.fileType,
                                                              videoCodec: options.codec),
            !videoSettings.isEmpty else {
        onError(.capture(.createRecorderError(message: "Failed to get video settings!")))
        return
      }
      ReactLogger.log(level: .trace, message: "Recommended Video Settings: \(videoSettings.description)")

      // Custom Video Bit Rate
      if let videoBitRate = options.bitRate {
        // Convert from Mbps -> bps
        let bitsPerSecond = videoBitRate * 1_000_000
        videoSettings[AVVideoCompressionPropertiesKey] = [
          AVVideoAverageBitRateKey: NSNumber(value: bitsPerSecond),
        ]
      }

      // get pixel format (420f, 420v, x420)
      let pixelFormat = videoOutput.pixelFormat
      recordingSession.initializeVideoWriter(withSettings: videoSettings,
                                             pixelFormat: pixelFormat)

      // Enable/Activate Audio Session (optional)
      if enableAudio {
        if let audioOutput = self.audioOutput {
          // Activate Audio Session asynchronously
          CameraQueues.audioQueue.async {
            do {
              try self.activateAudioSession()
            } catch {
              self.onConfigureError(error)
            }
          }

          // Initialize audio asset writer
          let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: options.fileType)
          recordingSession.initializeAudioWriter(withSettings: audioSettings)
        }
      }

      // start recording session with or without audio.
      do {
        try recordingSession.startAssetWriter()
        self.isRecording = true
      } catch let error as NSError {
        onError(.capture(.createRecorderError(message: "RecordingSession failed to start asset writer. \(error.description)")))
        return
      }
    }
  }

  /**
   Stops an active recording.
   */
  func stopRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      self.isRecording = false

      withPromise(promise) {
        guard let recordingSession = self.recordingSession else {
          throw CameraError.capture(.noRecordingInProgress)
        }
        recordingSession.finish()
        return nil
      }
    }
  }

  /**
   Pauses an active recording.
   */
  func pauseRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      withPromise(promise) {
        guard self.recordingSession != nil else {
          // there's no active recording!
          throw CameraError.capture(.noRecordingInProgress)
        }
        self.isRecording = false
        return nil
      }
    }
  }

  /**
   Resumes an active, but paused recording.
   */
  func resumeRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      withPromise(promise) {
        guard self.recordingSession != nil else {
          // there's no active recording!
          throw CameraError.capture(.noRecordingInProgress)
        }
        self.isRecording = true
        return nil
      }
    }
  }

  private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput,
                                        fileType: AVFileType,
                                        videoCodec: AVVideoCodecType?) -> [String: Any]? {
    if let videoCodec = videoCodec {
      return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec, assetWriterOutputFileType: fileType)
    } else {
      return videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
    }
  }
}
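For reviewers, a minimal usage sketch of the new `startRecording` API above, assuming a `CameraSession` that already has a video output configured (the `demoRecording` wrapper is hypothetical, not part of this diff):

```swift
import AVFoundation

// Hypothetical caller: start a recording and receive the result via callbacks.
func demoRecording(session: CameraSession) throws {
  let options = try RecordVideoOptions(fromJSValue: ["fileType": "mp4"])
  session.startRecording(options: options,
                         onVideoRecorded: { video in
                           // `path` is a file:// URL string, `duration` is in seconds.
                           print("Recorded \(video.duration)s video at \(video.path)")
                         },
                         onError: { error in
                           print("Recording failed: \(error)")
                         })
}
```

`stopRecording(promise:)` then finishes the `AVAssetWriter`, which fires `onFinish` and invokes one of the two callbacks.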
260  package/ios/Core/CameraSession.swift  Normal file
@ -0,0 +1,260 @@
//
// CameraSession.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 A fully-featured Camera Session supporting preview, video, photo, frame processing, and code scanning outputs.
 All changes to the session have to be controlled via the `configure` function.
 */
class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
  // Configuration
  var configuration: CameraConfiguration?
  // Capture Session
  let captureSession = AVCaptureSession()
  let audioCaptureSession = AVCaptureSession()
  // Inputs & Outputs
  var videoDeviceInput: AVCaptureDeviceInput?
  var audioDeviceInput: AVCaptureDeviceInput?
  var photoOutput: AVCapturePhotoOutput?
  var videoOutput: AVCaptureVideoDataOutput?
  var audioOutput: AVCaptureAudioDataOutput?
  var codeScannerOutput: AVCaptureMetadataOutput?
  // State
  var recordingSession: RecordingSession?
  var isRecording = false

  // Callbacks
  weak var delegate: CameraSessionDelegate?

  // Public accessors
  var maxZoom: Double {
    if let device = videoDeviceInput?.device {
      return device.maxAvailableVideoZoomFactor
    }
    return 1.0
  }

  /**
   Create a new instance of the `CameraSession`.
   The `onError` callback is used for any runtime errors.
   */
  override init() {
    super.init()

    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: captureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: audioCaptureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(audioSessionInterrupted),
                                           name: AVAudioSession.interruptionNotification,
                                           object: AVAudioSession.sharedInstance)
  }

  deinit {
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: captureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: audioCaptureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: AVAudioSession.interruptionNotification,
                                              object: AVAudioSession.sharedInstance)
  }

  /**
   Creates a PreviewView for the current Capture Session
   */
  func createPreviewView(frame: CGRect) -> PreviewView {
    return PreviewView(frame: frame, session: captureSession)
  }

  func onConfigureError(_ error: Error) {
    if let error = error as? CameraError {
      // It's a typed Error
      delegate?.onError(error)
    } else {
      // It's any kind of unknown error
      let cameraError = CameraError.unknown(message: error.localizedDescription)
      delegate?.onError(cameraError)
    }
  }

  /**
   Update the session configuration.
   Any changes in here will be re-configured only if required, and under a lock.
   The `configuration` object is a copy of the currently active configuration that can be modified by the caller in the lambda.
   */
  func configure(_ lambda: (_ configuration: CameraConfiguration) throws -> Void) {
    ReactLogger.log(level: .info, message: "Updating Session Configuration...")

    // Let the caller configure a new configuration for the Camera.
    let config = CameraConfiguration(copyOf: configuration)
    do {
      try lambda(config)
    } catch {
      onConfigureError(error)
    }
    let difference = CameraConfiguration.Difference(between: configuration, and: config)

    // Set up Camera (Video) Capture Session (on camera queue)
    CameraQueues.cameraQueue.async {
      do {
        // If needed, configure the AVCaptureSession (inputs, outputs)
        if difference.isSessionConfigurationDirty {
          // Lock Capture Session for configuration
          ReactLogger.log(level: .info, message: "Beginning CameraSession configuration...")
          self.captureSession.beginConfiguration()

          // 1. Update input device
          if difference.inputChanged {
            try self.configureDevice(configuration: config)
          }
          // 2. Update outputs
          if difference.outputsChanged {
            try self.configureOutputs(configuration: config)
          }
          // 3. Update output orientation
          if difference.orientationChanged {
            self.configureOrientation(configuration: config)
          }

          // Unlock Capture Session again and submit configuration to Hardware
          self.captureSession.commitConfiguration()
          ReactLogger.log(level: .info, message: "Committed CameraSession configuration!")
        }

        // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..)
        if difference.isDeviceConfigurationDirty {
          guard let device = self.videoDeviceInput?.device else {
            throw CameraError.session(.cameraNotReady)
          }
          ReactLogger.log(level: .info, message: "Beginning CaptureDevice configuration...")
          try device.lockForConfiguration()

          // 4. Configure format
          if difference.formatChanged {
            try self.configureFormat(configuration: config)
          }
          // 5. Configure side-props (fps, lowLightBoost)
          if difference.sidePropsChanged {
            try self.configureSideProps(configuration: config)
          }
          // 6. Configure zoom
          if difference.zoomChanged {
            try self.configureZoom(configuration: config)
          }

          device.unlockForConfiguration()
          ReactLogger.log(level: .info, message: "Committed CaptureDevice configuration!")
        }

        // 7. Start or stop the session if needed
        self.checkIsActive(configuration: config)

        // Update successful, set the new configuration!
        self.configuration = config
      } catch {
        self.onConfigureError(error)
      }
    }

    // Set up Audio Capture Session (on audio queue)
    if difference.audioSessionChanged {
      CameraQueues.audioQueue.async {
        do {
          // Lock Capture Session for configuration
          ReactLogger.log(level: .info, message: "Beginning AudioSession configuration...")
          self.audioCaptureSession.beginConfiguration()

          try self.configureAudioSession(configuration: config)

          // Unlock Capture Session again and submit configuration to Hardware
          self.audioCaptureSession.commitConfiguration()
          ReactLogger.log(level: .info, message: "Committed AudioSession configuration!")
        } catch {
          self.onConfigureError(error)
        }
      }
    }
  }

  /**
   Starts or stops the CaptureSession if needed (`isActive`)
   */
  private func checkIsActive(configuration: CameraConfiguration) {
    if configuration.isActive == captureSession.isRunning {
      return
    }

    // Start/Stop session
    if configuration.isActive {
      captureSession.startRunning()
    } else {
      captureSession.stopRunning()
    }
  }

  /**
   Called for every new Frame in the Video output
   */
  public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
    // Call Frame Processor (delegate) for every Video Frame
    if captureOutput is AVCaptureVideoDataOutput {
      delegate?.onFrame(sampleBuffer: sampleBuffer)
    }

    // Record Video Frame/Audio Sample to File in custom `RecordingSession` (AVAssetWriter)
    if isRecording {
      guard let recordingSession = recordingSession else {
        delegate?.onError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
        return
      }

      switch captureOutput {
      case is AVCaptureVideoDataOutput:
        recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
      case is AVCaptureAudioDataOutput:
        let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                                          from: audioCaptureSession.masterClock ?? CMClockGetHostTimeClock(),
                                          to: captureSession.masterClock ?? CMClockGetHostTimeClock())
        recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
      default:
        break
      }
    }
  }

  // pragma MARK: Notifications

  @objc
  func sessionRuntimeError(notification: Notification) {
    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occurred!")
    guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
      return
    }

    // Notify consumer about runtime error
    delegate?.onError(.unknown(message: error._nsError.description, cause: error._nsError))

    let shouldRestart = configuration?.isActive == true
    if shouldRestart {
      // restart capture session after an error occurred
      CameraQueues.cameraQueue.async {
        self.captureSession.startRunning()
      }
    }
  }
}
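A sketch of the batched `configure` pattern the class docs describe: every mutation goes through one lambda, so the session pays for `beginConfiguration()`/`commitConfiguration()` only once per batch. This single lock/commit cycle is the mechanism behind the startup speedup quoted in the PR description. Only properties visible in this diff (`isActive`, `torch`) are used:

```swift
// Hypothetical caller: batch several changes into one lock/commit cycle.
session.configure { config in
  config.isActive = true // starts the AVCaptureSession once committed (step 7)
  config.torch = .on     // applied under lockForConfiguration() (steps 4-6)
}
// CameraConfiguration.Difference decides which of steps 1-7 actually run,
// so unchanged properties cost nothing.
```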
32  package/ios/Core/CameraSessionDelegate.swift  Normal file
@ -0,0 +1,32 @@
//
// CameraSessionDelegate.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 A listener for [CameraSession] events
 */
protocol CameraSessionDelegate: AnyObject {
  /**
   Called when there is a Runtime Error in the [CameraSession]
   */
  func onError(_ error: CameraError)
  /**
   Called when the [CameraSession] successfully initializes
   */
  func onSessionInitialized()
  /**
   Called for every frame (if video or frameProcessor is enabled)
   */
  func onFrame(sampleBuffer: CMSampleBuffer)
  /**
   Called whenever a QR/Barcode has been scanned. Only if the CodeScanner Output is enabled
   */
  func onCodeScanned(codes: [CameraSession.Code])
}
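A minimal conformance sketch (`MyCameraListener` is hypothetical), e.g. for embedding the Core library outside of React Native, one of the stated goals of this PR:

```swift
import AVFoundation

final class MyCameraListener: CameraSessionDelegate {
  func onError(_ error: CameraError) {
    print("Camera error: \(error)")
  }

  func onSessionInitialized() {
    print("Session is ready")
  }

  func onFrame(sampleBuffer: CMSampleBuffer) {
    // Called for every video frame; keep this fast.
  }

  func onCodeScanned(codes: [CameraSession.Code]) {
    print("Scanned \(codes.count) codes")
  }
}

func attachListener(to session: CameraSession) -> CameraSessionDelegate {
  let listener = MyCameraListener()
  session.delegate = listener // `delegate` is weak, so keep the listener alive yourself
  return listener
}
```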
@ -8,6 +8,7 @@

import AVFoundation

// Keeps a strong reference on delegates, as the AVCapturePhotoOutput only holds a weak reference.
private var delegatesReferences: [NSObject] = []

// MARK: - PhotoCaptureDelegate

@ -42,7 +43,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
    let error = ErrorPointer(nilLiteral: ())
    guard let tempFilePath = RCTTempFilePath("jpeg", error)
    else {
      promise.reject(error: .capture(.createTempFileError), cause: error?.pointee)
      let message = error?.pointee?.description
      promise.reject(error: .capture(.createTempFileError(message: message)), cause: error?.pointee)
      return
    }
    let url = URL(string: "file://\(tempFilePath)")!
@ -93,11 +93,7 @@ class RecordingSession {
  /**
   Initializes an AssetWriter for audio frames (CMSampleBuffers).
   */
  func initializeAudioWriter(withSettings settings: [String: Any]) {
    guard !settings.isEmpty else {
      ReactLogger.log(level: .error, message: "Tried to initialize Audio Writer with empty settings!")
      return
    }
  func initializeAudioWriter(withSettings settings: [String: Any]?) {
    guard audioWriter == nil else {
      ReactLogger.log(level: .error, message: "Tried to add Audio Writer twice!")
      return
@ -1,23 +0,0 @@
//
// AVAudioSession+trySetAllowHaptics.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAudioSession {
  /**
   Tries to set allowHapticsAndSystemSoundsDuringRecording and ignore errors.
   */
  func trySetAllowHaptics(_ allowHaptics: Bool) {
    if #available(iOS 13.0, *) {
      if !self.allowHapticsAndSystemSoundsDuringRecording {
        try? self.setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
      }
    }
  }
}
@ -8,6 +8,7 @@

import AVFoundation
import Foundation
import UIKit

extension AVCaptureConnection {
  /**
@ -10,6 +10,8 @@ import AVFoundation

extension AVCaptureDevice {
  func toDictionary() -> [String: Any] {
    let formats = formats.map { CameraDeviceFormat(fromFormat: $0) }

    return [
      "id": uniqueID,
      "physicalDevices": physicalDevices.map(\.deviceType.physicalDeviceDescriptor),
@ -25,10 +27,8 @@ extension AVCaptureDevice {
      "supportsLowLightBoost": isLowLightBoostSupported,
      "supportsFocus": isFocusPointOfInterestSupported,
      "hardwareLevel": "full",
      "sensorOrientation": "portrait", // TODO: Sensor Orientation?
      "formats": formats.map { format -> [String: Any] in
        format.toDictionary()
      },
      "sensorOrientation": Orientation.landscapeLeft.jsValue,
      "formats": formats.map { $0.toJSValue() },
    ]
  }
}
@ -8,27 +8,20 @@

import AVFoundation

private func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
  var modes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
  if #available(iOS 13, *) {
    modes.append(.cinematicExtended)
  }
  return modes
}

extension AVCaptureDevice.Format {
  var videoStabilizationModes: [AVCaptureVideoStabilizationMode] {
    return getAllVideoStabilizationModes().filter { self.isVideoStabilizationModeSupported($0) }
    let allModes = AVCaptureDevice.Format.getAllVideoStabilizationModes()
    return allModes.filter { self.isVideoStabilizationModeSupported($0) }
  }

  var minFrameRate: Float64 {
  var minFps: Float64 {
    let maxRange = videoSupportedFrameRateRanges.max { l, r in
      return l.maxFrameRate < r.maxFrameRate
    }
    return maxRange?.maxFrameRate ?? 0
  }

  var maxFrameRate: Float64 {
  var maxFps: Float64 {
    let maxRange = videoSupportedFrameRateRanges.max { l, r in
      return l.maxFrameRate < r.maxFrameRate
    }
@ -45,52 +38,20 @@ extension AVCaptureDevice.Format {
    return hdrFormats.contains(pixelFormat)
  }

  func toDictionary() -> [String: AnyHashable] {
    let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
    let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }

    return [
      "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
      "autoFocusSystem": autoFocusSystem.descriptor,
      "photoHeight": photoDimensions.height,
      "photoWidth": photoDimensions.width,
      "videoHeight": videoDimensions.height,
      "videoWidth": videoDimensions.width,
      "maxISO": maxISO,
      "minISO": minISO,
      "fieldOfView": videoFieldOfView,
      "maxZoom": videoMaxZoomFactor,
      "supportsVideoHDR": supportsVideoHDR,
      "supportsPhotoHDR": false,
      "minFps": minFrameRate,
      "maxFps": maxFrameRate,
      "pixelFormats": pixelFormats.map(\.unionValue),
      "supportsDepthCapture": !supportedDepthDataFormats.isEmpty,
    ]
  }

  /**
   Compares this format to the given JS `CameraDeviceFormat`.
   Only the most important properties (such as dimensions and FPS) are taken into consideration,
   so this is not an exact equals, but more like a "matches filter" comparison.
   */
  func isEqualTo(jsFormat dict: NSDictionary) -> Bool {
    guard dict["photoWidth"] as? Int32 == photoDimensions.width && dict["photoHeight"] as? Int32 == photoDimensions.height else {
  var supportsPhotoHDR: Bool {
    // TODO: Supports Photo HDR on iOS?
    return false
  }

    guard dict["videoWidth"] as? Int32 == videoDimensions.width && dict["videoHeight"] as? Int32 == videoDimensions.height else {
      return false
  var supportsDepthCapture: Bool {
    return !supportedDepthDataFormats.isEmpty
  }

    guard dict["minFps"] as? Float64 == minFrameRate && dict["maxFps"] as? Float64 == maxFrameRate else {
      return false
  private static func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
    var modes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
    if #available(iOS 13, *) {
      modes.append(.cinematicExtended)
    }

    guard dict["supportsVideoHDR"] as? Bool == supportsVideoHDR else {
      return false
    }

    return true
    return modes
  }
}
52  package/ios/Extensions/AVCaptureOutput+mirror.swift  Normal file
@ -0,0 +1,52 @@
//
// AVCaptureOutput+mirror.swift
// mrousavy
//
// Created by Marc Rousavy on 18.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureOutput {
  /**
   Mirrors the video output if possible.
   */
  func mirror() {
    connections.forEach { connection in
      if connection.isVideoMirroringSupported {
        connection.automaticallyAdjustsVideoMirroring = false
        connection.isVideoMirrored = true
      }
    }
  }

  /**
   Sets the target orientation of the video output.
   This does not always physically rotate image buffers.

   - For Preview, an orientation hint is used to rotate the layer/view itself.
   - For Photos, an EXIF tag is used.
   - For Videos, the buffers are physically rotated if available, since we use an AVCaptureVideoDataOutput instead of an AVCaptureMovieFileOutput.
   */
  func setOrientation(_ orientation: Orientation) {
    // Camera Sensors are always in 90deg rotation.
    // We are setting the target rotation here, so we need to rotate by 90deg once.
    let cameraOrientation = orientation.rotateRight()

    // Set orientation for each connection
    connections.forEach { connection in
      // TODO: Use this once Xcode 15 is rolled out
      // if #available(iOS 17.0, *) {
      //   let degrees = cameraOrientation.toDegrees()
      //   if connection.isVideoRotationAngleSupported(degrees) {
      //     connection.videoRotationAngle = degrees
      //   }
      // } else {
      if connection.isVideoOrientationSupported {
        connection.videoOrientation = cameraOrientation.toAVCaptureVideoOrientation()
      }
      // }
    }
  }
}
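Usage sketch for the merged extension (it replaces the photo-only `AVCapturePhotoOutput+mirror.swift` deleted below), showing that photo and video outputs now share one code path; the wrapper function is hypothetical:

```swift
import AVFoundation

// Hypothetical configuration step: apply mirroring + orientation to any output.
func configureOutputs(device: AVCaptureDevice,
                      photoOutput: AVCapturePhotoOutput,
                      videoOutput: AVCaptureVideoDataOutput) {
  if device.position == .front {
    photoOutput.mirror()
    videoOutput.mirror()
  }
  photoOutput.setOrientation(.portrait) // photos: EXIF orientation tag
  videoOutput.setOrientation(.portrait) // video: buffers are physically rotated
}
```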
@ -1,20 +0,0 @@
//
// AVCapturePhotoOutput+mirror.swift
// mrousavy
//
// Created by Marc Rousavy on 18.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCapturePhotoOutput {
  func mirror() {
    connections.forEach { connection in
      if connection.isVideoMirroringSupported {
        connection.automaticallyAdjustsVideoMirroring = false
        connection.isVideoMirrored = true
      }
    }
  }
}
@ -0,0 +1,26 @@
//
// AVCaptureVideoDataOutput+pixelFormat.swift
// VisionCamera
//
// Created by Marc Rousavy on 12.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureVideoDataOutput {
  /**
   Gets or sets the PixelFormat this output streams in.
   By default, the first item in `availableVideoPixelFormatTypes` is chosen.
   */
  var pixelFormat: OSType {
    get {
      let current = videoSettings[String(kCVPixelBufferPixelFormatTypeKey)] as? OSType
      return current ?? availableVideoPixelFormatTypes.first!
    }
    set {
      videoSettings[String(kCVPixelBufferPixelFormatTypeKey)] = newValue
    }
  }
}
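A short sketch of the new computed property; `kCVPixelFormatType_420YpCbCr8BiPlanarFullRange` is the "420f" format mentioned in `CameraSession+Video.swift` above:

```swift
import AVFoundation

let output = AVCaptureVideoDataOutput()
// Request YUV 4:2:0 full-range ("420f") frames; the getter falls back to the
// first available format while nothing has been set yet.
output.pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
```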
@ -1,31 +0,0 @@
//
// UIInterfaceOrientation+descriptor.swift
// VisionCamera
//
// Created by Marc Rousavy on 04.01.22.
// Copyright © 2022 mrousavy. All rights reserved.
//

import Foundation
import UIKit

extension UIInterfaceOrientation {
  init(withString string: String) throws {
    switch string {
    case "portrait":
      self = .portrait
      return
    case "portrait-upside-down":
      self = .portraitUpsideDown
      return
    case "landscape-left":
      self = .landscapeLeft
      return
    case "landscape-right":
      self = .landscapeRight
      return
    default:
      throw EnumParserError.invalidValue
    }
  }
}
41  package/ios/Types/AutoFocusSystem.swift  Normal file
@ -0,0 +1,41 @@
//
// AutoFocusSystem.swift
// VisionCamera
//
// Created by Marc Rousavy on 13.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

enum AutoFocusSystem: String, JSUnionValue {
  case contrastDetection = "contrast-detection"
  // Note: an explicit rawValue is needed here so `jsValue` matches the kebab-case TS union,
  // in line with `contrastDetection` above.
  case phaseDetection = "phase-detection"
  case none

  init(jsValue: String) throws {
    if let parsed = AutoFocusSystem(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "autoFocusSystem", receivedValue: jsValue))
    }
  }

  init(fromFocusSystem focusSystem: AVCaptureDevice.Format.AutoFocusSystem) {
    switch focusSystem {
    case .none:
      self = .none
    case .contrastDetection:
      self = .contrastDetection
    case .phaseDetection:
      self = .phaseDetection
    @unknown default:
      self = .none
    }
  }

  var jsValue: String {
    return rawValue
  }
}
119  package/ios/Types/CameraDeviceFormat.swift  Normal file
@ -0,0 +1,119 @@
//
// CameraDeviceFormat.swift
// VisionCamera
//
// Created by Marc Rousavy on 13.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 A serializable representation of [AVCaptureDevice.Format]
 */
struct CameraDeviceFormat: Equatable, CustomStringConvertible {
  let videoWidth: Int
  let videoHeight: Int

  let photoWidth: Int
  let photoHeight: Int

  let minFps: Double
  let maxFps: Double

  let minISO: Float
  let maxISO: Float

  let fieldOfView: Float
  let maxZoom: Double

  let videoStabilizationModes: [VideoStabilizationMode]
  let autoFocusSystem: AutoFocusSystem

  let supportsVideoHDR: Bool
  let supportsPhotoHDR: Bool

  let pixelFormats: [PixelFormat]

  let supportsDepthCapture: Bool

  init(fromFormat format: AVCaptureDevice.Format) {
    videoWidth = Int(format.videoDimensions.width)
    videoHeight = Int(format.videoDimensions.height)
    photoWidth = Int(format.photoDimensions.width)
    photoHeight = Int(format.photoDimensions.height)
    minFps = format.minFps
    maxFps = format.maxFps
    minISO = format.minISO
    maxISO = format.maxISO
    fieldOfView = format.videoFieldOfView
    maxZoom = format.videoMaxZoomFactor
    videoStabilizationModes = format.videoStabilizationModes.map { VideoStabilizationMode(from: $0) }
    autoFocusSystem = AutoFocusSystem(fromFocusSystem: format.autoFocusSystem)
    supportsVideoHDR = format.supportsVideoHDR
    supportsPhotoHDR = format.supportsPhotoHDR
    pixelFormats = CameraDeviceFormat.getAllPixelFormats()
    supportsDepthCapture = format.supportsDepthCapture
  }

  init(jsValue: NSDictionary) throws {
    // swiftlint:disable force_cast
    videoWidth = jsValue["videoWidth"] as! Int
    videoHeight = jsValue["videoHeight"] as! Int
    photoWidth = jsValue["photoWidth"] as! Int
    photoHeight = jsValue["photoHeight"] as! Int
    minFps = jsValue["minFps"] as! Double
    maxFps = jsValue["maxFps"] as! Double
    minISO = jsValue["minISO"] as! Float
    maxISO = jsValue["maxISO"] as! Float
    fieldOfView = jsValue["fieldOfView"] as! Float
    maxZoom = jsValue["maxZoom"] as! Double
    let jsVideoStabilizationModes = jsValue["videoStabilizationModes"] as! [String]
    videoStabilizationModes = try jsVideoStabilizationModes.map { try VideoStabilizationMode(jsValue: $0) }
    let jsAutoFocusSystem = jsValue["autoFocusSystem"] as! String
    autoFocusSystem = try AutoFocusSystem(jsValue: jsAutoFocusSystem)
    supportsVideoHDR = jsValue["supportsVideoHDR"] as! Bool
    supportsPhotoHDR = jsValue["supportsPhotoHDR"] as! Bool
    let jsPixelFormats = jsValue["pixelFormats"] as! [String]
    pixelFormats = try jsPixelFormats.map { try PixelFormat(jsValue: $0) }
    supportsDepthCapture = jsValue["supportsDepthCapture"] as! Bool
    // swiftlint:enable force_cast
  }

  func isEqualTo(format other: AVCaptureDevice.Format) -> Bool {
    let other = CameraDeviceFormat(fromFormat: other)
    return self == other
  }

  func toJSValue() -> NSDictionary {
    return [
      "videoStabilizationModes": videoStabilizationModes.map(\.jsValue),
      "autoFocusSystem": autoFocusSystem.jsValue,
      "photoHeight": photoHeight,
      "photoWidth": photoWidth,
      "videoHeight": videoHeight,
      "videoWidth": videoWidth,
      "maxISO": maxISO,
      "minISO": minISO,
      "fieldOfView": fieldOfView,
      "maxZoom": maxZoom,
      "supportsVideoHDR": supportsVideoHDR,
      "supportsPhotoHDR": supportsPhotoHDR,
      "minFps": minFps,
      "maxFps": maxFps,
      "pixelFormats": pixelFormats.map(\.jsValue),
      "supportsDepthCapture": supportsDepthCapture,
    ]
  }

  var description: String {
    return "\(photoWidth)x\(photoHeight) | \(videoWidth)x\(videoHeight)@\(maxFps) (ISO: \(minISO)..\(maxISO), Pixel Formats: \(pixelFormats))"
  }

  // On iOS, all PixelFormats are always supported for every format (it can convert natively)
  private static func getAllPixelFormats() -> [PixelFormat] {
    let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
    return availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }
  }
}
@ -9,7 +9,7 @@
import AVFoundation
import Foundation

class CodeScanner {
struct CodeScanner: Equatable {
  let codeTypes: [AVMetadataObject.ObjectType]
  let interval: Int
  let regionOfInterest: CGRect?
15  package/ios/Types/JSUnionValue.swift  Normal file
@ -0,0 +1,15 @@
//
// JSUnionValue.swift
// VisionCamera
//
// Created by Marc Rousavy on 13.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import Foundation

protocol JSUnionValue {
  init(jsValue: String) throws

  var jsValue: String { get }
}
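Every enum in `Types/` follows the same shape: the `String` rawValue doubles as the TypeScript union string. A sketch with a hypothetical `Flash` enum (not part of this diff):

```swift
enum Flash: String, JSUnionValue {
  case off
  case on
  case auto

  init(jsValue: String) throws {
    if let parsed = Flash(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "flash", receivedValue: jsValue))
    }
  }

  var jsValue: String {
    return rawValue
  }
}
```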
83  package/ios/Types/Orientation.swift  Normal file
@ -0,0 +1,83 @@
//
// Orientation.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 The Orientation used for the Preview, Photo, Video and Frame Processor outputs.
 */
enum Orientation: String, JSUnionValue {
  /**
   Phone is in upright portrait mode, home button/indicator is at the bottom
   */
  case portrait
  /**
   Phone is in landscape mode, home button/indicator is on the left
   */
  case landscapeLeft = "landscape-left"
  /**
   Phone is in upside-down portrait mode, home button/indicator is at the top
   */
  case portraitUpsideDown = "portrait-upside-down"
  /**
   Phone is in landscape mode, home button/indicator is on the right
   */
  case landscapeRight = "landscape-right"

  init(jsValue: String) throws {
    if let parsed = Orientation(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "orientation", receivedValue: jsValue))
    }
  }

  var jsValue: String {
    return rawValue
  }

  func toAVCaptureVideoOrientation() -> AVCaptureVideoOrientation {
    switch self {
    case .portrait:
      return .portrait
    case .landscapeLeft:
      return .landscapeLeft
    case .portraitUpsideDown:
      return .portraitUpsideDown
    case .landscapeRight:
      return .landscapeRight
    }
  }

  func toDegrees() -> Double {
    switch self {
    case .portrait:
      return 0
    case .landscapeLeft:
      return 90
    case .portraitUpsideDown:
      return 180
    case .landscapeRight:
      return 270
    }
  }

  func rotateRight() -> Orientation {
    switch self {
    case .portrait:
      return .landscapeLeft
    case .landscapeLeft:
      return .portraitUpsideDown
    case .portraitUpsideDown:
      return .landscapeRight
    case .landscapeRight:
      return .portrait
    }
  }
}
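Worked example of the 90° sensor offset that `AVCaptureOutput.setOrientation(_:)` relies on: since the sensor is mounted in landscape, the target orientation is rotated right once before being translated to AVFoundation:

```swift
let target = Orientation.portrait
let sensorRelative = target.rotateRight()     // .landscapeLeft
print(sensorRelative.toDegrees())             // 90.0
print(sensorRelative.toAVCaptureVideoOrientation() == .landscapeLeft) // true
```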
@ -9,38 +9,22 @@
import AVFoundation
import Foundation

enum PixelFormat {
enum PixelFormat: String, JSUnionValue {
  case yuv
  case rgb
  case native
  case unknown

  var unionValue: String {
    switch self {
    case .yuv:
      return "yuv"
    case .rgb:
      return "rgb"
    case .native:
      return "native"
    case .unknown:
      return "unknown"
  init(jsValue: String) throws {
    if let parsed = PixelFormat(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "pixelFormat", receivedValue: jsValue))
    }
  }

  init(unionValue: String) throws {
    switch unionValue {
    case "yuv":
      self = .yuv
    case "rgb":
      self = .rgb
    case "native":
      self = .native
    case "unknown":
      self = .unknown
    default:
      throw CameraError.parameter(.invalid(unionName: "pixelFormat", receivedValue: unionValue))
    }
  var jsValue: String {
    return rawValue
  }

  init(mediaSubType: OSType) {
30  package/ios/Types/RecordVideoOptions.swift  Normal file
@ -0,0 +1,30 @@
//
// RecordVideoOptions.swift
// VisionCamera
//
// Created by Marc Rousavy on 12.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

struct RecordVideoOptions {
  var fileType: AVFileType = .mov
  var flash: Torch = .off
  var codec: AVVideoCodecType?
  /**
   Bit-Rate of the Video, in Megabits per second (Mbps)
   */
  var bitRate: Double?

  init(fromJSValue dictionary: NSDictionary) throws {
    // File Type (.mov or .mp4)
    if let fileTypeOption = dictionary["fileType"] as? String {
      guard let parsed = try? AVFileType(withString: fileTypeOption) else {
        throw CameraError.parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption))
      }
      fileType = parsed
    }
  }
}
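Parsing sketch for the options struct; note that only `fileType` is read from the JS dictionary in the lines shown here, everything else keeps its default. This assumes the `AVFileType+descriptor` initializer maps "mp4" to `.mp4`:

```swift
// Hypothetical JS payload crossing the bridge:
func parseDemo() throws {
  let jsOptions: NSDictionary = ["fileType": "mp4"]
  let options = try RecordVideoOptions(fromJSValue: jsOptions)
  print(options.fileType == .mp4) // true, if "mp4" parses to .mp4
  print(options.flash)            // .off (default)
}
```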
@ -6,12 +6,13 @@
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 A ResizeMode used for the PreviewView.
 */
enum ResizeMode {
enum ResizeMode: String, JSUnionValue {
  /**
   Keep aspect ratio, but fill entire parent view (centered).
   */
@ -21,15 +22,15 @@ enum ResizeMode {
   */
  case contain

  init(fromTypeScriptUnion union: String) {
    switch union {
    case "cover":
      self = .cover
    case "contain":
      self = .contain
    default:
      // TODO: Use the onError event for safer error handling!
      fatalError("Invalid value passed for resizeMode! (\(union))")
  init(jsValue: String) throws {
    if let parsed = ResizeMode(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "resizeMode", receivedValue: jsValue))
    }
  }

  var jsValue: String {
    return rawValue
  }
}
45  package/ios/Types/Torch.swift  Normal file
@ -0,0 +1,45 @@
//
// Torch.swift
// VisionCamera
//
// Created by Marc Rousavy on 11.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 A Torch used for permanent flash.
 */
enum Torch: String, JSUnionValue {
  /**
   Torch (flash unit) is always off.
   */
  case off
  /**
   Torch (flash unit) is always on.
   */
  case on

  init(jsValue: String) throws {
    if let parsed = Torch(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "torch", receivedValue: jsValue))
    }
  }

  var jsValue: String {
    return rawValue
  }

  func toTorchMode() -> AVCaptureDevice.TorchMode {
    switch self {
    case .on:
      return .on
    case .off:
      return .off
    }
  }
}
28  package/ios/Types/Video.swift  Normal file
@ -0,0 +1,28 @@
//
// Video.swift
// VisionCamera
//
// Created by Marc Rousavy on 12.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

struct Video {
  /**
   Path to the temporary video file
   */
  var path: String
  /**
   Duration of the recorded video (in seconds)
   */
  var duration: Double

  func toJSValue() -> NSDictionary {
    return [
      "path": path,
      "duration": duration,
    ]
  }
}
47  package/ios/Types/VideoStabilizationMode.swift  Normal file
@ -0,0 +1,47 @@
//
// VideoStabilizationMode.swift
// VisionCamera
//
// Created by Marc Rousavy on 13.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

enum VideoStabilizationMode: String, JSUnionValue {
  case off
  case standard
  case cinematic
  case cinematicExtended = "cinematic-extended"
  case auto

  init(jsValue: String) throws {
    if let parsed = VideoStabilizationMode(rawValue: jsValue) {
      self = parsed
    } else {
      throw CameraError.parameter(.invalid(unionName: "videoStabilizationMode", receivedValue: jsValue))
    }
  }

  init(from mode: AVCaptureVideoStabilizationMode) {
    switch mode {
    case .off:
      self = .off
    case .standard:
      self = .standard
    case .cinematic:
      self = .cinematic
    case .cinematicExtended:
      self = .cinematicExtended
    case .auto:
      self = .auto
    default:
      self = .off
    }
  }

  var jsValue: String {
    return rawValue
  }
}
@ -16,16 +16,25 @@
|
||||
B8446E502ABA14C900E56077 /* CameraDevicesManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */; };
|
||||
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
|
||||
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
|
||||
B85882322AD966FC00317161 /* CameraDeviceFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882312AD966FC00317161 /* CameraDeviceFormat.swift */; };
|
||||
B85882342AD969E000317161 /* VideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882332AD969E000317161 /* VideoStabilizationMode.swift */; };
|
||||
B85882362AD96AFF00317161 /* AutoFocusSystem.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882352AD96AFF00317161 /* AutoFocusSystem.swift */; };
|
||||
B85882382AD96B4400317161 /* JSUnionValue.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882372AD96B4400317161 /* JSUnionValue.swift */; };
|
||||
B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; };
|
||||
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */; };
|
||||
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86400512784A23400E9D2CA /* CameraView+Orientation.swift */; };
|
||||
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
|
||||
B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
|
||||
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
|
||||
B87B11BF2A8E63B700732EBF /* PixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87B11BE2A8E63B700732EBF /* PixelFormat.swift */; };
|
||||
B88103DB2AD6F0A00087F063 /* CameraSession+Audio.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DA2AD6F0A00087F063 /* CameraSession+Audio.swift */; };
|
||||
B88103DD2AD6F62C0087F063 /* CameraSession+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DC2AD6F62C0087F063 /* CameraSession+Focus.swift */; };
|
||||
B88103DF2AD6FB230087F063 /* Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DE2AD6FB230087F063 /* Orientation.swift */; };
|
||||
B88103E12AD7046E0087F063 /* Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103E02AD7046E0087F063 /* Torch.swift */; };
|
||||
B88103E32AD7065C0087F063 /* CameraSessionDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */; };
|
||||
B881D35E2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */; };
|
||||
B881D3602ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */; };
|
||||
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */; };
|
||||
B88685E52AD68D9300E93869 /* CameraSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E42AD68D9300E93869 /* CameraSession.swift */; };
|
||||
B88685E72AD698DF00E93869 /* CameraConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E62AD698DF00E93869 /* CameraConfiguration.swift */; };
|
||||
B88685E92AD6A5D600E93869 /* CameraSession+Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E82AD6A5D600E93869 /* CameraSession+Video.swift */; };
|
||||
B88685EB2AD6A5DE00E93869 /* CameraSession+Photo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685EA2AD6A5DE00E93869 /* CameraSession+Photo.swift */; };
|
||||
B88685ED2AD6A5E600E93869 /* CameraSession+CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685EC2AD6A5E600E93869 /* CameraSession+CodeScanner.swift */; };
|
||||
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
|
||||
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
|
||||
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B887515F25E0102000DB86D6 /* CameraViewManager.m */; };
|
||||
@ -34,7 +43,7 @@
|
||||
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */; };
|
||||
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */; };
|
||||
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */; };
|
||||
B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */; };
|
||||
B887518F25E0102000DB86D6 /* AVCaptureOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCaptureOutput+mirror.swift */; };
|
||||
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; };
|
||||
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516E25E0102000DB86D6 /* MakeReactError.swift */; };
|
||||
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; };
|
||||
@ -50,21 +59,23 @@
|
||||
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */; };
|
||||
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */; };
|
||||
B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */; };
|
||||
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518025E0102000DB86D6 /* CameraView+Focus.swift */; };
|
||||
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518125E0102000DB86D6 /* CameraViewManager.swift */; };
|
||||
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
|
||||
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
|
||||
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
|
||||
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
|
||||
B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */; };
|
||||
B8A1AEC42AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC32AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift */; };
|
||||
B8A1AEC62AD7F08E00169C0D /* CameraView+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC52AD7F08E00169C0D /* CameraView+Focus.swift */; };
|
||||
B8A1AEC82AD8005400169C0D /* CameraSession+Configuration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC72AD8005400169C0D /* CameraSession+Configuration.swift */; };
|
||||
B8A1AECA2AD8034E00169C0D /* RecordVideoOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC92AD8034E00169C0D /* RecordVideoOptions.swift */; };
|
||||
B8A1AECC2AD803B200169C0D /* Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AECB2AD803B200169C0D /* Video.swift */; };
|
||||
B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
|
||||
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
|
||||
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
|
||||
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
|
||||
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
|
||||
B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */; };
|
||||
B8F127D02ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */; };
|
||||
B8FF60AC2ACC93EF009D612F /* CameraView+CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60AB2ACC93EF009D612F /* CameraView+CodeScanner.swift */; };
|
||||
B8FF60AE2ACC9731009D612F /* CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60AD2ACC9731009D612F /* CodeScanner.swift */; };
|
||||
B8FF60B12ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60B02ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift */; };
|
||||
/* End PBXBuildFile section */
|
||||
@ -98,16 +109,25 @@
|
||||
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
|
||||
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
|
||||
		B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
		B85882312AD966FC00317161 /* CameraDeviceFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraDeviceFormat.swift; sourceTree = "<group>"; };
		B85882332AD969E000317161 /* VideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoStabilizationMode.swift; sourceTree = "<group>"; };
		B85882352AD96AFF00317161 /* AutoFocusSystem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AutoFocusSystem.swift; sourceTree = "<group>"; };
		B85882372AD96B4400317161 /* JSUnionValue.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JSUnionValue.swift; sourceTree = "<group>"; };
		B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
		B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
		B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
		B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
		B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
		B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
		B87B11BE2A8E63B700732EBF /* PixelFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PixelFormat.swift; sourceTree = "<group>"; };
		B88103DA2AD6F0A00087F063 /* CameraSession+Audio.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraSession+Audio.swift"; sourceTree = "<group>"; };
		B88103DC2AD6F62C0087F063 /* CameraSession+Focus.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraSession+Focus.swift"; sourceTree = "<group>"; };
		B88103DE2AD6FB230087F063 /* Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Orientation.swift; sourceTree = "<group>"; };
		B88103E02AD7046E0087F063 /* Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Torch.swift; sourceTree = "<group>"; };
		B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraSessionDelegate.swift; sourceTree = "<group>"; };
		B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+toDictionary.swift"; sourceTree = "<group>"; };
		B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoDataOutput+findPixelFormat.swift"; sourceTree = "<group>"; };
		B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
		B88685E42AD68D9300E93869 /* CameraSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraSession.swift; sourceTree = "<group>"; };
		B88685E62AD698DF00E93869 /* CameraConfiguration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraConfiguration.swift; sourceTree = "<group>"; };
		B88685E82AD6A5D600E93869 /* CameraSession+Video.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraSession+Video.swift"; sourceTree = "<group>"; };
		B88685EA2AD6A5DE00E93869 /* CameraSession+Photo.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraSession+Photo.swift"; sourceTree = "<group>"; };
		B88685EC2AD6A5E600E93869 /* CameraSession+CodeScanner.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraSession+CodeScanner.swift"; sourceTree = "<group>"; };
		B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
		B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
		B887515E25E0102000DB86D6 /* CameraBridge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraBridge.h; sourceTree = "<group>"; };
@@ -117,7 +137,7 @@
		B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+isMultiCam.swift"; sourceTree = "<group>"; };
		B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+physicalDevices.swift"; sourceTree = "<group>"; };
		B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVFrameRateRange+includes.swift"; sourceTree = "<group>"; };
		B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput+mirror.swift"; sourceTree = "<group>"; };
		B887516825E0102000DB86D6 /* AVCaptureOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureOutput+mirror.swift"; sourceTree = "<group>"; };
		B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+toDictionary.swift"; sourceTree = "<group>"; };
		B887516E25E0102000DB86D6 /* MakeReactError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MakeReactError.swift; sourceTree = "<group>"; };
		B887516F25E0102000DB86D6 /* ReactLogger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReactLogger.swift; sourceTree = "<group>"; };
@@ -133,7 +153,6 @@
		B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Position+descriptor.swift"; sourceTree = "<group>"; };
		B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.FlashMode+descriptor.swift"; sourceTree = "<group>"; };
		B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift"; sourceTree = "<group>"; };
		B887518025E0102000DB86D6 /* CameraView+Focus.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+Focus.swift"; sourceTree = "<group>"; };
		B887518125E0102000DB86D6 /* CameraViewManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraViewManager.swift; sourceTree = "<group>"; };
		B887518225E0102000DB86D6 /* CameraView+Zoom.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+Zoom.swift"; sourceTree = "<group>"; };
		B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
@@ -141,6 +160,11 @@
		B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
		B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
		B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSINSObjectConversion.mm; sourceTree = "<group>"; };
		B8A1AEC32AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoDataOutput+pixelFormat.swift"; sourceTree = "<group>"; };
		B8A1AEC52AD7F08E00169C0D /* CameraView+Focus.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Focus.swift"; sourceTree = "<group>"; };
		B8A1AEC72AD8005400169C0D /* CameraSession+Configuration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraSession+Configuration.swift"; sourceTree = "<group>"; };
		B8A1AEC92AD8034E00169C0D /* RecordVideoOptions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordVideoOptions.swift; sourceTree = "<group>"; };
		B8A1AECB2AD803B200169C0D /* Video.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Video.swift; sourceTree = "<group>"; };
		B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
		B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
		B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
@@ -148,12 +172,10 @@
		B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
		B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
		B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
		B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
		B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
		B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessor.mm; sourceTree = "<group>"; };
		B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMVideoDimensions+toCGSize.swift"; sourceTree = "<group>"; };
		B8F7DDD1266F715D00120533 /* Frame.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Frame.m; sourceTree = "<group>"; };
		B8FF60AB2ACC93EF009D612F /* CameraView+CodeScanner.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+CodeScanner.swift"; sourceTree = "<group>"; };
		B8FF60AD2ACC9731009D612F /* CodeScanner.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CodeScanner.swift; sourceTree = "<group>"; };
		B8FF60B02ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVMetadataObject.ObjectType+descriptor.swift"; sourceTree = "<group>"; };
/* End PBXFileReference section */
@@ -180,28 +202,19 @@
		58B511D21A9E6C8500147676 = {
			isa = PBXGroup;
			children = (
				B88685E32AD68D8A00E93869 /* Core */,
				B80175EA2ABDEBBB00E7DE90 /* Types */,
				B8DCF2D725EA940700EA5C72 /* Frame Processor */,
				B887515E25E0102000DB86D6 /* CameraBridge.h */,
				B84760DE2608F57D004C3180 /* CameraQueues.swift */,
				B887518325E0102000DB86D6 /* CameraError.swift */,
				B887518425E0102000DB86D6 /* CameraView.swift */,
				B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */,
				B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */,
				B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */,
				B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
				B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
				B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
				B8FF60AB2ACC93EF009D612F /* CameraView+CodeScanner.swift */,
				B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
				B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
				B8A1AEC52AD7F08E00169C0D /* CameraView+Focus.swift */,
				B887515F25E0102000DB86D6 /* CameraViewManager.m */,
				B887518125E0102000DB86D6 /* CameraViewManager.swift */,
				B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */,
				B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */,
				B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
				B83D5EE629377117000AFD2F /* PreviewView.swift */,
				B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
				B887516125E0102000DB86D6 /* Extensions */,
				B887517225E0102000DB86D6 /* Parsers */,
				B887516D25E0102000DB86D6 /* React Utils */,
@@ -214,15 +227,44 @@
			children = (
				B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */,
				B8FF60AD2ACC9731009D612F /* CodeScanner.swift */,
				B88103DE2AD6FB230087F063 /* Orientation.swift */,
				B88103E02AD7046E0087F063 /* Torch.swift */,
				B8A1AEC92AD8034E00169C0D /* RecordVideoOptions.swift */,
				B8A1AECB2AD803B200169C0D /* Video.swift */,
				B85882312AD966FC00317161 /* CameraDeviceFormat.swift */,
				B85882332AD969E000317161 /* VideoStabilizationMode.swift */,
				B85882352AD96AFF00317161 /* AutoFocusSystem.swift */,
				B87B11BE2A8E63B700732EBF /* PixelFormat.swift */,
				B85882372AD96B4400317161 /* JSUnionValue.swift */,
			);
			path = Types;
			sourceTree = "<group>";
		};
		B88685E32AD68D8A00E93869 /* Core */ = {
			isa = PBXGroup;
			children = (
				B88685E42AD68D9300E93869 /* CameraSession.swift */,
				B8A1AEC72AD8005400169C0D /* CameraSession+Configuration.swift */,
				B88685E82AD6A5D600E93869 /* CameraSession+Video.swift */,
				B88685EA2AD6A5DE00E93869 /* CameraSession+Photo.swift */,
				B88685EC2AD6A5E600E93869 /* CameraSession+CodeScanner.swift */,
				B88103DA2AD6F0A00087F063 /* CameraSession+Audio.swift */,
				B88103DC2AD6F62C0087F063 /* CameraSession+Focus.swift */,
				B88685E62AD698DF00E93869 /* CameraConfiguration.swift */,
				B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */,
				B83D5EE629377117000AFD2F /* PreviewView.swift */,
				B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
				B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
				B84760DE2608F57D004C3180 /* CameraQueues.swift */,
				B887518325E0102000DB86D6 /* CameraError.swift */,
			);
			path = Core;
			sourceTree = "<group>";
		};
		B887516125E0102000DB86D6 /* Extensions */ = {
			isa = PBXGroup;
			children = (
				B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */,
				B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */,
				B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */,
				B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */,
				B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */,
@@ -230,13 +272,14 @@
				B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */,
				B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */,
				B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */,
				B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */,
				B887516825E0102000DB86D6 /* AVCaptureOutput+mirror.swift */,
				B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */,
				B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */,
				B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */,
				B887516225E0102000DB86D6 /* Collection+safe.swift */,
				B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */,
				B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */,
				B8A1AEC32AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift */,
			);
			path = Extensions;
			sourceTree = "<group>";
@@ -267,8 +310,6 @@
				B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */,
				B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */,
				B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */,
				B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */,
				B87B11BE2A8E63B700732EBF /* PixelFormat.swift */,
				B8FF60B02ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift */,
			);
			path = Parsers;
@@ -395,65 +436,76 @@
			isa = PBXSourcesBuildPhase;
			buildActionMask = 2147483647;
			files = (
				B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */,
				B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
				B8A1AECA2AD8034E00169C0D /* RecordVideoOptions.swift in Sources */,
				B85882322AD966FC00317161 /* CameraDeviceFormat.swift in Sources */,
				B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift in Sources */,
				B8A1AEC82AD8005400169C0D /* CameraSession+Configuration.swift in Sources */,
				B88685E92AD6A5D600E93869 /* CameraSession+Video.swift in Sources */,
				B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
				B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
				B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
				B8A1AEC42AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift in Sources */,
				B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
				B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
				B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
				B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
				B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
				B887519625E0102000DB86D6 /* Promise.swift in Sources */,
				B88103DD2AD6F62C0087F063 /* CameraSession+Focus.swift in Sources */,
				B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
				B88685E72AD698DF00E93869 /* CameraConfiguration.swift in Sources */,
				B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
				B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
				B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
				B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
				B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
				B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
				B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
				B88685EB2AD6A5DE00E93869 /* CameraSession+Photo.swift in Sources */,
				B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
				B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
				B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
				B8FF60B12ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift in Sources */,
				B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
				B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */,
				B85882382AD96B4400317161 /* JSUnionValue.swift in Sources */,
				B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
				B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
				B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */,
				B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */,
				B881D3602ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */,
				B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
				B88103DB2AD6F0A00087F063 /* CameraSession+Audio.swift in Sources */,
				B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
				B85882362AD96AFF00317161 /* AutoFocusSystem.swift in Sources */,
				B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
				B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
				B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
				B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */,
				B88685E52AD68D9300E93869 /* CameraSession.swift in Sources */,
				B881D35E2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift in Sources */,
				B87B11BF2A8E63B700732EBF /* PixelFormat.swift in Sources */,
				B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
				B8FF60AC2ACC93EF009D612F /* CameraView+CodeScanner.swift in Sources */,
				B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */,
				B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */,
				B88685ED2AD6A5E600E93869 /* CameraSession+CodeScanner.swift in Sources */,
				B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
				B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */,
				B8FF60AE2ACC9731009D612F /* CodeScanner.swift in Sources */,
				B8446E502ABA14C900E56077 /* CameraDevicesManager.m in Sources */,
				B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
				B887518F25E0102000DB86D6 /* AVCaptureOutput+mirror.swift in Sources */,
				B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
				B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */,
				B85882342AD969E000317161 /* VideoStabilizationMode.swift in Sources */,
				B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
				B88103E12AD7046E0087F063 /* Torch.swift in Sources */,
				B8446E4D2ABA147C00E56077 /* CameraDevicesManager.swift in Sources */,
				B8A1AECC2AD803B200169C0D /* Video.swift in Sources */,
				B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
				B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
				B88103DF2AD6FB230087F063 /* Orientation.swift in Sources */,
				B8F127D02ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift in Sources */,
				B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */,
				B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
				B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
				B8A1AEC62AD7F08E00169C0D /* CameraView+Focus.swift in Sources */,
				B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */,
				B88103E32AD7065C0087F063 /* CameraSessionDelegate.swift in Sources */,
				B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
			);
			runOnlyForDeploymentPostprocessing = 0;