Extract AVCaptureSession and AVAudioSession setup to extensions

Marc Rousavy 2021-03-26 16:20:57 +01:00
parent 56c67f25c7
commit 9404b93dc3
8 changed files with 359 additions and 321 deletions
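The core pattern of this refactor: methods move out of the CameraView class body into extensions that live in separate files. Because Swift's private access level stops at the file boundary, each extracted method widens from private to internal (compare the removed private final func configureAudioSession in CameraView.swift below with the added internal final func in the new extension file). A minimal sketch of the pattern:

import AVFoundation
import UIKit

// CameraView.swift: shared state stays declared on the type itself
final class CameraView: UIView {
    internal let captureSession = AVCaptureSession()
}

// CameraView+AVAudioSession.swift: extracted logic lives in an extension.
// `private` would be invisible from another file, hence `internal`.
extension CameraView {
    internal final func configureAudioSession() { /* ... */ }
}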

Podfile.lock

@@ -279,7 +279,7 @@ PODS:
- react-native-video/Video (= 5.1.1)
- react-native-video/Video (5.1.1):
- React-Core
- - react-native-vision-camera (1.0.3):
+ - react-native-vision-camera (1.0.4):
- React-Core
- React-perflogger (0.64.0)
- React-RCTActionSheet (0.64.0):
@@ -554,9 +554,9 @@ EXTERNAL SOURCES:
SPEC CHECKSUMS:
boost-for-react-native: 39c7adb57c4e60d6c5479dd8623128eb5b3f0f2c
CocoaAsyncSocket: 065fd1e645c7abab64f7a6a2007a48038fdc6a99
- DoubleConversion: cde416483dac037923206447da6e1454df403714
+ DoubleConversion: cf9b38bf0b2d048436d9a82ad2abe1404f11e7de
FBLazyVector: 49cbe4b43e445b06bf29199b6ad2057649e4c8f5
- FBReactNativeSpec: e800dc469340da7e8e47f45145f69d75a7b06874
+ FBReactNativeSpec: 06c29ba6920affcab9cda6154497386d21f43410
Flipper: d3da1aa199aad94455ae725e9f3aa43f3ec17021
Flipper-DoubleConversion: 38631e41ef4f9b12861c67d17cb5518d06badc41
Flipper-Folly: f7a3caafbd74bda4827954fd7a6e000e36355489
@@ -564,7 +564,7 @@ SPEC CHECKSUMS:
Flipper-PeerTalk: 116d8f857dc6ef55c7a5a75ea3ceaafe878aadc9
Flipper-RSocket: 602921fee03edacf18f5d6f3d3594ba477f456e5
FlipperKit: 8a20b5c5fcf9436cac58551dc049867247f64b00
- glog: 40a13f7840415b9a77023fbcae0f1e6f43192af3
+ glog: 73c2498ac6884b13ede40eda8228cb1eee9d9d62
hermes-engine: 7d97ba46a1e29bacf3e3c61ecb2804a5ddd02d4f
libevent: 4049cae6c81cdb3654a443be001fb9bdceff7913
OpenSSL-Universal: 1aa4f6a6ee7256b83db99ec1ccdaa80d10f9af9b
@@ -583,7 +583,7 @@ SPEC CHECKSUMS:
react-native-cameraroll: 1965db75c851b15e77a22ca0ac78e32af6b571ae
react-native-slider: e99fc201cefe81270fc9d81714a7a0f5e566b168
react-native-video: 0bb76b6d6b77da3009611586c7dbf817b947f30e
- react-native-vision-camera: 83bc97de3bc01be3a99037dd4cf6c672aef632b7
+ react-native-vision-camera: d0d6fdd334f1536d016b3ca92064f25a6312e09c
React-perflogger: 9c547d8f06b9bf00cb447f2b75e8d7f19b7e02af
React-RCTActionSheet: 3080b6e12e0e1a5b313c8c0050699b5c794a1b11
React-RCTAnimation: 3f96f21a497ae7dabf4d2f150ee43f906aaf516f

CameraView+AVAudioSession.swift

@@ -0,0 +1,75 @@
//
// CameraView+AVAudioSession.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.03.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
import AVFoundation
/**
Extension for CameraView that sets up the AVAudioSession.
*/
extension CameraView {
@objc
func audioSessionInterrupted(notification: Notification) {
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
// TODO: Should we also disable the camera here? I think it will throw a runtime error
// disable audio session
try? AVAudioSession.sharedInstance().setActive(false)
break
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
// restart audio session because interruption is over
configureAudioSession()
} else {
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
}
break
}
}
private final func setAutomaticallyConfiguresAudioSession(_ automaticallyConfiguresAudioSession: Bool) {
if captureSession.automaticallyConfiguresApplicationAudioSession != automaticallyConfiguresAudioSession {
captureSession.beginConfiguration()
captureSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresAudioSession
captureSession.commitConfiguration()
}
}
/**
Configures the Audio session to allow background-music playback while recording.
*/
internal final func configureAudioSession() {
let start = DispatchTime.now()
do {
setAutomaticallyConfiguresAudioSession(false)
let audioSession = AVAudioSession.sharedInstance()
if audioSession.category != .playAndRecord {
// allow background music playback
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
}
// TODO: Use https://developer.apple.com/documentation/avfaudio/avaudiosession/3726094-setprefersnointerruptionsfromsys
audioSession.trySetAllowHaptics(true)
// activate current audio session because camera is active
try audioSession.setActive(true)
} catch let error as NSError {
self.invokeOnError(.session(.audioSessionSetupFailed(reason: error.description)), cause: error)
setAutomaticallyConfiguresAudioSession(true)
}
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
}
}
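The @objc handler above only runs if CameraView registers itself for the audio-session interruption notification; that registration is not part of this diff. A sketch of what it presumably looks like (placement, e.g. in CameraView's initializer, is an assumption):

// Hypothetical registration, not shown in this commit:
NotificationCenter.default.addObserver(self,
                                       selector: #selector(audioSessionInterrupted),
                                       name: AVAudioSession.interruptionNotification,
                                       object: AVAudioSession.sharedInstance())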

CameraView+AVCaptureSession.swift

@@ -0,0 +1,261 @@
//
// CameraView+AVCaptureSession.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.03.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
import AVFoundation
/**
Extension for CameraView that sets up the AVCaptureSession, Device and Format.
*/
extension CameraView {
@objc
func sessionRuntimeError(notification: Notification) {
ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!")
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
}
/**
Configures the Capture Session.
*/
internal final func configureCaptureSession() {
ReactLogger.logJS(level: .info, message: "Configuring Session...")
isReady = false
#if targetEnvironment(simulator)
return invokeOnError(.device(.notAvailableOnSimulator))
#endif
guard cameraId != nil else {
return invokeOnError(.device(.noDevice))
}
let cameraId = self.cameraId! as String
ReactLogger.log(level: .info, message: "Initializing Camera with device \(cameraId)...")
captureSession.beginConfiguration()
defer {
captureSession.commitConfiguration()
}
if let preset = self.preset {
var sessionPreset: AVCaptureSession.Preset?
do {
sessionPreset = try AVCaptureSession.Preset(withString: preset)
} catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
return invokeOnError(.parameter(.unsupportedOS(unionName: "Preset", receivedValue: preset, supportedOnOs: os)))
} catch {
return invokeOnError(.parameter(.invalid(unionName: "Preset", receivedValue: preset)))
}
if sessionPreset != nil {
if captureSession.canSetSessionPreset(sessionPreset!) {
captureSession.sessionPreset = sessionPreset!
} else {
// non-fatal error, so continue with configuration
invokeOnError(.format(.invalidPreset(preset: preset)))
}
}
}
// INPUTS
// Video Input
do {
if let videoDeviceInput = self.videoDeviceInput {
captureSession.removeInput(videoDeviceInput)
}
guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
return invokeOnError(.device(.invalid))
}
zoom = NSNumber(value: Double(videoDevice.neutralZoomPercent))
videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
guard captureSession.canAddInput(videoDeviceInput!) else {
return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "video-input")))
}
captureSession.addInput(videoDeviceInput!)
} catch {
return invokeOnError(.device(.invalid))
}
// Microphone (Audio Input)
do {
if let audioDeviceInput = self.audioDeviceInput {
captureSession.removeInput(audioDeviceInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
return invokeOnError(.device(.microphoneUnavailable))
}
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard captureSession.canAddInput(audioDeviceInput!) else {
return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
}
captureSession.addInput(audioDeviceInput!)
} catch {
return invokeOnError(.device(.invalid))
}
// OUTPUTS
if let photoOutput = self.photoOutput {
captureSession.removeOutput(photoOutput)
}
// Photo Output
photoOutput = AVCapturePhotoOutput()
photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
}
if #available(iOS 12.0, *) {
photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
}
guard captureSession.canAddOutput(photoOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
}
captureSession.addOutput(photoOutput!)
if videoDeviceInput!.device.position == .front {
photoOutput!.mirror()
}
// Video Output
if let movieOutput = self.movieOutput {
captureSession.removeOutput(movieOutput)
}
movieOutput = AVCaptureMovieFileOutput()
guard captureSession.canAddOutput(movieOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "movie-output")))
}
captureSession.addOutput(movieOutput!)
if videoDeviceInput!.device.position == .front {
movieOutput!.mirror()
}
// Barcode Scanning
if let metadataOutput = self.metadataOutput {
captureSession.removeOutput(metadataOutput)
}
if let scannableCodes = self.scannableCodes {
// scannableCodes prop is not nil, so enable barcode scanning.
guard onCodeScanned != nil else {
return invokeOnError(.parameter(.invalidCombination(provided: "scannableCodes", missing: "onCodeScanned")))
}
metadataOutput = AVCaptureMetadataOutput()
guard captureSession.canAddOutput(metadataOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "metadata-output")))
}
captureSession.addOutput(metadataOutput!)
metadataOutput!.setMetadataObjectsDelegate(self, queue: queue)
var objectTypes: [AVMetadataObject.ObjectType] = []
scannableCodes.forEach { code in
do {
objectTypes.append(try AVMetadataObject.ObjectType(withString: code))
} catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
invokeOnError(.parameter(.unsupportedOS(unionName: "CodeType", receivedValue: code, supportedOnOs: os)))
} catch {
invokeOnError(.parameter(.invalid(unionName: "CodeType", receivedValue: code)))
}
}
metadataOutput!.metadataObjectTypes = objectTypes
}
invokeOnInitialized()
isReady = true
ReactLogger.logJS(level: .info, message: "Session successfully configured!")
}
/**
Configures the Video Device to find the best matching Format.
*/
internal final func configureFormat() {
ReactLogger.logJS(level: .info, message: "Configuring Format...")
guard let filter = self.format else {
// Format Filter was null. Ignore it.
return
}
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
if device.activeFormat.matchesFilter(filter) {
ReactLogger.log(level: .info, message: "Active format already matches filter.")
return
}
// get matching format
let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
guard let format = matchingFormats.first else {
return invokeOnError(.format(.invalidFormat))
}
do {
try device.lockForConfiguration()
device.activeFormat = format
device.unlockForConfiguration()
ReactLogger.logJS(level: .info, message: "Format successfully configured!")
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
/**
Configures the Video Device with the given FPS, HDR and ColorSpace.
*/
internal final func configureDevice() {
ReactLogger.logJS(level: .info, message: "Configuring Device...")
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
do {
try device.lockForConfiguration()
if let fps = self.fps?.int32Value {
let duration = CMTimeMake(value: 1, timescale: fps)
device.activeVideoMinFrameDuration = duration
device.activeVideoMaxFrameDuration = duration
} else {
device.activeVideoMinFrameDuration = CMTime.invalid
device.activeVideoMaxFrameDuration = CMTime.invalid
}
if hdr != nil {
if hdr == true && !device.activeFormat.isVideoHDRSupported {
return invokeOnError(.format(.invalidHdr))
}
if !device.automaticallyAdjustsVideoHDREnabled {
if device.isVideoHDREnabled != hdr!.boolValue {
device.isVideoHDREnabled = hdr!.boolValue
}
}
}
if lowLightBoost != nil {
if lowLightBoost == true && !device.isLowLightBoostSupported {
return invokeOnError(.device(.lowLightBoostNotSupported))
}
if device.automaticallyEnablesLowLightBoostWhenAvailable != lowLightBoost!.boolValue {
device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue
}
}
if colorSpace != nil, let avColorSpace = try? AVCaptureColorSpace(string: String(colorSpace!)) {
device.activeColorSpace = avColorSpace
}
device.unlockForConfiguration()
ReactLogger.logJS(level: .info, message: "Device successfully configured!")
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
}
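As with the audio-session handler, sessionRuntimeError is selector-based and needs a matching observer on the capture session's runtime-error notification; the registration is likewise not shown in this commit. A hedged sketch:

// Hypothetical registration, not shown in this commit:
NotificationCenter.default.addObserver(self,
                                       selector: #selector(sessionRuntimeError),
                                       name: .AVCaptureSessionRuntimeError,
                                       object: captureSession)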

CameraView.swift

@@ -137,6 +137,8 @@ final class CameraView: UIView {
return AVCaptureVideoPreviewLayer.self
}
+ internal let captureSession = AVCaptureSession()
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@@ -197,49 +199,6 @@ final class CameraView: UIView {
return layer as! AVCaptureVideoPreviewLayer
}
@objc
func sessionRuntimeError(notification: Notification) {
ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!")
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
invokeOnError(.unknown(message: error.description), cause: error as NSError)
}
@objc
func audioSessionInterrupted(notification: Notification) {
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
// TODO: Should we also disable the camera here? I think it will throw a runtime error
// disable audio session
try? AVAudioSession.sharedInstance().setActive(false)
break
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
// restart audio session because interruption is over
configureAudioSession()
} else {
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
}
break
}
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
@@ -302,269 +261,4 @@ final class CameraView: UIView {
onInitialized([String: Any]())
}
// MARK: Private
private let captureSession = AVCaptureSession()
private final func setAutomaticallyConfiguresAudioSession(_ automaticallyConfiguresAudioSession: Bool) {
if captureSession.automaticallyConfiguresApplicationAudioSession != automaticallyConfiguresAudioSession {
captureSession.beginConfiguration()
captureSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresAudioSession
captureSession.commitConfiguration()
}
}
// pragma MARK: Session, Device and Format Configuration
/**
Configures the Audio session to allow background-music playback while recording.
*/
private final func configureAudioSession() {
let start = DispatchTime.now()
do {
setAutomaticallyConfiguresAudioSession(false)
let audioSession = AVAudioSession.sharedInstance()
if audioSession.category != .playAndRecord {
// allow background music playback
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
}
// TODO: Use https://developer.apple.com/documentation/avfaudio/avaudiosession/3726094-setprefersnointerruptionsfromsys
audioSession.trySetAllowHaptics(true)
// activate current audio session because camera is active
try audioSession.setActive(true)
} catch let error as NSError {
self.invokeOnError(.session(.audioSessionSetupFailed(reason: error.description)), cause: error)
setAutomaticallyConfiguresAudioSession(true)
}
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
}
/**
Configures the Capture Session.
*/
private final func configureCaptureSession() {
ReactLogger.logJS(level: .info, message: "Configuring Session...")
isReady = false
#if targetEnvironment(simulator)
return invokeOnError(.device(.notAvailableOnSimulator))
#endif
guard cameraId != nil else {
return invokeOnError(.device(.noDevice))
}
let cameraId = self.cameraId! as String
ReactLogger.log(level: .info, message: "Initializing Camera with device \(cameraId)...")
captureSession.beginConfiguration()
defer {
captureSession.commitConfiguration()
}
if let preset = self.preset {
var sessionPreset: AVCaptureSession.Preset?
do {
sessionPreset = try AVCaptureSession.Preset(withString: preset)
} catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
return invokeOnError(.parameter(.unsupportedOS(unionName: "Preset", receivedValue: preset, supportedOnOs: os)))
} catch {
return invokeOnError(.parameter(.invalid(unionName: "Preset", receivedValue: preset)))
}
if sessionPreset != nil {
if captureSession.canSetSessionPreset(sessionPreset!) {
captureSession.sessionPreset = sessionPreset!
} else {
// non-fatal error, so continue with configuration
invokeOnError(.format(.invalidPreset(preset: preset)))
}
}
}
// INPUTS
// Video Input
do {
if let videoDeviceInput = self.videoDeviceInput {
captureSession.removeInput(videoDeviceInput)
}
guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
return invokeOnError(.device(.invalid))
}
zoom = NSNumber(value: Double(videoDevice.neutralZoomPercent))
videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
guard captureSession.canAddInput(videoDeviceInput!) else {
return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "video-input")))
}
captureSession.addInput(videoDeviceInput!)
} catch {
return invokeOnError(.device(.invalid))
}
// Microphone (Audio Input)
do {
if let audioDeviceInput = self.audioDeviceInput {
captureSession.removeInput(audioDeviceInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
return invokeOnError(.device(.microphoneUnavailable))
}
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard captureSession.canAddInput(audioDeviceInput!) else {
return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
}
captureSession.addInput(audioDeviceInput!)
} catch {
return invokeOnError(.device(.invalid))
}
// OUTPUTS
if let photoOutput = self.photoOutput {
captureSession.removeOutput(photoOutput)
}
// Photo Output
photoOutput = AVCapturePhotoOutput()
photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
}
if #available(iOS 12.0, *) {
photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
}
guard captureSession.canAddOutput(photoOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
}
captureSession.addOutput(photoOutput!)
if videoDeviceInput!.device.position == .front {
photoOutput!.mirror()
}
// Video Output
if let movieOutput = self.movieOutput {
captureSession.removeOutput(movieOutput)
}
movieOutput = AVCaptureMovieFileOutput()
guard captureSession.canAddOutput(movieOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "movie-output")))
}
captureSession.addOutput(movieOutput!)
if videoDeviceInput!.device.position == .front {
movieOutput!.mirror()
}
// Barcode Scanning
if let metadataOutput = self.metadataOutput {
captureSession.removeOutput(metadataOutput)
}
if let scannableCodes = self.scannableCodes {
// scannableCodes prop is not nil, so enable barcode scanning.
guard onCodeScanned != nil else {
return invokeOnError(.parameter(.invalidCombination(provided: "scannableCodes", missing: "onCodeScanned")))
}
metadataOutput = AVCaptureMetadataOutput()
guard captureSession.canAddOutput(metadataOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "metadata-output")))
}
captureSession.addOutput(metadataOutput!)
metadataOutput!.setMetadataObjectsDelegate(self, queue: queue)
var objectTypes: [AVMetadataObject.ObjectType] = []
scannableCodes.forEach { code in
do {
objectTypes.append(try AVMetadataObject.ObjectType(withString: code))
} catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
invokeOnError(.parameter(.unsupportedOS(unionName: "CodeType", receivedValue: code, supportedOnOs: os)))
} catch {
invokeOnError(.parameter(.invalid(unionName: "CodeType", receivedValue: code)))
}
}
metadataOutput!.metadataObjectTypes = objectTypes
}
invokeOnInitialized()
isReady = true
ReactLogger.logJS(level: .info, message: "Session successfully configured!")
}
/**
Configures the Video Device to find the best matching Format.
*/
private final func configureFormat() {
ReactLogger.logJS(level: .info, message: "Configuring Format...")
guard let filter = self.format else {
// Format Filter was null. Ignore it.
return
}
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
if device.activeFormat.matchesFilter(filter) {
ReactLogger.log(level: .info, message: "Active format already matches filter.")
return
}
// get matching format
let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
guard let format = matchingFormats.first else {
return invokeOnError(.format(.invalidFormat))
}
do {
try device.lockForConfiguration()
device.activeFormat = format
device.unlockForConfiguration()
ReactLogger.logJS(level: .info, message: "Format successfully configured!")
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
/**
Configures the Video Device with the given FPS, HDR and ColorSpace.
*/
private final func configureDevice() {
ReactLogger.logJS(level: .info, message: "Configuring Device...")
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
do {
try device.lockForConfiguration()
if let fps = self.fps?.int32Value {
let duration = CMTimeMake(value: 1, timescale: fps)
device.activeVideoMinFrameDuration = duration
device.activeVideoMaxFrameDuration = duration
} else {
device.activeVideoMinFrameDuration = CMTime.invalid
device.activeVideoMaxFrameDuration = CMTime.invalid
}
if hdr != nil {
if hdr == true && !device.activeFormat.isVideoHDRSupported {
return invokeOnError(.format(.invalidHdr))
}
if !device.automaticallyAdjustsVideoHDREnabled {
if device.isVideoHDREnabled != hdr!.boolValue {
device.isVideoHDREnabled = hdr!.boolValue
}
}
}
if lowLightBoost != nil {
if lowLightBoost == true && !device.isLowLightBoostSupported {
return invokeOnError(.device(.lowLightBoostNotSupported))
}
if device.automaticallyEnablesLowLightBoostWhenAvailable != lowLightBoost!.boolValue {
device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue
}
}
if colorSpace != nil, let avColorSpace = try? AVCaptureColorSpace(string: String(colorSpace!)) {
device.activeColorSpace = avColorSpace
}
device.unlockForConfiguration()
ReactLogger.logJS(level: .info, message: "Device successfully configured!")
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
}

AVAudioSession+trySetAllowHaptics.swift

@@ -15,8 +15,8 @@ extension AVAudioSession {
*/
func trySetAllowHaptics(_ allowHaptics: Bool) {
if #available(iOS 13.0, *) {
- if !audioSession.allowHapticsAndSystemSoundsDuringRecording {
- try? audioSession.setAllowHapticsAndSystemSoundsDuringRecording(true)
+ if !self.allowHapticsAndSystemSoundsDuringRecording {
+ try? self.setAllowHapticsAndSystemSoundsDuringRecording(true)
}
}
}
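This hunk fixes a self-reference bug: inside an extension AVAudioSession there is no audioSession variable, the session is self. Note that even after the fix the allowHaptics parameter is ignored, since the body always passes true. A sketch that honors the parameter, assuming the same availability requirement, could be:

import AVFoundation

extension AVAudioSession {
    func trySetAllowHaptics(_ allowHaptics: Bool) {
        if #available(iOS 13.0, *) {
            // write the setting only when it actually differs
            if allowHapticsAndSystemSoundsDuringRecording != allowHaptics {
                try? setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
            }
        }
    }
}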

PhotoCaptureDelegate.swift

@@ -27,8 +27,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
defer {
delegatesReferences.removeAll(where: { $0 == self })
}
- if let error = error {
- return promise.reject(error: .capture(.unknown(message: error.description)), cause: error as NSError)
+ if let error = error as NSError? {
+ return promise.reject(error: .capture(.unknown(message: error.description)), cause: error)
}
let error = ErrorPointer(nilLiteral: ())
@@ -66,8 +66,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
defer {
delegatesReferences.removeAll(where: { $0 == self })
}
- if let error = error {
- return promise.reject(error: .capture(.unknown(message: error.description)), cause: error as NSError)
+ if let error = error as NSError? {
+ return promise.reject(error: .capture(.unknown(message: error.description)), cause: error)
}
}

CameraView+RecordVideo.swift

@@ -33,8 +33,8 @@ class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDeleg
self.resetTorchMode()
delegateReferences.removeAll(where: { $0 == self })
}
- if let error = error {
- return callback([NSNull(), makeReactError(.capture(.unknown(message: error.description)), cause: error as NSError)])
+ if let error = error as NSError? {
+ return callback([NSNull(), makeReactError(.capture(.unknown(message: error.description)), cause: error)])
}
let seconds = CMTimeGetSeconds(output.recordedDuration)
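The three delegate hunks above apply the same bridging idiom: error as NSError? converts an Optional<Error> to an Optional<NSError> in one step, so the bound value is already an NSError and the separate as NSError cast at each use site disappears. A standalone illustration (function name hypothetical):

import Foundation

func report(_ error: Error?) {
    // one bridge cast replaces `if let error = error` plus `error as NSError`
    if let nsError = error as NSError? {
        print("error domain: \(nsError.domain), code: \(nsError.code)")
    }
}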

VisionCamera.xcodeproj/project.pbxproj

@@ -8,6 +8,8 @@
/* Begin PBXBuildFile section */
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
+ B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
+ B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B887515F25E0102000DB86D6 /* CameraViewManager.m */; };
@@ -62,6 +64,8 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
+ B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
+ B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
B887515E25E0102000DB86D6 /* CameraBridge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraBridge.h; sourceTree = "<group>"; };
@@ -127,6 +131,8 @@
B887515E25E0102000DB86D6 /* CameraBridge.h */,
B887518325E0102000DB86D6 /* CameraError.swift */,
B887518425E0102000DB86D6 /* CameraView.swift */,
+ B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */,
+ B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */,
B887516C25E0102000DB86D6 /* CameraView+CodeScanning.swift */,
B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
@@ -289,6 +295,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
+ B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */,
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
@@ -310,6 +317,7 @@
B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
+ B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,