From b25cf6a04f328b016c0f436cee068a8d8df7a80c Mon Sep 17 00:00:00 2001
From: Marc Rousavy
Date: Fri, 26 Mar 2021 16:28:08 +0100
Subject: [PATCH] Refactor lifecycle vars

---
 ios/.swiftformat                            |   1 -
 ios/CameraView+AVAudioSession.swift         |  73 ++++----
 ios/CameraView+AVCaptureSession.swift       | 141 ++++++++--------
 ios/CameraView.swift                        | 156 +++++++++---------
 ios/CameraViewManager.swift                 |  22 ++-
 .../AVAudioSession+trySetAllowHaptics.swift |   4 +-
 6 files changed, 196 insertions(+), 201 deletions(-)

diff --git a/ios/.swiftformat b/ios/.swiftformat
index 3aadd00..ffb39d1 100644
--- a/ios/.swiftformat
+++ b/ios/.swiftformat
@@ -7,7 +7,6 @@
 --disable wrapMultilineStatementBraces
 
 --enable organizeDeclarations
---lifecycle didSetProps,requiresMainQueueSetup,view,methodQueue,getCameraView,removeFromSuperview
 
 --enable markTypes
 
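Dropping the project-specific `--lifecycle` list is what drives every move in the Swift files below: without it, SwiftFormat's organizeDeclarations rule falls back to its default grouping by member kind and visibility. As a rough sketch of the resulting file shape (illustrative only; the exact categories depend on the SwiftFormat version):

import UIKit

final class Example: UIView {
  // MARK: Lifecycle

  // init/deinit always sort to the top...
  override init(frame: CGRect) {
    super.init(frame: frame)
  }

  required init?(coder: NSCoder) {
    fatalError("init(coder:) is not implemented.")
  }

  // MARK: Internal

  // ...internal members follow...
  var isEnabled = false

  // MARK: Private

  // ...and private members sort last.
  private var counter = 0
}

This is the reshuffling visible in CameraView.swift and CameraViewManager.swift further down.
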
diff --git a/ios/CameraView+AVAudioSession.swift b/ios/CameraView+AVAudioSession.swift
index 61babec..c5ecbc6 100644
--- a/ios/CameraView+AVAudioSession.swift
+++ b/ios/CameraView+AVAudioSession.swift
@@ -6,52 +6,17 @@
 //  Copyright © 2021 Facebook. All rights reserved.
 //
 
-import Foundation
 import AVFoundation
+import Foundation
 
 /**
  Extension for CameraView that sets up the AVAudioSession.
  */
 extension CameraView {
-  @objc
-  func audioSessionInterrupted(notification: Notification) {
-    ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
-    guard let userInfo = notification.userInfo,
-          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
-          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
-      return
-    }
-    switch type {
-    case .began:
-      // TODO: Should we also disable the camera here? I think it will throw a runtime error
-      // disable audio session
-      try? AVAudioSession.sharedInstance().setActive(false)
-      break
-    case .ended:
-      guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
-      let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
-      if options.contains(.shouldResume) {
-        // restart audio session because interruption is over
-        configureAudioSession()
-      } else {
-        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
-      }
-      break
-    }
-  }
-
-  private final func setAutomaticallyConfiguresAudioSession(_ automaticallyConfiguresAudioSession: Bool) {
-    if captureSession.automaticallyConfiguresApplicationAudioSession != automaticallyConfiguresAudioSession {
-      captureSession.beginConfiguration()
-      captureSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresAudioSession
-      captureSession.commitConfiguration()
-    }
-  }
-
   /**
    Configures the Audio session to allow background-music playback while recording.
    */
-  internal final func configureAudioSession() {
+  final func configureAudioSession() {
     let start = DispatchTime.now()
     do {
       setAutomaticallyConfiguresAudioSession(false)
@@ -72,4 +37,38 @@ extension CameraView {
     let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
     ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
   }
+
+  private final func setAutomaticallyConfiguresAudioSession(_ automaticallyConfiguresAudioSession: Bool) {
+    if captureSession.automaticallyConfiguresApplicationAudioSession != automaticallyConfiguresAudioSession {
+      captureSession.beginConfiguration()
+      captureSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresAudioSession
+      captureSession.commitConfiguration()
+    }
+  }
+
+  @objc
+  func audioSessionInterrupted(notification: Notification) {
+    ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
+    guard let userInfo = notification.userInfo,
+          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
+          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
+      return
+    }
+    switch type {
+    case .began:
+      // TODO: Should we also disable the camera here? I think it will throw a runtime error
+      // disable audio session
+      try? AVAudioSession.sharedInstance().setActive(false)
+    case .ended:
+      guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
+      let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
+      if options.contains(.shouldResume) {
+        // restart audio session because interruption is over
+        configureAudioSession()
+      } else {
+        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
+      }
+    @unknown default: ()
+    }
+  }
 }
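Note that the relocated `audioSessionInterrupted(notification:)` handler only runs if the view is subscribed to `AVAudioSession.interruptionNotification`; that registration is not part of this diff. A minimal sketch of the assumed wiring (the helper name is hypothetical):

import AVFoundation

extension CameraView {
  /// Hypothetical setup call, not part of this patch: subscribe once
  /// (e.g. from init) so audioSessionInterrupted(notification:) fires.
  func observeAudioSessionInterruptions() {
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(audioSessionInterrupted),
                                           name: AVAudioSession.interruptionNotification,
                                           object: AVAudioSession.sharedInstance())
  }
}

The added `@unknown default` arm also makes the moved switch future-proof: interruption types introduced by later iOS SDKs compile without warnings and are simply ignored.
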
diff --git a/ios/CameraView+AVCaptureSession.swift b/ios/CameraView+AVCaptureSession.swift
index 92647fc..a2e011c 100644
--- a/ios/CameraView+AVCaptureSession.swift
+++ b/ios/CameraView+AVCaptureSession.swift
@@ -6,53 +6,35 @@
 //  Copyright © 2021 Facebook. All rights reserved.
 //
 
-import Foundation
 import AVFoundation
+import Foundation
 
 /**
  Extension for CameraView that sets up the AVCaptureSession, Device and Format.
  */
 extension CameraView {
-  @objc
-  func sessionRuntimeError(notification: Notification) {
-    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!")
-    guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
-      return
-    }
-
-    invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)
-
-    if isActive {
-      // restart capture session after an error occured
-      queue.async {
-        self.captureSession.startRunning()
-      }
-    }
-  }
-
   /**
    Configures the Capture Session.
    */
-  internal final func configureCaptureSession() {
+  final func configureCaptureSession() {
     ReactLogger.logJS(level: .info, message: "Configuring Session...")
     isReady = false
-
+
     #if targetEnvironment(simulator)
-    return invokeOnError(.device(.notAvailableOnSimulator))
+      return invokeOnError(.device(.notAvailableOnSimulator))
     #endif
-
+
     guard cameraId != nil else {
       return invokeOnError(.device(.noDevice))
    }
     let cameraId = self.cameraId! as String
-
+
     ReactLogger.log(level: .info, message: "Initializing Camera with device \(cameraId)...")
     captureSession.beginConfiguration()
     defer {
       captureSession.commitConfiguration()
     }
-
+
     if let preset = self.preset {
       var sessionPreset: AVCaptureSession.Preset?
       do {
@@ -71,7 +53,7 @@
         }
       }
     }
-
+
     // INPUTS
     // Video Input
     do {
@@ -90,7 +72,7 @@
     } catch {
       return invokeOnError(.device(.invalid))
     }
-
+
     // Microphone (Audio Input)
     do {
       if let audioDeviceInput = self.audioDeviceInput {
@@ -99,7 +81,7 @@
       guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
         return invokeOnError(.device(.microphoneUnavailable))
       }
-
+
       audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
       guard captureSession.canAddInput(audioDeviceInput!) else {
         return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
       }
@@ -108,7 +90,7 @@
     } catch {
       return invokeOnError(.device(.invalid))
     }
-
+
     // OUTPUTS
     if let photoOutput = self.photoOutput {
       captureSession.removeOutput(photoOutput)
     }
@@ -129,7 +111,7 @@
     if videoDeviceInput!.device.position == .front {
       photoOutput!.mirror()
     }
-
+
     // Video Output
     if let movieOutput = self.movieOutput {
       captureSession.removeOutput(movieOutput)
@@ -142,7 +124,7 @@
     if videoDeviceInput!.device.position == .front {
       movieOutput!.mirror()
     }
-
+
     // Barcode Scanning
     if let metadataOutput = self.metadataOutput {
       captureSession.removeOutput(metadataOutput)
     }
@@ -170,58 +152,24 @@
       }
       metadataOutput!.metadataObjectTypes = objectTypes
     }
-
+
     invokeOnInitialized()
     isReady = true
     ReactLogger.logJS(level: .info, message: "Session successfully configured!")
   }
-
-  /**
-   Configures the Video Device to find the best matching Format.
-   */
-  internal final func configureFormat() {
-    ReactLogger.logJS(level: .info, message: "Configuring Format...")
-    guard let filter = self.format else {
-      // Format Filter was null. Ignore it.
-      return
-    }
-    guard let device = videoDeviceInput?.device else {
-      return invokeOnError(.session(.cameraNotReady))
-    }
-
-    if device.activeFormat.matchesFilter(filter) {
-      ReactLogger.log(level: .info, message: "Active format already matches filter.")
-      return
-    }
-
-    // get matching format
-    let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
-    guard let format = matchingFormats.first else {
-      return invokeOnError(.format(.invalidFormat))
-    }
-
-    do {
-      try device.lockForConfiguration()
-      device.activeFormat = format
-      device.unlockForConfiguration()
-      ReactLogger.logJS(level: .info, message: "Format successfully configured!")
-    } catch let error as NSError {
-      return invokeOnError(.device(.configureError), cause: error)
-    }
-  }
-
+
   /**
    Configures the Video Device with the given FPS, HDR and ColorSpace.
    */
-  internal final func configureDevice() {
+  final func configureDevice() {
     ReactLogger.logJS(level: .info, message: "Configuring Device...")
     guard let device = videoDeviceInput?.device else {
       return invokeOnError(.session(.cameraNotReady))
     }
-
+
     do {
       try device.lockForConfiguration()
-
+
       if let fps = self.fps?.int32Value {
         let duration = CMTimeMake(value: 1, timescale: fps)
         device.activeVideoMinFrameDuration = duration
@@ -251,11 +199,62 @@
       if colorSpace != nil, let avColorSpace = try? AVCaptureColorSpace(string: String(colorSpace!)) {
         device.activeColorSpace = avColorSpace
       }
-
+
       device.unlockForConfiguration()
       ReactLogger.logJS(level: .info, message: "Device successfully configured!")
     } catch let error as NSError {
       return invokeOnError(.device(.configureError), cause: error)
     }
   }
+
+  /**
+   Configures the Video Device to find the best matching Format.
+   */
+  final func configureFormat() {
+    ReactLogger.logJS(level: .info, message: "Configuring Format...")
+    guard let filter = self.format else {
+      // Format Filter was null. Ignore it.
+      return
+    }
+    guard let device = videoDeviceInput?.device else {
+      return invokeOnError(.session(.cameraNotReady))
+    }
+
+    if device.activeFormat.matchesFilter(filter) {
+      ReactLogger.log(level: .info, message: "Active format already matches filter.")
+      return
+    }
+
+    // get matching format
+    let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
+    guard let format = matchingFormats.first else {
+      return invokeOnError(.format(.invalidFormat))
+    }
+
+    do {
+      try device.lockForConfiguration()
+      device.activeFormat = format
+      device.unlockForConfiguration()
+      ReactLogger.logJS(level: .info, message: "Format successfully configured!")
+    } catch let error as NSError {
+      return invokeOnError(.device(.configureError), cause: error)
+    }
+  }
+
+  @objc
+  func sessionRuntimeError(notification: Notification) {
+    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occurred!")
+    guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
+      return
+    }
+
+    invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)
+
+    if isActive {
+      // restart capture session after an error occurred
+      queue.async {
+        self.captureSession.startRunning()
+      }
+    }
+  }
 }
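Both relocated functions follow AVCaptureDevice's locking contract: `activeFormat`, the frame durations and the color space may only be mutated between `lockForConfiguration()` and `unlockForConfiguration()`. `matchesFilter(_:)` and `isBetterThan(_:)` are project-internal extensions; a generic version of the same select-then-apply pattern, using raw pixel dimensions instead of those helpers (a sketch only, not this project's code):

import AVFoundation
import CoreMedia

/// Picks the highest-resolution format and applies it under the
/// required configuration lock. Illustrative; real code would keep
/// the richer error mapping seen above.
func applyLargestFormat(to device: AVCaptureDevice) throws {
  let best = device.formats.max { lhs, rhs in
    let l = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription)
    let r = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription)
    return Int(l.width) * Int(l.height) < Int(r.width) * Int(r.height)
  }
  guard let format = best else { return }
  try device.lockForConfiguration()
  device.activeFormat = format
  device.unlockForConfiguration()
}
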
diff --git a/ios/CameraView.swift b/ios/CameraView.swift
index 706d6c1..7189a27 100644
--- a/ios/CameraView.swift
+++ b/ios/CameraView.swift
@@ -6,8 +6,6 @@
 //  Copyright © 2020 Facebook. All rights reserved.
 //
 
-// swiftlint:disable file_length
-
 import AVFoundation
 import Foundation
 import UIKit
@@ -39,6 +37,7 @@ private let propsThatRequireDeviceReconfiguration = ["fps",
 final class CameraView: UIView {
   // MARK: Lifecycle
 
+  // pragma MARK: Setup
   override init(frame: CGRect) {
     super.init(frame: frame)
     videoPreviewLayer.session = captureSession
@@ -69,6 +68,83 @@
     fatalError("init(coder:) is not implemented.")
   }
 
+  // MARK: Internal
+
+  override class var layerClass: AnyClass {
+    return AVCaptureVideoPreviewLayer.self
+  }
+
+  // pragma MARK: Exported Properties
+  // props that require reconfiguring
+  @objc var cameraId: NSString?
+  @objc var enableDepthData = false
+  @objc var enableHighResolutionCapture: NSNumber? // nullable bool
+  @objc var enablePortraitEffectsMatteDelivery = false
+  @objc var preset: String?
+  @objc var scannableCodes: [String]?
+  // props that require format reconfiguring
+  @objc var format: NSDictionary?
+  @objc var fps: NSNumber?
+  @objc var hdr: NSNumber? // nullable bool
+  @objc var lowLightBoost: NSNumber? // nullable bool
+  @objc var colorSpace: NSString?
+  // other props
+  @objc var isActive = false
+  @objc var torch = "off"
+  @objc var zoom: NSNumber = 0.0 // in percent
+  // events
+  @objc var onInitialized: RCTDirectEventBlock?
+  @objc var onError: RCTDirectEventBlock?
+  @objc var onCodeScanned: RCTBubblingEventBlock?
+
+  // pragma MARK: Private Properties
+  internal var isReady = false
+  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
+  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
+  // Capture Session
+  internal let captureSession = AVCaptureSession()
+  // Inputs
+  internal var videoDeviceInput: AVCaptureDeviceInput?
+  internal var audioDeviceInput: AVCaptureDeviceInput?
+  // Outputs
+  internal var photoOutput: AVCapturePhotoOutput?
+  internal var movieOutput: AVCaptureMovieFileOutput?
+  internal var metadataOutput: AVCaptureMetadataOutput?
+  // CameraView+TakePhoto
+  internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
+  // CameraView+RecordVideo
+  internal var recordingDelegateResolver: RCTPromiseResolveBlock?
+  internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
+  // CameraView+Zoom
+  internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
+  internal var pinchScaleOffset: CGFloat = 1.0
+
+  @objc var enableZoomGesture = false {
+    didSet {
+      if enableZoomGesture {
+        addPinchGestureRecognizer()
+      } else {
+        removePinchGestureRecognizer()
+      }
+    }
+  }
+
+  var isRunning: Bool {
+    return captureSession.isRunning
+  }
+
+  /// Convenience wrapper to get layer as its statically known type.
+  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
+    // swiftlint:disable force_cast
+    return layer as! AVCaptureVideoPreviewLayer
+  }
+
+  override func removeFromSuperview() {
+    ReactLogger.log(level: .info, message: "Removing Camera View...")
+    captureSession.stopRunning()
+    super.removeFromSuperview()
+  }
+
   // pragma MARK: Props updating
   override final func didSetProps(_ changedProps: [String]!) {
     ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
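The body of `didSetProps` is largely outside these hunks. Judging from the prop groups declared above and the configure functions in the extensions, the dispatch is presumably shaped roughly like this (identifier names are guesses, not verbatim patch code):

// Map the changed props onto the cheapest sufficient reconfiguration,
// then run it on the serial camera queue.
let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
let shouldReconfigureDevice = shouldReconfigureFormat ||
  changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }

queue.async {
  if shouldReconfigure { self.configureCaptureSession() }
  if shouldReconfigureFormat { self.configureFormat() }
  if shouldReconfigureDevice { self.configureDevice() }
}
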
@@ -124,81 +200,6 @@
     }
   }
 
-  override func removeFromSuperview() {
-    ReactLogger.log(level: .info, message: "Removing Camera View...")
-    captureSession.stopRunning()
-    super.removeFromSuperview()
-  }
-
-  // MARK: Internal
-
-  // pragma MARK: Setup
-  override class var layerClass: AnyClass {
-    return AVCaptureVideoPreviewLayer.self
-  }
-
-  internal let captureSession = AVCaptureSession()
-
-  // pragma MARK: Exported Properties
-  // props that require reconfiguring
-  @objc var cameraId: NSString?
-  @objc var enableDepthData = false
-  @objc var enableHighResolutionCapture: NSNumber? // nullable bool
-  @objc var enablePortraitEffectsMatteDelivery = false
-  @objc var preset: String?
-  @objc var scannableCodes: [String]?
-  // props that require format reconfiguring
-  @objc var format: NSDictionary?
-  @objc var fps: NSNumber?
-  @objc var hdr: NSNumber? // nullable bool
-  @objc var lowLightBoost: NSNumber? // nullable bool
-  @objc var colorSpace: NSString?
-  // other props
-  @objc var isActive = false
-  @objc var torch = "off"
-  @objc var zoom: NSNumber = 0.0 // in percent
-  // events
-  @objc var onInitialized: RCTDirectEventBlock?
-  @objc var onError: RCTDirectEventBlock?
-  @objc var onCodeScanned: RCTBubblingEventBlock?
-  var isReady = false
-  // pragma MARK: Private Properties
-  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
-  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
-  internal var videoDeviceInput: AVCaptureDeviceInput?
-  internal var audioDeviceInput: AVCaptureDeviceInput?
-  internal var photoOutput: AVCapturePhotoOutput?
-  internal var movieOutput: AVCaptureMovieFileOutput?
-  internal var metadataOutput: AVCaptureMetadataOutput?
-  // CameraView+TakePhoto
-  internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
-  // CameraView+RecordVideo
-  internal var recordingDelegateResolver: RCTPromiseResolveBlock?
-  internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
-  // CameraView+Zoom
-  internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
-  internal var pinchScaleOffset: CGFloat = 1.0
-
-  @objc var enableZoomGesture = false {
-    didSet {
-      if enableZoomGesture {
-        addPinchGestureRecognizer()
-      } else {
-        removePinchGestureRecognizer()
-      }
-    }
-  }
-
-  var isRunning: Bool {
-    return captureSession.isRunning
-  }
-
-  /// Convenience wrapper to get layer as its statically known type.
-  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
-    // swiftlint:disable force_cast
-    return layer as! AVCaptureVideoPreviewLayer
-  }
-
   internal final func setTorchMode(_ torchMode: String) {
     guard let device = videoDeviceInput?.device else {
       return invokeOnError(.session(.cameraNotReady))
@@ -260,5 +261,4 @@ final class CameraView: UIView {
     guard let onInitialized = self.onInitialized else { return }
     onInitialized([String: Any]())
   }
-
 }
diff --git a/ios/CameraViewManager.swift b/ios/CameraViewManager.swift
index 10b8ab2..16a33b9 100644
--- a/ios/CameraViewManager.swift
+++ b/ios/CameraViewManager.swift
@@ -11,7 +11,11 @@ import Foundation
 
 @objc(CameraViewManager)
 final class CameraViewManager: RCTViewManager {
-  // MARK: Lifecycle
+  // MARK: Internal
+
+  override var methodQueue: DispatchQueue! {
+    return DispatchQueue.main
+  }
 
   override static func requiresMainQueueSetup() -> Bool {
     return true
@@ -22,17 +26,6 @@ final class CameraViewManager: RCTViewManager {
     return CameraView()
   }
 
-  private func getCameraView(withTag tag: NSNumber) -> CameraView {
-    // swiftlint:disable force_cast
-    return bridge.uiManager.view(forReactTag: tag) as! CameraView
-  }
-
-  // MARK: Internal
-
-  override var methodQueue: DispatchQueue! {
-    return DispatchQueue.main
-  }
-
   // pragma MARK: Exported Functions
   @objc
   final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
@@ -154,6 +147,11 @@ final class CameraViewManager: RCTViewManager {
 
   // MARK: Private
 
+  private func getCameraView(withTag tag: NSNumber) -> CameraView {
+    // swiftlint:disable force_cast
+    return bridge.uiManager.view(forReactTag: tag) as! CameraView
+  }
+
   private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
     var deviceTypes: [AVCaptureDevice.DeviceType] = []
     if #available(iOS 13.0, *) {
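Pinning `methodQueue` to `DispatchQueue.main` means every exported method starts on the UI thread, where `bridge.uiManager.view(forReactTag:)` is safe to call; camera work must then hop onto the view's own serial queue. A sketch of that hand-off inside the manager (the method name `focus` is hypothetical, not an export in this patch):

@objc
final func focus(_ node: NSNumber) {
  // Runs on methodQueue (main): resolving the React view is safe here.
  let view = getCameraView(withTag: node)
  // Session mutation belongs on the view's camera queue, not on main.
  view.queue.async {
    // e.g. view.captureSession.beginConfiguration() ...
  }
}
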
diff --git a/ios/Extensions/AVAudioSession+trySetAllowHaptics.swift b/ios/Extensions/AVAudioSession+trySetAllowHaptics.swift
index 23750c0..13677aa 100644
--- a/ios/Extensions/AVAudioSession+trySetAllowHaptics.swift
+++ b/ios/Extensions/AVAudioSession+trySetAllowHaptics.swift
@@ -6,8 +6,8 @@
 //  Copyright © 2021 Facebook. All rights reserved.
 //
 
-import Foundation
 import AVFoundation
+import Foundation
 
 extension AVAudioSession {
   /**
@@ -16,7 +16,7 @@ extension AVAudioSession {
   func trySetAllowHaptics(_ allowHaptics: Bool) {
     if #available(iOS 13.0, *) {
       if !self.allowHapticsAndSystemSoundsDuringRecording {
-        try? self.setAllowHapticsAndSystemSoundsDuringRecording(true)
+        try? self.setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
      }
    }
  }
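The only behavioral change in this last file: the setter previously hard-coded `true`, so `trySetAllowHaptics(false)` would have enabled haptics instead of leaving them off. With the parameter forwarded, call sites behave as named (usage below is illustrative):

import AVFoundation

let session = AVAudioSession.sharedInstance()
session.trySetAllowHaptics(true)  // enables haptics/system sounds while recording (iOS 13+)
session.trySetAllowHaptics(false) // now a no-op instead of accidentally enabling them

Note that the surrounding guard still only calls the setter while haptics are disallowed, so the method can enable them but never disable them again; that asymmetry is pre-existing and presumably intentional here.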