Refactor lifecycle vars

Marc Rousavy 2021-03-26 16:28:08 +01:00
parent 501827cb87
commit b25cf6a04f
6 changed files with 196 additions and 201 deletions

.swiftformat
View File

@ -7,7 +7,6 @@
--disable wrapMultilineStatementBraces
--enable organizeDeclarations
---lifecycle didSetProps,requiresMainQueueSetup,view,methodQueue,getCameraView,removeFromSuperview
--enable markTypes
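Note on the config change: with the custom --lifecycle method list removed, SwiftFormat's organizeDeclarations rule falls back to its default grouping of members by kind and visibility, and markTypes keeps the corresponding MARK comments in sync. That explains why the Swift hunks below are almost entirely declaration reorders rather than behavior changes. For illustration, the default grouping produces a shape like this (sketch only, not code from this commit):

import UIKit

final class Example: UIView {
  // MARK: Lifecycle

  override init(frame: CGRect) {
    super.init(frame: frame)
  }

  required init?(coder: NSCoder) {
    fatalError("init(coder:) is not implemented.")
  }

  // MARK: Internal

  var isRunning = false

  // MARK: Private

  private var counter = 0
}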

CameraView+AVAudioSession.swift
View File

@ -6,52 +6,17 @@
// Copyright © 2021 Facebook. All rights reserved.
//
-import Foundation
import AVFoundation
+import Foundation
/**
Extension for CameraView that sets up the AVAudioSession.
*/
extension CameraView {
@objc
func audioSessionInterrupted(notification: Notification) {
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
// TODO: Should we also disable the camera here? I think it will throw a runtime error
// disable audio session
try? AVAudioSession.sharedInstance().setActive(false)
break
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
// restart audio session because interruption is over
configureAudioSession()
} else {
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
}
break
}
}
private final func setAutomaticallyConfiguresAudioSession(_ automaticallyConfiguresAudioSession: Bool) {
if captureSession.automaticallyConfiguresApplicationAudioSession != automaticallyConfiguresAudioSession {
captureSession.beginConfiguration()
captureSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresAudioSession
captureSession.commitConfiguration()
}
}
/**
Configures the Audio session to allow background-music playback while recording.
*/
-internal final func configureAudioSession() {
+final func configureAudioSession() {
let start = DispatchTime.now()
do {
setAutomaticallyConfiguresAudioSession(false)
@ -72,4 +37,38 @@ extension CameraView {
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
}
private final func setAutomaticallyConfiguresAudioSession(_ automaticallyConfiguresAudioSession: Bool) {
if captureSession.automaticallyConfiguresApplicationAudioSession != automaticallyConfiguresAudioSession {
captureSession.beginConfiguration()
captureSession.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresAudioSession
captureSession.commitConfiguration()
}
}
@objc
func audioSessionInterrupted(notification: Notification) {
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
guard let userInfo = notification.userInfo,
let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
// TODO: Should we also disable the camera here? I think it will throw a runtime error
// disable audio session
try? AVAudioSession.sharedInstance().setActive(false)
case .ended:
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
// restart audio session because interruption is over
configureAudioSession()
} else {
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
}
@unknown default: ()
}
}
}
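A note for readers of this extension: the subscription for audioSessionInterrupted is not part of this diff. A handler like this is normally registered for AVAudioSession.interruptionNotification; a minimal sketch of such a registration site follows (where the real subscription happens in this codebase is an assumption):

import AVFoundation
import Foundation

extension CameraView {
  /// Hypothetical registration helper; the actual subscription site is
  /// outside this diff.
  func observeAudioSessionInterruptions() {
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(audioSessionInterrupted(notification:)),
                                           name: AVAudioSession.interruptionNotification,
                                           object: AVAudioSession.sharedInstance())
  }
}

The elided body of configureAudioSession presumably applies a session category that mixes with other audio, which is what its doc comment ("allow background-music playback while recording") promises; only the timing log survives in the hunk above.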

CameraView+AVCaptureSession.swift
View File

@ -6,35 +6,17 @@
// Copyright © 2021 Facebook. All rights reserved.
//
-import Foundation
import AVFoundation
+import Foundation
/**
Extension for CameraView that sets up the AVCaptureSession, Device and Format.
*/
extension CameraView {
@objc
func sessionRuntimeError(notification: Notification) {
ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occurred!")
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
}
/**
Configures the Capture Session.
*/
-internal final func configureCaptureSession() {
+final func configureCaptureSession() {
ReactLogger.logJS(level: .info, message: "Configuring Session...")
isReady = false
@ -176,44 +158,10 @@ extension CameraView {
ReactLogger.logJS(level: .info, message: "Session successfully configured!")
}
/**
Configures the Video Device to find the best matching Format.
*/
internal final func configureFormat() {
ReactLogger.logJS(level: .info, message: "Configuring Format...")
guard let filter = self.format else {
// Format Filter was null. Ignore it.
return
}
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
if device.activeFormat.matchesFilter(filter) {
ReactLogger.log(level: .info, message: "Active format already matches filter.")
return
}
// get matching format
let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
guard let format = matchingFormats.first else {
return invokeOnError(.format(.invalidFormat))
}
do {
try device.lockForConfiguration()
device.activeFormat = format
device.unlockForConfiguration()
ReactLogger.logJS(level: .info, message: "Format successfully configured!")
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
/**
Configures the Video Device with the given FPS, HDR and ColorSpace.
*/
-internal final func configureDevice() {
+final func configureDevice() {
ReactLogger.logJS(level: .info, message: "Configuring Device...")
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
@ -258,4 +206,55 @@ extension CameraView {
return invokeOnError(.device(.configureError), cause: error)
}
}
/**
Configures the Video Device to find the best matching Format.
*/
final func configureFormat() {
ReactLogger.logJS(level: .info, message: "Configuring Format...")
guard let filter = self.format else {
// Format Filter was null. Ignore it.
return
}
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
if device.activeFormat.matchesFilter(filter) {
ReactLogger.log(level: .info, message: "Active format already matches filter.")
return
}
// get matching format
let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
guard let format = matchingFormats.first else {
return invokeOnError(.format(.invalidFormat))
}
do {
try device.lockForConfiguration()
device.activeFormat = format
device.unlockForConfiguration()
ReactLogger.logJS(level: .info, message: "Format successfully configured!")
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
@objc
func sessionRuntimeError(notification: Notification) {
ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occurred!")
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
}
}
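configureFormat above leans on two helpers this diff never shows: matchesFilter(_:) and isBetterThan(_:) on AVCaptureDevice.Format. The sketch below only illustrates the contract those names imply; the repo's real criteria are not visible here, so both bodies are assumptions:

import AVFoundation

extension AVCaptureDevice.Format {
  /// Sketch: check a JS-provided filter dictionary against this format.
  /// The "photoHeight" key is an illustrative assumption.
  func matchesFilterSketch(_ filter: NSDictionary) -> Bool {
    if let photoHeight = filter["photoHeight"] as? NSNumber {
      guard highResolutionStillImageDimensions.height >= photoHeight.int32Value else {
        return false
      }
    }
    return true
  }

  /// Sketch: prefer the format with the larger video resolution.
  func isBetterThanSketch(_ other: AVCaptureDevice.Format) -> Bool {
    let lhs = CMVideoFormatDescriptionGetDimensions(formatDescription)
    let rhs = CMVideoFormatDescriptionGetDimensions(other.formatDescription)
    return lhs.width * lhs.height > rhs.width * rhs.height
  }
}

sessionRuntimeError, meanwhile, follows the same observer pattern as the audio-session handler, keyed on the AVCaptureSessionRuntimeError notification whose userInfo carries the AVCaptureSessionErrorKey used in the guard above.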

CameraView.swift
View File

@ -6,8 +6,6 @@
// Copyright © 2020 Facebook. All rights reserved.
//
// swiftlint:disable file_length
import AVFoundation
import Foundation
import UIKit
@ -39,6 +37,7 @@ private let propsThatRequireDeviceReconfiguration = ["fps",
final class CameraView: UIView {
// MARK: Lifecycle
// pragma MARK: Setup
override init(frame: CGRect) {
super.init(frame: frame)
videoPreviewLayer.session = captureSession
@ -69,6 +68,83 @@ final class CameraView: UIView {
fatalError("init(coder:) is not implemented.")
}
// MARK: Internal
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighResolutionCapture: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
@objc var scannableCodes: [String]?
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 0.0 // in percent
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@objc var onCodeScanned: RCTBubblingEventBlock?
// pragma MARK: Private Properties
internal var isReady = false
/// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
// Capture Session
internal let captureSession = AVCaptureSession()
// Inputs
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
// Outputs
internal var photoOutput: AVCapturePhotoOutput?
internal var movieOutput: AVCaptureMovieFileOutput?
internal var metadataOutput: AVCaptureMetadataOutput?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+RecordVideo
internal var recordingDelegateResolver: RCTPromiseResolveBlock?
internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
@objc var enableZoomGesture = false {
didSet {
if enableZoomGesture {
addPinchGestureRecognizer()
} else {
removePinchGestureRecognizer()
}
}
}
var isRunning: Bool {
return captureSession.isRunning
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override func removeFromSuperview() {
ReactLogger.log(level: .info, message: "Removing Camera View...")
captureSession.stopRunning()
super.removeFromSuperview()
}
// pragma MARK: Props updating
override final func didSetProps(_ changedProps: [String]!) {
ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
@ -124,81 +200,6 @@ final class CameraView: UIView {
}
}
override func removeFromSuperview() {
ReactLogger.log(level: .info, message: "Removing Camera View...")
captureSession.stopRunning()
super.removeFromSuperview()
}
// MARK: Internal
// pragma MARK: Setup
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
internal let captureSession = AVCaptureSession()
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighResolutionCapture: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
@objc var scannableCodes: [String]?
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 0.0 // in percent
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@objc var onCodeScanned: RCTBubblingEventBlock?
var isReady = false
// pragma MARK: Private Properties
/// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var movieOutput: AVCaptureMovieFileOutput?
internal var metadataOutput: AVCaptureMetadataOutput?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+RecordVideo
internal var recordingDelegateResolver: RCTPromiseResolveBlock?
internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
@objc var enableZoomGesture = false {
didSet {
if enableZoomGesture {
addPinchGestureRecognizer()
} else {
removePinchGestureRecognizer()
}
}
}
var isRunning: Bool {
return captureSession.isRunning
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
@ -260,5 +261,4 @@ final class CameraView: UIView {
guard let onInitialized = self.onInitialized else { return }
onInitialized([String: Any]())
}
}
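The didSetProps body is cut off by the hunk above, but the surviving context line (propsThatRequireDeviceReconfiguration = ["fps", ...) gives away the pattern: changed prop names are matched against grouped lists and the matching configure* call is dispatched onto the camera queue. A minimal sketch of that dispatch follows; the list contents and exact control flow are assumptions, with only "fps" visible in this diff:

import Foundation

// Grouped prop lists; only "fps" is confirmed by the hunk context above.
private let propsThatRequireReconfigurationSketch = ["cameraId", "preset"]
private let propsThatRequireDeviceReconfigurationSketch = ["fps", "hdr", "lowLightBoost", "colorSpace"]

extension CameraView {
  /// Sketch of the dispatch that didSetProps presumably performs.
  func applyChangedPropsSketch(_ changedProps: [String]) {
    let changed = Set(changedProps)
    let reconfigureSession = !changed.isDisjoint(with: propsThatRequireReconfigurationSketch)
    let reconfigureDevice = !changed.isDisjoint(with: propsThatRequireDeviceReconfigurationSketch)
    guard reconfigureSession || reconfigureDevice else { return }
    queue.async {
      if reconfigureSession {
        self.configureCaptureSession()
      }
      if reconfigureDevice {
        self.configureDevice()
      }
    }
  }
}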

CameraViewManager.swift
View File

@ -11,7 +11,11 @@ import Foundation
@objc(CameraViewManager)
final class CameraViewManager: RCTViewManager {
// MARK: Lifecycle
// MARK: Internal
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
}
override static func requiresMainQueueSetup() -> Bool {
return true
@ -22,17 +26,6 @@ final class CameraViewManager: RCTViewManager {
return CameraView()
}
private func getCameraView(withTag tag: NSNumber) -> CameraView {
// swiftlint:disable force_cast
return bridge.uiManager.view(forReactTag: tag) as! CameraView
}
// MARK: Internal
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
}
// pragma MARK: Exported Functions
@objc
final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
@ -154,6 +147,11 @@ final class CameraViewManager: RCTViewManager {
// MARK: Private
private func getCameraView(withTag tag: NSNumber) -> CameraView {
// swiftlint:disable force_cast
return bridge.uiManager.view(forReactTag: tag) as! CameraView
}
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
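One design note on this manager before the next file: methodQueue is pinned to DispatchQueue.main, so exported methods arrive where the UIManager lookup in getCameraView (with its deliberate force_cast) is safe, and heavy work is then handed off to the view's own camera queue. A hedged sketch of that flow inside an exported method; this is not one of the methods in the commit, and the body is an assumption:

// Inside CameraViewManager (sketch only).
@objc
final func exampleExportedMethod(_ node: NSNumber,
                                 resolve: @escaping RCTPromiseResolveBlock,
                                 reject: @escaping RCTPromiseRejectBlock) {
  let view = getCameraView(withTag: node) // safe: methodQueue is main
  view.queue.async {
    // ...actual camera work runs off the main thread...
    resolve(nil)
  }
}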

AVAudioSession+trySetAllowHaptics.swift
View File

@ -6,8 +6,8 @@
// Copyright © 2021 Facebook. All rights reserved.
//
-import Foundation
import AVFoundation
+import Foundation
extension AVAudioSession {
/**
@ -16,7 +16,7 @@ extension AVAudioSession {
func trySetAllowHaptics(_ allowHaptics: Bool) {
if #available(iOS 13.0, *) {
if !self.allowHapticsAndSystemSoundsDuringRecording {
-try? self.setAllowHapticsAndSystemSoundsDuringRecording(true)
+try? self.setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
}
}
}
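Beyond the import reorder, this last hunk fixes a genuine bug: the guarded call previously hard-coded true instead of forwarding the allowHaptics parameter. A short usage sketch (the call site is an assumption, not code from this repo):

import AVFoundation

// Hypothetical call site: keep system sounds and haptics enabled while recording.
AVAudioSession.sharedInstance().trySetAllowHaptics(true)

One quirk survives the fix: the setter only runs while allowHapticsAndSystemSoundsDuringRecording is currently false, so trySetAllowHaptics(false) can never switch the flag back off.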