Refactor lifecycle vars
@@ -6,8 +6,6 @@
// Copyright © 2020 Facebook. All rights reserved.
//

// swiftlint:disable file_length

import AVFoundation
import Foundation
import UIKit
@@ -39,6 +37,7 @@ private let propsThatRequireDeviceReconfiguration = ["fps",
final class CameraView: UIView {
  // MARK: Lifecycle

  // pragma MARK: Setup
  override init(frame: CGRect) {
    super.init(frame: frame)
    videoPreviewLayer.session = captureSession
@@ -69,6 +68,83 @@ final class CameraView: UIView {
    fatalError("init(coder:) is not implemented.")
  }

  // MARK: Internal

  override class var layerClass: AnyClass {
    return AVCaptureVideoPreviewLayer.self
  }

  // pragma MARK: Exported Properties
  // props that require reconfiguring
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enableHighResolutionCapture: NSNumber? // nullable bool
  @objc var enablePortraitEffectsMatteDelivery = false
  @objc var preset: String?
  @objc var scannableCodes: [String]?
  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var fps: NSNumber?
  @objc var hdr: NSNumber? // nullable bool
  @objc var lowLightBoost: NSNumber? // nullable bool
  @objc var colorSpace: NSString?
  // other props
  @objc var isActive = false
  @objc var torch = "off"
  @objc var zoom: NSNumber = 0.0 // in percent
  // events
  @objc var onInitialized: RCTDirectEventBlock?
  @objc var onError: RCTDirectEventBlock?
  @objc var onCodeScanned: RCTBubblingEventBlock?

  // pragma MARK: Private Properties
  internal var isReady = false
  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
  // Capture Session
  internal let captureSession = AVCaptureSession()
  // Inputs
  internal var videoDeviceInput: AVCaptureDeviceInput?
  internal var audioDeviceInput: AVCaptureDeviceInput?
  // Outputs
  internal var photoOutput: AVCapturePhotoOutput?
  internal var movieOutput: AVCaptureMovieFileOutput?
  internal var metadataOutput: AVCaptureMetadataOutput?
  // CameraView+TakePhoto
  internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
  // CameraView+RecordVideo
  internal var recordingDelegateResolver: RCTPromiseResolveBlock?
  internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
  // CameraView+Zoom
  internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
  internal var pinchScaleOffset: CGFloat = 1.0

  @objc var enableZoomGesture = false {
    didSet {
      if enableZoomGesture {
        addPinchGestureRecognizer()
      } else {
        removePinchGestureRecognizer()
      }
    }
  }

  var isRunning: Bool {
    return captureSession.isRunning
  }

  /// Convenience wrapper to get layer as its statically known type.
  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    // swiftlint:disable force_cast
    return layer as! AVCaptureVideoPreviewLayer
  }

  override func removeFromSuperview() {
    ReactLogger.log(level: .info, message: "Removing Camera View...")
    captureSession.stopRunning()
    super.removeFromSuperview()
  }

  // pragma MARK: Props updating
  override final func didSetProps(_ changedProps: [String]!) {
    ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
@@ -124,81 +200,6 @@ final class CameraView: UIView {
    }
  }

  override func removeFromSuperview() {
    ReactLogger.log(level: .info, message: "Removing Camera View...")
    captureSession.stopRunning()
    super.removeFromSuperview()
  }

  // MARK: Internal

  // pragma MARK: Setup
  override class var layerClass: AnyClass {
    return AVCaptureVideoPreviewLayer.self
  }

  internal let captureSession = AVCaptureSession()

  // pragma MARK: Exported Properties
  // props that require reconfiguring
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enableHighResolutionCapture: NSNumber? // nullable bool
  @objc var enablePortraitEffectsMatteDelivery = false
  @objc var preset: String?
  @objc var scannableCodes: [String]?
  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var fps: NSNumber?
  @objc var hdr: NSNumber? // nullable bool
  @objc var lowLightBoost: NSNumber? // nullable bool
  @objc var colorSpace: NSString?
  // other props
  @objc var isActive = false
  @objc var torch = "off"
  @objc var zoom: NSNumber = 0.0 // in percent
  // events
  @objc var onInitialized: RCTDirectEventBlock?
  @objc var onError: RCTDirectEventBlock?
  @objc var onCodeScanned: RCTBubblingEventBlock?
  var isReady = false
  // pragma MARK: Private Properties
  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
  internal var videoDeviceInput: AVCaptureDeviceInput?
  internal var audioDeviceInput: AVCaptureDeviceInput?
  internal var photoOutput: AVCapturePhotoOutput?
  internal var movieOutput: AVCaptureMovieFileOutput?
  internal var metadataOutput: AVCaptureMetadataOutput?
  // CameraView+TakePhoto
  internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
  // CameraView+RecordVideo
  internal var recordingDelegateResolver: RCTPromiseResolveBlock?
  internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
  // CameraView+Zoom
  internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
  internal var pinchScaleOffset: CGFloat = 1.0

  @objc var enableZoomGesture = false {
    didSet {
      if enableZoomGesture {
        addPinchGestureRecognizer()
      } else {
        removePinchGestureRecognizer()
      }
    }
  }

  var isRunning: Bool {
    return captureSession.isRunning
  }

  /// Convenience wrapper to get layer as its statically known type.
  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    // swiftlint:disable force_cast
    return layer as! AVCaptureVideoPreviewLayer
  }

  internal final func setTorchMode(_ torchMode: String) {
    guard let device = videoDeviceInput?.device else {
      return invokeOnError(.session(.cameraNotReady))
@@ -260,5 +261,4 @@ final class CameraView: UIView {
    guard let onInitialized = self.onInitialized else { return }
    onInitialized([String: Any]())
  }

}