react-native-vision-camera/package/ios/CameraView.swift

//
// CameraView.swift
// mrousavy
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation
import UIKit

//
// TODOs for the CameraView which are currently too hard to implement either because of AVFoundation's limitations, or my brain capacity
//
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
// CameraView+TakePhoto
// TODO: Photo HDR
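// didSetProps checks changed prop names against these two lists: props in the first list require
// rebuilding the whole capture session, props in the second only require reconfiguring the
// capture device (format changes are handled separately via the "format" prop).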
private let propsThatRequireReconfiguration = ["cameraId",
                                               "enableDepthData",
                                               "enableHighQualityPhotos",
                                               "enablePortraitEffectsMatteDelivery",
                                               "photo",
                                               "video",
                                               "enableFrameProcessor",
                                               "hdr",
                                               "pixelFormat"]
private let propsThatRequireDeviceReconfiguration = ["fps",
                                                     "lowLightBoost"]

// MARK: - CameraView
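/// The native UIView backing the camera component on iOS. It owns the video and audio
/// AVCaptureSessions, mirrors the JS-side configuration as `@objc` props, and reports back to JS
/// through the `onInitialized`, `onError` and `onViewReady` events.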
public final class CameraView: UIView {
  // pragma MARK: React Properties

  // props that require reconfiguring
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enableHighQualityPhotos: NSNumber? // nullable bool
  @objc var enablePortraitEffectsMatteDelivery = false

  // use cases
  @objc var photo: NSNumber? // nullable bool
  @objc var video: NSNumber? // nullable bool
  @objc var audio: NSNumber? // nullable bool
  @objc var enableFrameProcessor = false
  @objc var pixelFormat: NSString?

  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var fps: NSNumber?
  @objc var hdr: NSNumber? // nullable bool
  @objc var lowLightBoost: NSNumber? // nullable bool
  @objc var orientation: NSString?

  // other props
  @objc var isActive = false
  @objc var torch = "off"
  @objc var zoom: NSNumber = 1.0 // in "factor"
  @objc var enableFpsGraph = false
  @objc var videoStabilizationMode: NSString?

  // events
  @objc var onInitialized: RCTDirectEventBlock?
  @objc var onError: RCTDirectEventBlock?
  @objc var onViewReady: RCTDirectEventBlock?

  // zoom
  @objc var enableZoomGesture = false {
    didSet {
      if enableZoomGesture {
        addPinchGestureRecognizer()
      } else {
        removePinchGestureRecognizer()
      }
    }
  }
  // pragma MARK: Internal Properties
  var isMounted = false
  var isReady = false

  // Capture Session
  let captureSession = AVCaptureSession()
  let audioCaptureSession = AVCaptureSession()

  // Inputs & Outputs
  var videoDeviceInput: AVCaptureDeviceInput?
  var audioDeviceInput: AVCaptureDeviceInput?
  var photoOutput: AVCapturePhotoOutput?
  var videoOutput: AVCaptureVideoDataOutput?
  var audioOutput: AVCaptureAudioDataOutput?

  // CameraView+RecordView (+ Frame Processor)
  var isRecording = false
  var recordingSession: RecordingSession?
  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
    @objc public var frameProcessor: FrameProcessor?
  #endif

  // CameraView+Zoom
  var pinchGestureRecognizer: UIPinchGestureRecognizer?
  var pinchScaleOffset: CGFloat = 1.0

  var previewView: PreviewView
  #if DEBUG
    var fpsGraph: RCTFPSGraph?
  #endif

  /// Returns whether the AVCaptureSession is currently running (reflected by isActive)
  var isRunning: Bool {
    return captureSession.isRunning
  }
  // pragma MARK: Setup

  override public init(frame: CGRect) {
    previewView = PreviewView(frame: frame, session: captureSession)
    super.init(frame: frame)
    addSubview(previewView)

    // Observe runtime errors on both capture sessions, and audio session interruptions
    // (e.g. incoming phone calls).
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: captureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: audioCaptureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(audioSessionInterrupted),
                                           name: AVAudioSession.interruptionNotification,
                                           object: AVAudioSession.sharedInstance())
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not implemented.")
  }

  deinit {
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: captureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: audioCaptureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: AVAudioSession.interruptionNotification,
                                              object: AVAudioSession.sharedInstance())
  }

  override public func willMove(toSuperview newSuperview: UIView?) {
    super.willMove(toSuperview: newSuperview)
    if newSuperview != nil {
      // Notify JS once the view has been attached to a superview for the first time.
      if !isMounted {
        isMounted = true
        onViewReady?(nil)
      }
    }
  }

  override public func layoutSubviews() {
    previewView.frame = frame
    previewView.bounds = bounds
  }

  // pragma MARK: Props updating
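  /// Called by React Native whenever one or more props changed. Derives a set of "should..."
  /// flags from the changed prop names and then, on the camera queue, rebuilds the capture
  /// session, format or device as needed and applies the lighter updates (video stabilization,
  /// zoom, isActive, orientation, torch).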
  override public final func didSetProps(_ changedProps: [String]!) {
    ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")

    let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
    let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
    let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }
    let shouldReconfigureAudioSession = changedProps.contains("audio")

    let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice

    let shouldCheckActive = willReconfigure || changedProps.contains("isActive") || captureSession.isRunning != isActive
    let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
    let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
    let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
    let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")

    if changedProps.contains("enableFpsGraph") {
      DispatchQueue.main.async {
        self.setupFpsGraph()
      }
    }

    if shouldReconfigure ||
      shouldReconfigureAudioSession ||
      shouldCheckActive ||
      shouldUpdateTorch ||
      shouldUpdateZoom ||
      shouldReconfigureFormat ||
      shouldReconfigureDevice ||
      shouldUpdateVideoStabilization ||
      shouldUpdateOrientation {
      CameraQueues.cameraQueue.async {
        // Video Configuration
        if shouldReconfigure {
          self.configureCaptureSession()
        }
        if shouldReconfigureFormat {
          self.configureFormat()
        }
        if shouldReconfigureDevice {
          self.configureDevice()
        }
        if shouldUpdateVideoStabilization, let videoStabilizationMode = self.videoStabilizationMode as String? {
          self.captureSession.setVideoStabilizationMode(videoStabilizationMode)
        }

        if shouldUpdateZoom {
          let zoomClamped = max(min(CGFloat(self.zoom.doubleValue), self.maxAvailableZoom), self.minAvailableZoom)
          self.zoom(factor: zoomClamped, animated: false)
          self.pinchScaleOffset = zoomClamped
        }

        if shouldCheckActive && self.captureSession.isRunning != self.isActive {
          if self.isActive {
            ReactLogger.log(level: .info, message: "Starting Session...")
            self.captureSession.startRunning()
            ReactLogger.log(level: .info, message: "Started Session!")
          } else {
            ReactLogger.log(level: .info, message: "Stopping Session...")
            self.captureSession.stopRunning()
            ReactLogger.log(level: .info, message: "Stopped Session!")
          }
        }

        if shouldUpdateOrientation {
          self.updateOrientation()
        }

        // This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
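        // A possible alternative (not what this code does): instead of a fixed delay, one could
        // observe .AVCaptureSessionDidStartRunning and apply the torch mode once the session
        // actually reports that it is running, e.g.:
        //
        //   let token = NotificationCenter.default.addObserver(forName: .AVCaptureSessionDidStartRunning,
        //                                                      object: self.captureSession,
        //                                                      queue: nil) { [weak self] _ in
        //     guard let self = self else { return }
        //     self.setTorchMode(self.torch)
        //   }
        //   // (the returned token would need to be stored and removed again, e.g. in deinit)
        //
        // The fixed 0.1s delay below is the approach this implementation uses.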
        if shouldUpdateTorch {
          CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
            self.setTorchMode(self.torch)
          }
        }
      }

      // Audio Configuration
      if shouldReconfigureAudioSession {
        CameraQueues.audioQueue.async {
          self.configureAudioSession()
        }
      }
    }
  }
  func setupFpsGraph() {
    #if DEBUG
      if enableFpsGraph {
        if fpsGraph != nil { return }
        fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
        fpsGraph!.layer.zPosition = 9999.0
        addSubview(fpsGraph!)
      } else {
        fpsGraph?.removeFromSuperview()
        fpsGraph = nil
      }
    #endif
  }

  // pragma MARK: Event Invokers
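  /// Emits the `onError` event to JS with the error's `code` and `message`, plus a `cause`
  /// object ({ code, domain, message, details }) when an underlying NSError is provided.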
  final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
    ReactLogger.log(level: .error, message: "Invoking onError(): \(error.message)")
    guard let onError = onError else { return }

    var causeDictionary: [String: Any]?
    if let cause = cause {
      causeDictionary = [
        "code": cause.code,
        "domain": cause.domain,
        "message": cause.description,
        "details": cause.userInfo,
      ]
    }
    onError([
      "code": error.code,
      "message": error.message,
      "cause": causeDictionary ?? NSNull(),
    ])
  }

  final func invokeOnInitialized() {
    ReactLogger.log(level: .info, message: "Camera initialized!")
    guard let onInitialized = onInitialized else { return }
    onInitialized([String: Any]())
  }
}