This commit is contained in:
Marc Rousavy 2021-02-19 16:28:05 +01:00
parent b2594f3e12
commit 00c8970366
43 changed files with 2656 additions and 43 deletions

3
ios/.swiftformat Normal file
View File

@ -0,0 +1,3 @@
--allman false
--indent 2
--exclude Pods,Generated

20
ios/CameraBridge.h Normal file
View File

@ -0,0 +1,20 @@
//
// CameraBridge.h
// Cuvent
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 Facebook. All rights reserved.
//

#ifndef CameraBridge_h
#define CameraBridge_h

#import <Foundation/Foundation.h>
#import <React/RCTViewManager.h>
#import <React/RCTUIManager.h>

// Bridging declaration: exposes an RCTViewManager subclass to Swift.
// NOTE(review): presumably the Swift CameraViewManager extends this class so it
// can be registered with React Native — confirm against the Swift side.
@interface CameraBridge: RCTViewManager

@end

#endif /* CameraBridge_h */

265
ios/CameraError.swift Normal file
View File

@ -0,0 +1,265 @@
//
// CameraError.swift
// Cuvent
//
// Created by Marc Rousavy on 14.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
/// Raised when the user has denied a required system permission.
enum PermissionError: String {
  case microphone = "microphone-permission-denied"
  case camera = "camera-permission-denied"

  /// Stable error-code identifier (the raw value).
  var code: String {
    return rawValue
  }

  /// Human-readable description of the denied permission.
  var message: String {
    switch self {
    case .camera:
      return "The Camera permission was denied!"
    case .microphone:
      return "The Microphone permission was denied!"
    }
  }
}
/// Describes an invalid or unsupported parameter received from JS.
enum ParameterError {
  case invalid(unionName: String, receivedValue: String)
  case unsupportedOS(unionName: String, receivedValue: String, supportedOnOs: String)
  case unsupportedOutput(outputDescriptor: String)
  case unsupportedInput(inputDescriptor: String)
  case invalidCombination(provided: String, missing: String)

  /// Stable error-code identifier for the parameter failure.
  var code: String {
    switch self {
    case .invalid:
      return "invalid-parameter"
    case .unsupportedOS:
      return "unsupported-os"
    case .unsupportedOutput:
      return "unsupported-output"
    case .unsupportedInput:
      return "unsupported-input"
    case .invalidCombination:
      return "invalid-combination"
    }
  }

  /// Human-readable description including the offending value(s).
  var message: String {
    switch self {
    case .invalid(let unionName, let receivedValue):
      return "The value \"\(receivedValue)\" could not be parsed to type \(unionName)!"
    case .unsupportedOS(let unionName, let receivedValue, let os):
      return "The value \"\(receivedValue)\" for type \(unionName) is not supported on the current iOS version! Required OS: \(os) or higher"
    case .unsupportedOutput(let output):
      return "The output \"\(output)\" is not supported!"
    case .unsupportedInput(let input):
      return "The input \"\(input)\" is not supported!"
    case .invalidCombination(let provided, let missing):
      return "Invalid combination! If \"\(provided)\" is provided, \"\(missing)\" also has to be set!"
    }
  }
}
/// Errors caused by the selected capture device or its capabilities.
enum DeviceError: String {
  case configureError = "configuration-error"
  case noDevice = "no-device"
  case invalid = "invalid-device"
  case torchUnavailable = "torch-unavailable"
  case microphoneUnavailable = "microphone-unavailable"
  case lowLightBoostNotSupported = "low-light-boost-not-supported"
  case focusNotSupported = "focus-not-supported"
  case notAvailableOnSimulator = "camera-not-available-on-simulator"

  /// Stable error-code identifier (the raw value).
  var code: String {
    return rawValue
  }

  /// Human-readable description of the device problem.
  var message: String {
    // Cases are listed in declaration order for easy scanning.
    switch self {
    case .configureError:
      return "Failed to lock the device for configuration."
    case .noDevice:
      return "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device."
    case .invalid:
      return "The given Camera device was invalid. Use `getAvailableCameraDevices()` to select a suitable Camera device."
    case .torchUnavailable:
      return "The current camera device does not have a torch."
    case .microphoneUnavailable:
      return "The microphone was unavailable."
    case .lowLightBoostNotSupported:
      return "The currently selected camera device does not support low-light boost! Make sure you select a device where `supportsLowLightBoost` is true!"
    case .focusNotSupported:
      return "The currently selected camera device does not support focussing!"
    case .notAvailableOnSimulator:
      return "The Camera is not available on the iOS Simulator!"
    }
  }
}
/// Errors related to the selected capture format (FPS, HDR, preset).
enum FormatError {
  case invalidFps(fps: Int)
  case invalidHdr
  case invalidFormat
  case invalidPreset(preset: String)

  /// Stable error-code identifier for the format failure.
  var code: String {
    switch self {
    case .invalidFormat:
      return "invalid-format"
    case .invalidFps:
      return "invalid-fps"
    case .invalidHdr:
      return "invalid-hdr"
    case .invalidPreset:
      return "invalid-preset"
    }
  }

  /// Human-readable description of the format failure.
  var message: String {
    switch self {
    case .invalidFormat:
      return "The given format was invalid. Did you check if the current device supports the given format by using `getAvailableCameraDevices(...)`?"
    case let .invalidFps(fps):
      return "The given FPS were not valid for the currently selected format. Make sure you select a format which `frameRateRanges` includes \(fps) FPS!"
    case .invalidHdr:
      // Fixed copy-paste error: `supportsPhotoHDR` is a property of the format,
      // not an element of `frameRateRanges`.
      return "The currently selected format does not support HDR capture! Make sure you select a format where `supportsPhotoHDR` is true!"
    case let .invalidPreset(preset):
      return "The preset \"\(preset)\" is not available for the current camera device."
    }
  }
}
/// Errors about the capture-session lifecycle.
enum SessionError: String {
  case cameraNotReady = "camera-not-ready"

  /// Stable error-code identifier (the raw value).
  var code: String {
    return rawValue
  }

  /// Human-readable description of the session failure.
  var message: String {
    switch self {
    case .cameraNotReady:
      return "The Camera is not ready yet! Wait for the onInitialized() callback!"
    }
  }
}
/// Errors that can occur while taking a photo or recording a video.
enum CaptureError {
  case invalidPhotoFormat
  case recordingInProgress
  case noRecordingInProgress
  case fileError
  case createTempFileError
  case invalidPhotoCodec
  case unknown(message: String? = nil)

  /// Stable error-code identifier for the capture failure.
  var code: String {
    switch self {
    case .invalidPhotoFormat:
      return "invalid-photo-format"
    case .recordingInProgress:
      return "recording-in-progress"
    case .noRecordingInProgress:
      return "no-recording-in-progress"
    case .fileError:
      return "file-io-error"
    case .createTempFileError:
      return "create-temp-file-error"
    case .invalidPhotoCodec:
      return "invalid-photo-codec"
    case .unknown:
      return "unknown"
    }
  }

  /// Human-readable description of the capture failure.
  var message: String {
    switch self {
    case .invalidPhotoFormat:
      return "The given photo format was invalid!"
    case .invalidPhotoCodec:
      return "The given photo codec was invalid!"
    case .recordingInProgress:
      return "There is already an active video recording in progress! Did you call startRecording() twice?"
    case .noRecordingInProgress:
      return "There was no active video recording in progress! Did you call stopRecording() twice?"
    case .fileError:
      // Fixed typo: "occured" -> "occurred".
      return "An unexpected File IO error occurred!"
    case .createTempFileError:
      return "Failed to create a temporary file!"
    case let .unknown(message: message):
      // Fixed typo: "occured" -> "occurred".
      return message ?? "An unknown error occurred while capturing a video/photo."
    }
  }
}
/// Errors originating from the native module infrastructure itself.
enum SystemError: String {
  case noManager = "no-camera-manager"

  /// Stable error-code identifier (the raw value).
  var code: String {
    return rawValue
  }

  /// Human-readable description of the system failure.
  var message: String {
    switch self {
    case .noManager:
      return "No Camera Manager was found."
    }
  }
}
/// The top-level error type thrown/emitted by the Camera. Wraps one of the
/// domain-specific error enums above and conforms to `Error` so it can be
/// thrown from Swift code and surfaced to JS via `code`/`message`.
enum CameraError: Error {
  case permission(_ id: PermissionError)
  case parameter(_ id: ParameterError)
  case device(_ id: DeviceError)
  case format(_ id: FormatError)
  case session(_ id: SessionError)
  case capture(_ id: CaptureError)
  case system(_ id: SystemError)
  case unknown(message: String? = nil)

  /// Namespaced error code in the form "<domain>/<code>", suitable for
  /// programmatic matching on the JS side.
  var code: String {
    switch self {
    case let .permission(id):
      return "permission/\(id.code)"
    case let .parameter(id):
      return "parameter/\(id.code)"
    case let .device(id):
      return "device/\(id.code)"
    case let .format(id):
      return "format/\(id.code)"
    case let .session(id):
      return "session/\(id.code)"
    case let .capture(id):
      return "capture/\(id.code)"
    case let .system(id):
      return "system/\(id.code)"
    case .unknown:
      return "unknown/unknown"
    }
  }

  /// Human-readable message of the wrapped error.
  var message: String {
    switch self {
    case let .permission(id):
      return id.message
    case let .parameter(id):
      return id.message
    case let .device(id):
      return id.message
    case let .format(id):
      return id.message
    case let .session(id):
      return id.message
    case let .capture(id):
      return id.message
    case let .system(id):
      return id.message
    case let .unknown(message: message):
      // Fixed typo: "occured" -> "occurred".
      return message ?? "An unexpected error occurred."
    }
  }
}

View File

@ -0,0 +1,45 @@
//
// CameraView+CodeScanning.swift
// Cuvent
//
// Created by Marc Rousavy on 16.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
extension CameraView: AVCaptureMetadataOutputObjectsDelegate {
  /// AVFoundation delegate callback invoked on `queue` whenever metadata objects
  /// (e.g. barcodes/QR codes) are detected. Converts each machine-readable code
  /// into a JS-friendly dictionary (non-code metadata objects map to `nil`) and
  /// forwards the result to the `onCodeScanned` event.
  func metadataOutput(_: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from _: AVCaptureConnection) {
    guard !metadataObjects.isEmpty else {
      return
    }

    var results: [[String: Any]?] = []
    results.reserveCapacity(metadataObjects.count)
    for metadataObject in metadataObjects {
      guard let readableCode = metadataObject as? AVMetadataMachineReadableCodeObject else {
        // Keep a nil placeholder so positions line up with the detected objects.
        results.append(nil)
        continue
      }
      let bounds: [String: Any] = [
        "minX": readableCode.bounds.minX,
        "minY": readableCode.bounds.minY,
        "maxX": readableCode.bounds.maxX,
        "maxY": readableCode.bounds.maxY,
        "width": readableCode.bounds.width,
        "height": readableCode.bounds.height,
      ]
      results.append([
        "code": readableCode.stringValue as Any,
        "type": readableCode.type.descriptor,
        "bounds": bounds,
      ])
    }

    invokeOnCodeScanned(codes: results)
  }

  /// Emits the scanned codes to JS, warning if nobody is listening (scanning
  /// without a listener wastes work and hints at a leaked listener).
  private func invokeOnCodeScanned(codes: [[String: Any]?]) {
    guard let onCodeScanned = self.onCodeScanned else {
      ReactLogger.log(level: .warning, message: "onCodeScanned was invoked with no listeners. This means that the Camera is unnecessarily scanning codes. This indicates a memory leak.", alsoLogToJS: true)
      return
    }
    onCodeScanned(["codes": codes])
  }
}

View File

@ -0,0 +1,42 @@
//
// CameraView+focus.swift
// Cuvent
//
// Created by Marc Rousavy on 19.02.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
extension CameraView {
  /// Focuses (and, when supported, exposes) the camera at the given point.
  ///
  /// - Parameter point: A point in the preview layer's coordinate space; it is
  ///   converted to the capture device's coordinate space before use.
  /// - Parameter promise: Resolved with `nil` on success, rejected with a
  ///   `CameraError` otherwise (via `withPromise`).
  func focus(point: CGPoint, promise: Promise) {
    withPromise(promise) {
      guard let device = self.videoDeviceInput?.device else {
        throw CameraError.session(.cameraNotReady)
      }
      guard device.isFocusPointOfInterestSupported else {
        throw CameraError.device(.focusNotSupported)
      }

      let devicePoint = self.videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: point)
      do {
        try device.lockForConfiguration()
        defer {
          device.unlockForConfiguration()
        }
        device.focusPointOfInterest = devicePoint
        device.focusMode = .continuousAutoFocus
        // Exposure point-of-interest is optional; only set it when supported.
        if device.isExposurePointOfInterestSupported {
          device.exposurePointOfInterest = devicePoint
          device.exposureMode = .continuousAutoExposure
        }
        return nil
      } catch {
        throw CameraError.device(.configureError)
      }
    }
  }
}

View File

@ -0,0 +1,55 @@
//
// CameraView+RecordVideo.swift
// Cuvent
//
// Created by Marc Rousavy on 16.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension CameraView {
  /// Starts recording a video into a temporary `.mov` file.
  ///
  /// Runs asynchronously on the camera `queue`. All results — including errors —
  /// are delivered through `callback`; here it is invoked as
  /// `callback([NSNull(), error])` on failure. NOTE(review): this looks like a
  /// `[value, error]` ordering — confirm against the JS caller.
  ///
  /// - Parameter options: Dictionary from JS; only the "flash" key is read here.
  /// - Parameter callback: RN-style response callback, also handed to the
  ///   recording delegate for completion.
  func startRecording(options: NSDictionary, callback: @escaping RCTResponseSenderBlock) {
    queue.async {
      guard let movieOutput = self.movieOutput else {
        return callback([NSNull(), makeReactError(.session(.cameraNotReady))])
      }
      if movieOutput.isRecording {
        return callback([NSNull(), makeReactError(.capture(.recordingInProgress))])
      }

      // RCTTempFilePath reports failure through an NSError out-pointer.
      let errorPointer = ErrorPointer(nilLiteral: ())
      guard let tempFilePath = RCTTempFilePath("mov", errorPointer) else {
        return callback([NSNull(), makeReactError(.capture(.createTempFileError), cause: errorPointer?.pointee)])
      }
      let tempURL = URL(string: "file://\(tempFilePath)")!
      if let flashMode = options["flash"] as? String {
        // use the torch as the video's flash
        self.setTorchMode(flashMode)
      }
      // The delegate restores the user's configured torch mode once the
      // recording finishes (in case the torch was used as "flash" above).
      movieOutput.startRecording(to: tempURL, recordingDelegate: RecordingDelegateWithCallback(callback: callback, resetTorchMode: {
        // reset torch in case it was used as the video's "flash"
        self.setTorchMode(self.torch)
      }))
      // TODO: The startRecording() func cannot be async because RN doesn't allow both a callback and a Promise in a single function. Wait for TurboModules?
      // return ["path": tempFilePath]
    }
  }

  /// Stops the currently running recording.
  ///
  /// Resolves `promise` with `nil` once `stopRecording()` has been requested;
  /// rejects with `cameraNotReady` / `noRecordingInProgress` otherwise.
  func stopRecording(promise: Promise) {
    queue.async {
      withPromise(promise) {
        guard let movieOutput = self.movieOutput else {
          throw CameraError.session(SessionError.cameraNotReady)
        }
        if !movieOutput.isRecording {
          throw CameraError.capture(CaptureError.noRecordingInProgress)
        }

        movieOutput.stopRecording()
        return nil
      }
    }
  }
}

View File

@ -0,0 +1,83 @@
//
// CameraView+TakePhoto.swift
// Cuvent
//
// Created by Marc Rousavy on 16.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
/// Options parsed from the JS `takePhoto()` options dictionary.
/// NOTE(review): this struct reads the "videoCodec" key, while `takePhoto`
/// itself reads "photoCodec" directly — verify which is intended.
struct TakePhotoOptions {
  var videoCodec: AVVideoCodecType?
  var qualityPrioritization: String?

  init(fromDictionary dictionary: NSDictionary) {
    if let codecString = dictionary.value(forKey: "videoCodec") as? String {
      videoCodec = AVVideoCodecType(withString: codecString)
    }
    self.qualityPrioritization = dictionary.value(forKey: "qualityPrioritization") as? String
  }
}
extension CameraView {
  /// Captures a single photo with the current `photoOutput`, applying the
  /// settings requested in `options`, and resolves/rejects `promise` through
  /// the `PhotoCaptureDelegate`.
  ///
  /// Keys read from `options`: "photoCodec", "flash", "qualityPrioritization",
  /// "enableAutoRedEyeReduction", "enableVirtualDeviceFusion",
  /// "enableAutoStabilization", "enableAutoDistortionCorrection".
  /// NOTE(review): the `TakePhotoOptions` struct in this file parses a
  /// "videoCodec" key but is not used here — confirm which key the JS side sends.
  func takePhoto(options: NSDictionary, promise: Promise) {
    queue.async {
      guard let photoOutput = self.photoOutput, let videoDeviceInput = self.videoDeviceInput else {
        return promise.reject(error: .session(.cameraNotReady))
      }

      var photoSettings = AVCapturePhotoSettings()
      if let photoCodecString = options["photoCodec"] as? String {
        guard let photoCodec = AVVideoCodecType(withString: photoCodecString) else {
          return promise.reject(error: .capture(.invalidPhotoCodec))
        }
        if photoOutput.availablePhotoCodecTypes.contains(photoCodec) {
          // Re-create the settings with the requested codec.
          photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: photoCodec])
        } else {
          return promise.reject(error: .parameter(.invalid(unionName: "PhotoCodec", receivedValue: photoCodecString)))
        }
      }

      // Flash is only configured when the device actually has one available.
      if videoDeviceInput.device.isFlashAvailable, let flash = options["flash"] as? String {
        guard let flashMode = AVCaptureDevice.FlashMode(withString: flash) else {
          return promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flash)))
        }
        photoSettings.flashMode = flashMode
      }

      // Mirror the output's configured capabilities onto this capture's settings.
      photoSettings.isHighResolutionPhotoEnabled = photoOutput.isHighResolutionCaptureEnabled
      if !photoSettings.__availablePreviewPhotoPixelFormatTypes.isEmpty {
        photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: photoSettings.__availablePreviewPhotoPixelFormatTypes.first!]
      }
      photoSettings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled
      photoSettings.embedsDepthDataInPhoto = photoSettings.isDepthDataDeliveryEnabled
      if #available(iOS 12.0, *) {
        photoSettings.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliveryEnabled
        photoSettings.embedsPortraitEffectsMatteInPhoto = photoSettings.isPortraitEffectsMatteDeliveryEnabled
      }

      if #available(iOS 13.0, *), let qualityPrioritization = options["qualityPrioritization"] as? String {
        guard let photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization(withString: qualityPrioritization) else {
          return promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: qualityPrioritization)))
        }
        photoSettings.photoQualityPrioritization = photoQualityPrioritization
      }
      if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
        photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction
      }
      if let enableVirtualDeviceFusion = options["enableVirtualDeviceFusion"] as? Bool {
        // Pre-iOS 13 the equivalent API is the dual-camera fusion flag.
        if #available(iOS 13.0, *) {
          photoSettings.isAutoVirtualDeviceFusionEnabled = enableVirtualDeviceFusion
        } else {
          photoSettings.isAutoDualCameraFusionEnabled = enableVirtualDeviceFusion
        }
      }
      if let enableAutoStabilization = options["enableAutoStabilization"] as? Bool {
        photoSettings.isAutoStillImageStabilizationEnabled = enableAutoStabilization
      }
      if #available(iOS 14.1, *), let enableAutoDistortionCorrection = options["enableAutoDistortionCorrection"] as? Bool {
        photoSettings.isAutoContentAwareDistortionCorrectionEnabled = enableAutoDistortionCorrection
      }

      photoOutput.capturePhoto(with: photoSettings, delegate: PhotoCaptureDelegate(promise: promise))
    }
  }
}

73
ios/CameraView+Zoom.swift Normal file
View File

@ -0,0 +1,73 @@
//
// CameraView+Zoom.swift
// Cuvent
//
// Created by Marc Rousavy on 18.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import Foundation
extension CameraView {
  /// Smallest zoom factor available on the current device (1 when no device is set).
  var minAvailableZoom: CGFloat {
    return videoDeviceInput?.device.minAvailableVideoZoomFactor ?? 1
  }

  /// Largest zoom factor supported by the device's active format (1 when no device is set).
  var maxAvailableZoom: CGFloat {
    return videoDeviceInput?.device.activeFormat.videoMaxZoomFactor ?? 1
  }

  /// Handles the pinch-to-zoom gesture by applying a clamped zoom factor,
  /// remembering the final scale when the gesture ends.
  @objc
  final func onPinch(_ gesture: UIPinchGestureRecognizer) {
    guard let device = videoDeviceInput?.device else {
      return
    }

    let upperBound = device.activeFormat.videoMaxZoomFactor
    let zoomScale = max(min(gesture.scale * pinchScaleOffset, upperBound), CGFloat(1.0))
    if gesture.state == .ended {
      // Persist the scale so the next pinch continues from here.
      pinchScaleOffset = zoomScale
      return
    }

    do {
      try device.lockForConfiguration()
      device.videoZoomFactor = zoomScale
      device.unlockForConfiguration()
    } catch {
      invokeOnError(.device(.configureError))
    }
  }

  /// Installs the pinch gesture recognizer (replacing any existing one).
  func addPinchGestureRecognizer() {
    removePinchGestureRecognizer()
    pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(onPinch(_:)))
    addGestureRecognizer(pinchGestureRecognizer!)
  }

  /// Removes the pinch gesture recognizer, if installed.
  func removePinchGestureRecognizer() {
    if let recognizer = self.pinchGestureRecognizer {
      removeGestureRecognizer(recognizer)
      self.pinchGestureRecognizer = nil
    }
  }

  /// Sets the device's zoom factor (clamped to [1, videoMaxZoomFactor]),
  /// optionally ramping to it smoothly.
  @objc
  final func zoom(factor: CGFloat, animated: Bool) {
    guard let device = videoDeviceInput?.device else {
      return
    }

    do {
      try device.lockForConfiguration()
      let targetZoom = max(min(factor, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0))
      if animated {
        device.ramp(toVideoZoomFactor: targetZoom, withRate: 1)
      } else {
        device.videoZoomFactor = targetZoom
      }
      device.unlockForConfiguration()
    } catch {
      invokeOnError(.device(.configureError))
    }
  }
}

469
ios/CameraView.swift Normal file
View File

@ -0,0 +1,469 @@
//
// CameraView.swift
// Cuvent
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
//
// TODOs for the CameraView which are currently too hard to implement either because of AVFoundation's limitations, or my brain capacity
//
// CameraView
// TODO: enableSmoothAutoFocus
// TODO: enableLowLightBoost
// TODO: focus(x, y)
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
// TODO: videoStabilizationMode
// CameraView+TakePhoto
// TODO: Photo HDR
// Props whose change requires a full capture-session rebuild.
private let propsThatRequireReconfiguration = ["cameraId", "enableDepthData", "enableHighResolutionCapture", "enablePortraitEffectsMatteDelivery", "preset", "onCodeScanned", "scannableCodes"]
// Props whose change only requires reconfiguring the capture device.
private let propsThatRequireDeviceReconfiguration = ["fps", "hdr", "lowLightBoost", "colorSpace"]

/// The native camera preview view. Owns the AVCaptureSession, its inputs and
/// outputs, and applies React props by diffing them in `didSetProps`.
/// Capture work runs on the serial `queue`; the photo/video/code-scanning
/// behavior lives in the CameraView+* extensions.
final class CameraView: UIView {
  // pragma MARK: Exported Properties
  // props that require reconfiguring
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enableHighResolutionCapture: NSNumber? // nullable bool
  @objc var enablePortraitEffectsMatteDelivery = false
  @objc var preset: String?
  @objc var scannableCodes: [String]?
  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var fps: NSNumber?
  @objc var hdr: NSNumber? // nullable bool
  @objc var lowLightBoost: NSNumber? // nullable bool
  @objc var colorSpace: NSString?
  // other props
  @objc var isActive = false
  @objc var torch = "off"
  @objc var zoom: NSNumber = 0.0 // in percent
  // events
  @objc var onInitialized: RCTDirectEventBlock?
  @objc var onError: RCTDirectEventBlock?
  @objc var onCodeScanned: RCTBubblingEventBlock?
  // Toggles the pinch-to-zoom gesture recognizer when the JS prop changes.
  @objc var enableZoomGesture: Bool = false {
    didSet {
      if enableZoomGesture {
        addPinchGestureRecognizer()
      } else {
        removePinchGestureRecognizer()
      }
    }
  }

  // `true` once configureCaptureSession() has completed successfully.
  var isReady: Bool = false
  var isRunning: Bool {
    return captureSession.isRunning
  }

  // pragma MARK: Private Properties
  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
  private let captureSession = AVCaptureSession()
  internal var videoDeviceInput: AVCaptureDeviceInput?
  internal var audioDeviceInput: AVCaptureDeviceInput?
  internal var photoOutput: AVCapturePhotoOutput?
  internal var movieOutput: AVCaptureMovieFileOutput?
  internal var metadataOutput: AVCaptureMetadataOutput?
  // CameraView+TakePhoto
  internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
  // CameraView+RecordVideo
  internal var recordingDelegateResolver: RCTPromiseResolveBlock?
  internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
  // CameraView+Zoom
  internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
  internal var pinchScaleOffset: CGFloat = 1.0

  // pragma MARK: Setup
  // Backs this view with an AVCaptureVideoPreviewLayer instead of a plain CALayer.
  override class var layerClass: AnyClass {
    return AVCaptureVideoPreviewLayer.self
  }

  /// Convenience wrapper to get layer as its statically known type.
  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    return layer as! AVCaptureVideoPreviewLayer
  }

  override init(frame: CGRect) {
    super.init(frame: frame)
    videoPreviewLayer.session = captureSession
    videoPreviewLayer.videoGravity = .resizeAspectFill
    videoPreviewLayer.frame = layer.bounds
    // Observe runtime errors so the session can be restarted (see sessionRuntimeError).
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: captureSession)
  }

  deinit {
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: captureSession)
  }

  override func removeFromSuperview() {
    // Stop streaming before the view is torn down.
    captureSession.stopRunning()
    super.removeFromSuperview()
  }

  /// Called when the capture session reports a runtime error. Restarts the
  /// session (when `isActive`) and forwards the error to JS via onError.
  @objc
  func sessionRuntimeError(notification: Notification) {
    guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
      return
    }
    if isActive {
      // restart capture session after an error occured
      queue.async {
        self.captureSession.startRunning()
      }
    }
    invokeOnError(.unknown(message: error.localizedDescription), cause: error as NSError)
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not implemented.")
  }

  // pragma MARK: Props updating
  /// Diffs the changed props and schedules only the necessary reconfiguration
  /// steps on `queue`, in dependency order: session -> format -> device ->
  /// zoom -> active state -> torch.
  override final func didSetProps(_ changedProps: [String]!) {
    let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
    let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
    let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }
    let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice
    let shouldCheckActive = willReconfigure || changedProps.contains("isActive") || captureSession.isRunning != isActive
    let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
    let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
    if shouldReconfigure || shouldCheckActive || shouldUpdateTorch || shouldUpdateZoom || shouldReconfigureFormat || shouldReconfigureDevice {
      queue.async {
        if shouldReconfigure {
          self.configureCaptureSession()
        }
        if shouldReconfigureFormat {
          self.configureFormat()
        }
        if shouldReconfigureDevice {
          self.configureDevice()
        }
        if shouldUpdateZoom {
          // The `zoom` prop is a percentage (0...1) mapped onto the device's
          // available zoom range.
          let zoomPercent = CGFloat(max(min(self.zoom.doubleValue, 1.0), 0.0))
          let zoomScaled = (zoomPercent * (self.maxAvailableZoom - self.minAvailableZoom)) + self.minAvailableZoom
          self.zoom(factor: zoomScaled, animated: false)
          self.pinchScaleOffset = zoomScaled
        }
        if shouldCheckActive && self.captureSession.isRunning != self.isActive {
          if self.isActive {
            self.captureSession.startRunning()
          } else {
            self.captureSession.stopRunning()
          }
        }
        // This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
        self.queue.asyncAfter(deadline: .now() + 0.1) {
          if shouldUpdateTorch {
            self.setTorchMode(self.torch)
          }
        }
      }
    }
  }

  // pragma MARK: Session, Device and Format Configuration
  /**
   Configures the Capture Session.
   */
  private final func configureCaptureSession() {
    isReady = false

    #if targetEnvironment(simulator)
      return invokeOnError(.device(.notAvailableOnSimulator))
    #endif

    guard cameraId != nil else {
      return invokeOnError(.device(.noDevice))
    }
    let cameraId = self.cameraId! as String

    ReactLogger.log(level: .info, message: "Initializing Camera with device \(cameraId)...")
    captureSession.beginConfiguration()
    defer {
      // Commit all changes at once, even on the early-return error paths below.
      captureSession.commitConfiguration()
    }

    // Session preset (optional, parsed from the `preset` string prop).
    if let preset = self.preset {
      var sessionPreset: AVCaptureSession.Preset?
      do {
        sessionPreset = try AVCaptureSession.Preset(withString: preset)
      } catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
        return invokeOnError(.parameter(.unsupportedOS(unionName: "Preset", receivedValue: preset, supportedOnOs: os)))
      } catch {
        return invokeOnError(.parameter(.invalid(unionName: "Preset", receivedValue: preset)))
      }
      if sessionPreset != nil {
        if captureSession.canSetSessionPreset(sessionPreset!) {
          captureSession.sessionPreset = sessionPreset!
        } else {
          // non-fatal error, so continue with configuration
          invokeOnError(.format(.invalidPreset(preset: preset)))
        }
      }
    }

    // INPUTS
    // Video Input
    do {
      if let videoDeviceInput = self.videoDeviceInput {
        captureSession.removeInput(videoDeviceInput)
      }
      guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
        return invokeOnError(.device(.invalid))
      }
      // Initialize the zoom prop to the device's neutral zoom percentage.
      zoom = NSNumber(value: Double(videoDevice.neutralZoomPercent))
      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
      guard captureSession.canAddInput(videoDeviceInput!) else {
        return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "video-input")))
      }
      captureSession.addInput(videoDeviceInput!)
    } catch {
      return invokeOnError(.device(.invalid))
    }

    // Microphone (Audio Input)
    do {
      if let audioDeviceInput = self.audioDeviceInput {
        captureSession.removeInput(audioDeviceInput)
      }
      guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
        return invokeOnError(.device(.microphoneUnavailable))
      }
      audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
      guard captureSession.canAddInput(audioDeviceInput!) else {
        return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
      }
      captureSession.addInput(audioDeviceInput!)
    } catch {
      return invokeOnError(.device(.invalid))
    }

    // OUTPUTS
    if let photoOutput = self.photoOutput {
      captureSession.removeOutput(photoOutput)
    }
    // Photo Output
    photoOutput = AVCapturePhotoOutput()
    photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
    if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
      photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
    }
    if #available(iOS 12.0, *) {
      photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
    }
    guard captureSession.canAddOutput(photoOutput!) else {
      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
    }
    captureSession.addOutput(photoOutput!)
    // Mirror the output for front-facing cameras.
    if videoDeviceInput!.device.position == .front {
      photoOutput!.mirror()
    }

    // Video Output
    if let movieOutput = self.movieOutput {
      captureSession.removeOutput(movieOutput)
    }
    movieOutput = AVCaptureMovieFileOutput()
    guard captureSession.canAddOutput(movieOutput!) else {
      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "movie-output")))
    }
    captureSession.addOutput(movieOutput!)
    if videoDeviceInput!.device.position == .front {
      movieOutput!.mirror()
    }

    // Barcode Scanning
    if let metadataOutput = self.metadataOutput {
      captureSession.removeOutput(metadataOutput)
    }
    if let scannableCodes = self.scannableCodes {
      // scannableCodes prop is not nil, so enable barcode scanning.
      guard onCodeScanned != nil else {
        return invokeOnError(.parameter(.invalidCombination(provided: "scannableCodes", missing: "onCodeScanned")))
      }
      metadataOutput = AVCaptureMetadataOutput()
      guard captureSession.canAddOutput(metadataOutput!) else {
        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "metadata-output")))
      }
      captureSession.addOutput(metadataOutput!)
      metadataOutput!.setMetadataObjectsDelegate(self, queue: queue)
      // Parse each requested code type; parse failures are reported but non-fatal.
      var objectTypes: [AVMetadataObject.ObjectType] = []
      scannableCodes.forEach { code in
        do {
          objectTypes.append(try AVMetadataObject.ObjectType(withString: code))
        } catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
          invokeOnError(.parameter(.unsupportedOS(unionName: "CodeType", receivedValue: code, supportedOnOs: os)))
        } catch {
          invokeOnError(.parameter(.invalid(unionName: "CodeType", receivedValue: code)))
        }
      }
      metadataOutput!.metadataObjectTypes = objectTypes
    }

    ReactLogger.log(level: .info, message: "Camera initialized!")
    invokeOnInitialized()
    isReady = true
  }

  /**
   Configures the Video Device to find the best matching Format.
   */
  private final func configureFormat() {
    guard let filter = self.format else {
      // Format Filter was null. Ignore it.
      return
    }
    guard let device = videoDeviceInput?.device else {
      return invokeOnError(.session(.cameraNotReady))
    }

    if device.activeFormat.matchesFilter(filter) {
      ReactLogger.log(level: .info, message: "Active format already matches filter.")
      return
    }

    // get matching format
    let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
    guard let format = matchingFormats.first else {
      return invokeOnError(.format(.invalidFormat))
    }

    do {
      try device.lockForConfiguration()
      device.activeFormat = format
      device.unlockForConfiguration()
    } catch let error as NSError {
      return invokeOnError(.device(.configureError), cause: error)
    }
  }

  /**
   Configures the Video Device with the given FPS, HDR and ColorSpace.
   */
  private final func configureDevice() {
    guard let device = videoDeviceInput?.device else {
      return invokeOnError(.session(.cameraNotReady))
    }

    do {
      try device.lockForConfiguration()
      if let fps = self.fps?.int32Value {
        // Fix both min and max frame duration to get a constant frame rate.
        let duration = CMTimeMake(value: 1, timescale: fps)
        device.activeVideoMinFrameDuration = duration
        device.activeVideoMaxFrameDuration = duration
      } else {
        // No fps prop: let the device pick its own frame durations.
        device.activeVideoMinFrameDuration = CMTime.invalid
        device.activeVideoMaxFrameDuration = CMTime.invalid
      }
      if hdr != nil {
        if hdr == true && !device.activeFormat.isVideoHDRSupported {
          return invokeOnError(.format(.invalidHdr))
        }
        if !device.automaticallyAdjustsVideoHDREnabled {
          if device.isVideoHDREnabled != hdr!.boolValue {
            device.isVideoHDREnabled = hdr!.boolValue
          }
        }
      }
      if lowLightBoost != nil {
        if lowLightBoost == true && !device.isLowLightBoostSupported {
          return invokeOnError(.device(.lowLightBoostNotSupported))
        }
        if device.automaticallyEnablesLowLightBoostWhenAvailable != lowLightBoost!.boolValue {
          device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue
        }
      }
      if colorSpace != nil, let avColorSpace = try? AVCaptureColorSpace(string: String(colorSpace!)) {
        device.activeColorSpace = avColorSpace
      }
      device.unlockForConfiguration()
    } catch let error as NSError {
      return invokeOnError(.device(.configureError), cause: error)
    }
  }

  /// Applies the given torch mode string ("off"/"on"/"auto") to the device,
  /// forcing it off while the session is not running.
  internal final func setTorchMode(_ torchMode: String) {
    guard let device = videoDeviceInput?.device else {
      return invokeOnError(.session(.cameraNotReady))
    }
    guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
      return invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
    }
    if !captureSession.isRunning {
      torchMode = .off
    }
    if device.torchMode == torchMode {
      // no need to run the whole lock/unlock bs
      return
    }
    if !device.hasTorch || !device.isTorchAvailable {
      if torchMode == .off {
        // ignore it, when it's off and not supported, it's off.
        return
      } else {
        // torch mode is .auto or .on, but no torch is available.
        return invokeOnError(.device(.torchUnavailable))
      }
    }
    do {
      try device.lockForConfiguration()
      device.torchMode = torchMode
      if torchMode == .on {
        try device.setTorchModeOn(level: 1.0)
      }
      device.unlockForConfiguration()
    } catch let error as NSError {
      return invokeOnError(.device(.configureError), cause: error)
    }
  }

  // pragma MARK: Event Invokers
  /// Logs the error and emits it to JS through the `onError` event (if attached),
  /// including an optional underlying `cause`.
  internal final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
    ReactLogger.log(level: .error, message: error.localizedDescription, alsoLogToJS: true)
    guard let onError = self.onError else { return }

    var causeDictionary: [String: Any]?
    if let cause = cause {
      causeDictionary = ["message": cause.localizedDescription, "details": cause.userInfo]
    }
    onError([
      "code": error.code,
      "message": error.message,
      "cause": causeDictionary ?? NSNull(),
    ])
  }

  /// Emits the `onInitialized` event to JS (if attached).
  internal final func invokeOnInitialized() {
    ReactLogger.log(level: .info, message: "Camera onInitialized()", alsoLogToJS: true)
    guard let onInitialized = self.onInitialized else { return }
    onInitialized([String: Any]())
  }
}

54
ios/CameraViewManager.m Normal file
View File

@ -0,0 +1,54 @@
//
// CameraViewManager.m
// Cuvent
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 Facebook. All rights reserved.
//
#import "CameraBridge.h"
#import <React/RCTViewManager.h>
// Exposes the Swift `CameraViewManager` (see CameraViewManager.swift) to React Native
// under the JS module name "CameraView". The macros below only declare the bridge
// surface; all implementations live in Swift.
@interface RCT_EXTERN_REMAP_MODULE(CameraView, CameraViewManager, RCTViewManager)

// Module Functions (static, no view tag required; all promise-based)
RCT_EXTERN_METHOD(getCameraPermissionStatus:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(getMicrophonePermissionStatus:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(requestCameraPermission:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(requestMicrophonePermission:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(getAvailableCameraDevices:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);

// Camera View Properties
RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL);
RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString);
RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableHighResolutionCapture, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
// device format
RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(hdr, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(colorSpace, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(preset, NSString);
RCT_EXPORT_VIEW_PROPERTY(scannableCodes, NSArray<NSString>);
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);

// Camera View Events (JS callback props)
RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onCodeScanned, RCTBubblingEventBlock);

// Camera View Functions (instance methods, take the view's React node tag first)
RCT_EXTERN_METHOD(startRecording:(nonnull NSNumber *)node options:(NSDictionary *)options onRecordCallback:(RCTResponseSenderBlock)onRecordCallback);
RCT_EXTERN_METHOD(stopRecording:(nonnull NSNumber *)node resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(takePhoto:(nonnull NSNumber *)node options:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(focus:(nonnull NSNumber *)node point:(NSDictionary *)point resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(getAvailableVideoCodecs:(nonnull NSNumber *)node resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(getAvailablePhotoCodecs:(nonnull NSNumber *)node resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);

@end

161
ios/CameraViewManager.swift Normal file
View File

@ -0,0 +1,161 @@
//
// CameraViewManager.swift
// Cuvent
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
/// The React Native ViewManager for `CameraView`.
///
/// Hosts both the static module functions (permissions, device discovery) and the
/// per-view functions (recording, photo capture, focus), all exported via
/// `CameraViewManager.m`.
@objc(CameraViewManager)
final class CameraViewManager: RCTViewManager {
  // pragma MARK: Setup

  override final func view() -> UIView! {
    return CameraView()
  }

  /// Must initialize on the main thread because this module creates UIViews.
  override static func requiresMainQueueSetup() -> Bool {
    return true
  }

  /// All exported methods run on the main queue since they touch the view hierarchy.
  override var methodQueue: DispatchQueue! {
    return DispatchQueue.main
  }

  // pragma MARK: Private Helpers

  /// Resolves a React node handle (tag) to its `CameraView` instance.
  ///
  /// Centralizes the force-cast the individual call sites previously repeated inline:
  /// receiving a tag that does not belong to a CameraView is a programmer error and
  /// crashes immediately, exactly as before.
  private final func getCameraView(withTag tag: NSNumber) -> CameraView {
    // swiftlint:disable:next force_cast
    return bridge.uiManager.view(forReactTag: tag) as! CameraView
  }

  /// All camera device types queryable on this OS version.
  private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
    var deviceTypes: [AVCaptureDevice.DeviceType] = []
    if #available(iOS 13.0, *) {
      deviceTypes.append(.builtInTripleCamera)
      deviceTypes.append(.builtInDualWideCamera)
      deviceTypes.append(.builtInUltraWideCamera)
    }
    if #available(iOS 11.1, *) {
      deviceTypes.append(.builtInTrueDepthCamera)
    }
    deviceTypes.append(.builtInDualCamera)
    deviceTypes.append(.builtInWideAngleCamera)
    deviceTypes.append(.builtInTelephotoCamera)
    return deviceTypes
  }

  // pragma MARK: Exported Functions

  @objc
  final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
    let component = getCameraView(withTag: node)
    component.startRecording(options: options, callback: onRecordCallback)
  }

  @objc
  final func stopRecording(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.stopRecording(promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func takePhoto(_ node: NSNumber, options: NSDictionary, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.takePhoto(options: options, promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func focus(_ node: NSNumber, point: NSDictionary, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let promise = Promise(resolver: resolve, rejecter: reject)
    // validate the point shape before touching the view
    guard let x = point["x"] as? NSNumber, let y = point["y"] as? NSNumber else {
      return promise.reject(error: .parameter(.invalid(unionName: "point", receivedValue: point.description)))
    }
    let component = getCameraView(withTag: node)
    component.focus(point: CGPoint(x: x.doubleValue, y: y.doubleValue), promise: promise)
  }

  @objc
  final func getAvailableVideoCodecs(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      guard let movieOutput = self.getCameraView(withTag: node).movieOutput else {
        throw CameraError.session(SessionError.cameraNotReady)
      }
      return movieOutput.availableVideoCodecTypes.map { $0.descriptor }
    }
  }

  @objc
  final func getAvailablePhotoCodecs(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      guard let photoOutput = self.getCameraView(withTag: node).photoOutput else {
        throw CameraError.session(SessionError.cameraNotReady)
      }
      return photoOutput.availablePhotoCodecTypes.map { $0.descriptor }
    }
  }

  // pragma MARK: View Manager funcs

  @objc
  final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(), mediaType: .video, position: .unspecified)
      // serialize each device into the JS `CameraDevice` shape (CameraDevice.d.ts)
      return discoverySession.devices.map {
        return [
          "id": $0.uniqueID,
          "devices": $0.physicalDevices.map { $0.deviceType.descriptor },
          "position": $0.position.descriptor,
          "name": $0.localizedName,
          "hasFlash": $0.hasFlash,
          "hasTorch": $0.hasTorch,
          "minZoom": $0.minAvailableVideoZoomFactor,
          "maxZoom": $0.maxAvailableVideoZoomFactor,
          "neutralZoom": $0.neutralZoomPercent,
          "isMultiCam": $0.isMultiCam,
          "supportsDepthCapture": false, // TODO: supportsDepthCapture
          "supportsRawCapture": false, // TODO: supportsRawCapture
          "supportsLowLightBoost": $0.isLowLightBoostSupported,
          "formats": $0.formats.map { (format) -> [String: Any] in
            format.toDictionary()
          },
        ]
      }
    }
  }

  @objc
  final func getCameraPermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let status = AVCaptureDevice.authorizationStatus(for: .video)
      return status.descriptor
    }
  }

  @objc
  final func getMicrophonePermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let status = AVCaptureDevice.authorizationStatus(for: .audio)
      return status.descriptor
    }
  }

  @objc
  final func requestCameraPermission(_ resolve: @escaping RCTPromiseResolveBlock, reject _: @escaping RCTPromiseRejectBlock) {
    AVCaptureDevice.requestAccess(for: .video) { granted in
      // requestAccess never fails, so we only ever resolve (either authorized or denied)
      let result: AVAuthorizationStatus = granted ? .authorized : .denied
      resolve(result.descriptor)
    }
  }

  @objc
  final func requestMicrophonePermission(_ resolve: @escaping RCTPromiseResolveBlock, reject _: @escaping RCTPromiseRejectBlock) {
    AVCaptureDevice.requestAccess(for: .audio) { granted in
      let result: AVAuthorizationStatus = granted ? .authorized : .denied
      resolve(result.descriptor)
    }
  }
}

View File

@ -0,0 +1,22 @@
//
// AVCaptureDevice+isMultiCam.swift
// Cuvent
//
// Created by Marc Rousavy on 07.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice {
  /**
   Returns true if the device is a virtual multi-cam, false otherwise.
   (Virtual devices only exist on iOS 13.0 and up, so this is always false earlier.)
   */
  var isMultiCam: Bool {
    guard #available(iOS 13.0, *) else { return false }
    return isVirtualDevice
  }
}

View File

@ -0,0 +1,32 @@
//
// AVCaptureDevice+neutralZoom.swift
// Cuvent
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice {
  /// The absolute zoom factor at which this device's field-of-view looks "neutral".
  ///
  /// On iOS 13+ virtual multi-cams this is the switch-over zoom factor just below the
  /// wide-angle constituent camera; everywhere else it falls back to `1.0` (no zoom).
  var neutralZoomFactor: CGFloat {
    if #available(iOS 13.0, *) {
      if let indexOfWideAngle = self.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }) {
        // switch-over factors sit *between* constituent cameras, hence `index - 1`;
        // the safe subscript returns nil when wide-angle is the first camera (index 0).
        if let zoomFactor = self.virtualDeviceSwitchOverVideoZoomFactors[safe: indexOfWideAngle - 1] {
          return CGFloat(zoomFactor.doubleValue)
        }
      }
    }
    return 1.0
  }

  /**
   Get the value at which the Zoom value is neutral, in percent (0.0-1.0)

   * On single-camera physical devices, this value will always be 0.0
   * On devices with multiple cameras, e.g. triple-camera, this value will be a value between 0.0 and 1.0, where the field-of-view and zoom looks "neutral"
   */
  var neutralZoomPercent: CGFloat {
    // NOTE(review): if min == max available zoom this divides by zero (-> NaN);
    // presumably no real device reports equal min/max — confirm before relying on this.
    return (neutralZoomFactor - minAvailableVideoZoomFactor) / (maxAvailableVideoZoomFactor - minAvailableVideoZoomFactor)
  }
}

View File

@ -0,0 +1,22 @@
//
// AVCaptureDevice+physicalDevices.swift
// Cuvent
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice {
  /**
   If the device is a virtual multi-cam, this returns `constituentDevices`, otherwise this returns an array of a single element, `self`
   */
  var physicalDevices: [AVCaptureDevice] {
    guard #available(iOS 13.0, *), self.isVirtualDevice else {
      return [self]
    }
    return constituentDevices
  }
}

View File

@ -0,0 +1,47 @@
//
// AVCaptureDevice.Format+isBetterThan.swift
// Cuvent
//
// Created by Marc Rousavy on 19.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice.Format {
  /** Compares the current Format to the given format and returns true if the current format has either:
   * 1. Higher still image capture dimensions
   * 2. Higher video format dimensions (iOS 13.0)
   * 3. Higher FPS
   */
  func isBetterThan(_ other: AVCaptureDevice.Format) -> Bool {
    // compare still image dimensions
    let leftDimensions = highResolutionStillImageDimensions
    let rightDimensions = other.highResolutionStillImageDimensions
    if leftDimensions.height * leftDimensions.width > rightDimensions.height * rightDimensions.width {
      return true
    }

    if #available(iOS 13.0, *) {
      // compare video dimensions
      let leftVideo = self.formatDescription.presentationDimensions()
      let rightVideo = other.formatDescription.presentationDimensions()
      if leftVideo.height * leftVideo.width > rightVideo.height * rightVideo.width {
        return true
      }
    }

    // compare max fps
    // BUGFIX: `max(by:)` expects an `areInIncreasingOrder` predicate; passing `>` made it
    // return the range with the *smallest* maxFrameRate. Use `<` to find the highest one.
    if let leftMaxFps = videoSupportedFrameRateRanges.max(by: { $0.maxFrameRate < $1.maxFrameRate }),
       let rightMaxFps = other.videoSupportedFrameRateRanges.max(by: { $0.maxFrameRate < $1.maxFrameRate }) {
      if leftMaxFps.maxFrameRate > rightMaxFps.maxFrameRate {
        return true
      }
    }

    return false
  }
}

View File

@ -0,0 +1,102 @@
//
// AVCaptureDevice.Format+matchesFilter.swift
// Cuvent
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice.Format {
  /**
   * Checks whether the given filter (NSDictionary, JSON Object) matches the given AVCaptureDevice Format.
   * The `filter` dictionary must be of type `CameraDeviceFormat` (from `CameraDevice.d.ts`).
   *
   * Every key present in the filter must match this format exactly; keys that are absent
   * (or of the wrong type) are simply ignored. Returns true only when all present keys match.
   */
  func matchesFilter(_ filter: NSDictionary) -> Bool {
    // photo dimensions must match exactly
    if let photoHeight = filter.value(forKey: "photoHeight") as? NSNumber {
      if highResolutionStillImageDimensions.height != photoHeight.intValue {
        return false
      }
    }
    if let photoWidth = filter.value(forKey: "photoWidth") as? NSNumber {
      if highResolutionStillImageDimensions.width != photoWidth.intValue {
        return false
      }
    }
    // video dimensions and photo-quality flag are only queryable on iOS 13+
    if #available(iOS 13.0, *) {
      if let videoHeight = filter.value(forKey: "videoHeight") as? NSNumber {
        if self.formatDescription.presentationDimensions().height != CGFloat(videoHeight.doubleValue) {
          return false
        }
      }
      if let videoWidth = filter.value(forKey: "videoWidth") as? NSNumber {
        if self.formatDescription.presentationDimensions().width != CGFloat(videoWidth.doubleValue) {
          return false
        }
      }
      if let isHighestPhotoQualitySupported = filter.value(forKey: "isHighestPhotoQualitySupported") as? Bool {
        if self.isHighestPhotoQualitySupported != isHighestPhotoQualitySupported {
          return false
        }
      }
    }
    // exact-value scalar filters
    if let maxISO = filter.value(forKey: "maxISO") as? NSNumber {
      if self.maxISO != maxISO.floatValue {
        return false
      }
    }
    if let minISO = filter.value(forKey: "minISO") as? NSNumber {
      if self.minISO != minISO.floatValue {
        return false
      }
    }
    if let fieldOfView = filter.value(forKey: "fieldOfView") as? NSNumber {
      if videoFieldOfView != fieldOfView.floatValue {
        return false
      }
    }
    if let maxZoom = filter.value(forKey: "maxZoom") as? NSNumber {
      if videoMaxZoomFactor != CGFloat(maxZoom.floatValue) {
        return false
      }
    }
    // every color space this format supports must appear in the filter's list
    // (unparseable strings become nil entries and therefore never match anything)
    if let colorSpaces = filter.value(forKey: "colorSpaces") as? [String] {
      let avColorSpaces = colorSpaces.map { try? AVCaptureColorSpace(string: $0) }
      let allColorSpacesIncluded = supportedColorSpaces.allSatisfy { avColorSpaces.contains($0) }
      if !allColorSpacesIncluded {
        return false
      }
    }
    // every supported frame-rate range must have an exact (min, max) counterpart in the filter
    if let frameRateRanges = filter.value(forKey: "frameRateRanges") as? [NSDictionary] {
      let allFrameRateRangesIncluded = videoSupportedFrameRateRanges.allSatisfy { (range) -> Bool in
        frameRateRanges.contains { (dict) -> Bool in
          guard let max = dict.value(forKey: "maxFrameRate") as? NSNumber,
            let min = dict.value(forKey: "minFrameRate") as? NSNumber
          else {
            return false
          }
          return range.maxFrameRate == max.doubleValue && range.minFrameRate == min.doubleValue
        }
      }
      if !allFrameRateRangesIncluded {
        return false
      }
    }
    // NOTE: an unparseable autoFocusSystem string silently skips this check (try? + optional binding)
    if let autoFocusSystem = filter.value(forKey: "autoFocusSystem") as? String, let avAutoFocusSystem = try? AVCaptureDevice.Format.AutoFocusSystem(withString: autoFocusSystem) {
      if self.autoFocusSystem != avAutoFocusSystem {
        return false
      }
    }
    // every supported stabilization mode must appear in the filter's list
    if let videoStabilizationModes = filter.value(forKey: "videoStabilizationModes") as? [String] {
      let avVideoStabilizationModes = videoStabilizationModes.map { try? AVCaptureVideoStabilizationMode(withString: $0) }
      let allStabilizationModesIncluded = self.videoStabilizationModes.allSatisfy { avVideoStabilizationModes.contains($0) }
      if !allStabilizationModesIncluded {
        return false
      }
    }
    return true
  }
}

View File

@ -0,0 +1,51 @@
//
// AVCaptureDevice.Format+toDictionary.swift
// Cuvent
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
/// All `AVCaptureVideoStabilizationMode` values that exist on this OS version.
private func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
  let baseModes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
  guard #available(iOS 13, *) else { return baseModes }
  return baseModes + [.cinematicExtended]
}
extension AVCaptureDevice.Format {
  /// All video-stabilization modes this format supports, out of every mode available on this OS.
  var videoStabilizationModes: [AVCaptureVideoStabilizationMode] {
    return getAllVideoStabilizationModes().filter { self.isVideoStabilizationModeSupported($0) }
  }

  /// Serializes this format into a JS-readable dictionary
  /// (shape: `CameraDeviceFormat` in `CameraDevice.d.ts`).
  func toDictionary() -> [String: Any] {
    var dict: [String: Any] = [
      "videoStabilizationModes": videoStabilizationModes.map { $0.descriptor },
      "autoFocusSystem": autoFocusSystem.descriptor,
      "photoHeight": highResolutionStillImageDimensions.height,
      "photoWidth": highResolutionStillImageDimensions.width,
      "maxISO": maxISO,
      "minISO": minISO,
      "fieldOfView": videoFieldOfView,
      "maxZoom": videoMaxZoomFactor,
      "colorSpaces": supportedColorSpaces.map { $0.descriptor },
      "supportsVideoHDR": isVideoHDRSupported,
      // NOTE(review): hard-coded false — photo HDR support is not detected yet
      "supportsPhotoHDR": false,
      "frameRateRanges": videoSupportedFrameRateRanges.map {
        [
          "minFrameRate": $0.minFrameRate,
          "maxFrameRate": $0.maxFrameRate,
        ]
      },
    ]
    // these keys are only available on iOS 13+; absent on older systems
    if #available(iOS 13.0, *) {
      dict["isHighestPhotoQualitySupported"] = self.isHighestPhotoQualitySupported
      dict["videoHeight"] = self.formatDescription.presentationDimensions().height
      dict["videoWidth"] = self.formatDescription.presentationDimensions().width
    }
    return dict
  }
}

View File

@ -0,0 +1,19 @@
//
// AVCaptureMovieFileOutput+mirror.swift
// Cuvent
//
// Created by Marc Rousavy on 18.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureMovieFileOutput {
  /// Turns on video mirroring for every connection of this output that supports it.
  func mirror() {
    for connection in connections where connection.isVideoMirroringSupported {
      connection.isVideoMirrored = true
    }
  }
}

View File

@ -0,0 +1,19 @@
//
// AVCapturePhotoOutput+mirror.swift
// Cuvent
//
// Created by Marc Rousavy on 18.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVCapturePhotoOutput {
  /// Turns on video mirroring for every connection of this output that supports it.
  func mirror() {
    for connection in connections where connection.isVideoMirroringSupported {
      connection.isVideoMirrored = true
    }
  }
}

View File

@ -0,0 +1,15 @@
//
// AVFrameRateRange+includes.swift
// Cuvent
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
extension AVFrameRateRange {
  /// Whether the given frame rate lies within this range (inclusive on both ends).
  func includes(fps: Double) -> Bool {
    return minFrameRate <= fps && fps <= maxFrameRate
  }
}

View File

@ -0,0 +1,18 @@
//
// Collection+safe.swift
// Cuvent
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
extension Collection {
  /**
   Returns the element at the specified index if it is within bounds, otherwise nil.
   */
  subscript(safe index: Index) -> Element? {
    guard indices.contains(index) else { return nil }
    return self[index]
  }
}

View File

@ -0,0 +1,26 @@
//
// AVAuthorizationStatus+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 29.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVAuthorizationStatus {
  /// The JS union-string representation of this permission status.
  var descriptor: String {
    switch self {
    case .authorized: return "authorized"
    case .denied: return "denied"
    case .notDetermined: return "not-determined"
    case .restricted: return "restricted"
    @unknown default:
      fatalError("AVAuthorizationStatus has unknown state.")
    }
  }
}

View File

@ -0,0 +1,44 @@
//
// AVCaptureColorSpace+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 19.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureColorSpace {
  /// Creates a color space from its JS union-string ("hlg-bt2020" | "p3-d65" | "srgb").
  /// - Throws: `EnumParserError.unsupportedOS` for "hlg-bt2020" below iOS 14.1,
  ///           `EnumParserError.invalidValue` for any unrecognized string.
  init(string: String) throws {
    switch string {
    case "hlg-bt2020":
      if #available(iOS 14.1, *) {
        self = .HLG_BT2020
      } else {
        throw EnumParserError.unsupportedOS(supportedOnOS: "14.1")
      }
      return
    case "p3-d65":
      self = .P3_D65
      return
    case "srgb":
      self = .sRGB
      return
    default:
      throw EnumParserError.invalidValue
    }
  }

  /// The JS union-string representation of this color space.
  var descriptor: String {
    switch self {
    case .HLG_BT2020:
      return "hlg-bt2020"
    case .P3_D65:
      return "p3-d65"
    case .sRGB:
      return "srgb"
    default:
      // BUGFIX: message previously said "AVCaptureDevice.Position" (copy-paste error).
      fatalError("AVCaptureColorSpace has unknown state.")
    }
  }
}

View File

@ -0,0 +1,45 @@
//
// AVCaptureDevice.DeviceType+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
extension AVCaptureDevice.DeviceType {
  /// The JS union-string representation of this device type.
  ///
  /// Newer device types are matched first inside availability checks; the final switch
  /// covers the types that exist on every supported OS version.
  var descriptor: String {
    if #available(iOS 13.0, *) {
      switch self {
      case .builtInDualWideCamera:
        return "dual-wide-camera"
      case .builtInTripleCamera:
        return "triple-camera"
      case .builtInUltraWideCamera:
        return "ultra-wide-angle-camera"
      default:
        break
      }
    }
    if #available(iOS 11.1, *) {
      switch self {
      case .builtInTrueDepthCamera:
        return "true-depth-camera"
      default:
        break
      }
    }
    switch self {
    case .builtInDualCamera:
      return "dual-camera"
    case .builtInTelephotoCamera:
      return "telephoto-camera"
    case .builtInWideAngleCamera:
      return "wide-angle-camera"
    default:
      // BUGFIX: message previously said "AVCaptureDevice.Position" (copy-paste error).
      fatalError("AVCaptureDevice.DeviceType has unknown state.")
    }
  }
}

View File

@ -0,0 +1,27 @@
//
// AVCaptureDevice.FlashMode+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice.FlashMode {
  /// Creates a FlashMode from its JS union-string ("on" | "off" | "auto").
  /// Returns nil for any unrecognized value.
  init?(withString string: String) {
    let modes: [String: AVCaptureDevice.FlashMode] = [
      "on": .on,
      "off": .off,
      "auto": .auto,
    ]
    guard let mode = modes[string] else {
      return nil
    }
    self = mode
  }
}

View File

@ -0,0 +1,40 @@
//
// AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 29.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice.Format.AutoFocusSystem {
  /// Creates an AutoFocusSystem from its JS union-string
  /// ("contrast-detection" | "phase-detection" | "none").
  /// - Throws: `EnumParserError.invalidValue` for any unrecognized string.
  init(withString string: String) throws {
    switch string {
    case "contrast-detection":
      self = .contrastDetection
      return
    case "phase-detection":
      self = .phaseDetection
      return
    case "none":
      self = .none
      return
    default:
      throw EnumParserError.invalidValue
    }
  }

  /// The JS union-string representation of this auto-focus system.
  var descriptor: String {
    switch self {
    case .contrastDetection:
      return "contrast-detection"
    case .phaseDetection:
      return "phase-detection"
    case .none:
      return "none"
    @unknown default:
      // BUGFIX: message previously said "AVCaptureDevice.Format" — name the actual type.
      fatalError("AVCaptureDevice.Format.AutoFocusSystem has unknown state.")
    }
  }
}

View File

@ -0,0 +1,25 @@
//
// AVCaptureDevice.Position+String.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
extension AVCaptureDevice.Position {
  /// The JS union-string representation of this camera position.
  var descriptor: String {
    switch self {
    case .back: return "back"
    case .front: return "front"
    case .unspecified: return "unspecified"
    @unknown default:
      fatalError("AVCaptureDevice.Position has unknown state.")
    }
  }
}

View File

@ -0,0 +1,27 @@
//
// AVCaptureDevice.TorchMode+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 18.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureDevice.TorchMode {
  /// Creates a TorchMode from its JS union-string ("on" | "off" | "auto").
  /// Returns nil for any unrecognized value.
  init?(withString string: String) {
    let modes: [String: AVCaptureDevice.TorchMode] = [
      "on": .on,
      "off": .off,
      "auto": .auto,
    ]
    guard let mode = modes[string] else {
      return nil
    }
    self = mode
  }
}

View File

@ -0,0 +1,29 @@
//
// AVCapturePhotoOutput.QualityPrioritization+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
@available(iOS 13.0, *)
extension AVCapturePhotoOutput.QualityPrioritization {
  /// Creates a QualityPrioritization from its JS union-string ("speed" | "balanced" | "quality").
  /// Returns nil for any unrecognized value.
  init?(withString string: String) {
    let values: [String: AVCapturePhotoOutput.QualityPrioritization] = [
      "speed": .speed,
      "quality": .quality,
      "balanced": .balanced,
    ]
    guard let value = values[string] else {
      return nil
    }
    self = value
  }
}

View File

@ -0,0 +1,55 @@
//
// AVCaptureSession.Preset+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
extension AVCaptureSession.Preset {
  /// Creates a session Preset from its JS union-string.
  /// - Throws: `EnumParserError.invalidValue` when the string matches no known preset.
  init(withString string: String) throws {
    let presets: [String: AVCaptureSession.Preset] = [
      "cif-352x288": .cif352x288,
      "hd-1280x720": .hd1280x720,
      "hd-1920x1080": .hd1920x1080,
      "hd-3840x2160": .hd4K3840x2160,
      "high": .high,
      "iframe-1280x720": .iFrame1280x720,
      "iframe-960x540": .iFrame960x540,
      "input-priority": .inputPriority,
      "low": .low,
      "medium": .medium,
      "photo": .photo,
      "vga-640x480": .vga640x480,
    ]
    guard let preset = presets[string] else {
      throw EnumParserError.invalidValue
    }
    self = preset
  }
}

View File

@ -0,0 +1,61 @@
//
// AVCaptureVideoStabilizationMode+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 29.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
extension AVCaptureVideoStabilizationMode {
  /// Creates a stabilization mode from its JS union-string.
  /// - Throws: `EnumParserError.unsupportedOS` for "cinematic-extended" below iOS 13.0,
  ///           `EnumParserError.invalidValue` for any unrecognized string.
  init(withString string: String) throws {
    switch string {
    case "auto":
      self = .auto
      return
    case "cinematic":
      self = .cinematic
      return
    case "cinematic-extended":
      if #available(iOS 13.0, *) {
        self = .cinematicExtended
        return
      } else {
        // NOTE(review): other parsers pass just "13.0" here — unify the supportedOnOS format.
        throw EnumParserError.unsupportedOS(supportedOnOS: "iOS 13.0")
      }
    case "off":
      self = .off
      return
    case "standard":
      self = .standard
      return
    default:
      throw EnumParserError.invalidValue
    }
  }

  /// The JS union-string representation of this stabilization mode.
  /// iOS-13-only cases are handled first; a plain `default` (not `@unknown default`) is
  /// required below because `.cinematicExtended` must fall through on older OS versions.
  var descriptor: String {
    if #available(iOS 13.0, *) {
      switch self {
      case .cinematicExtended:
        return "cinematic-extended"
      default:
        break
      }
    }
    switch self {
    case .auto:
      return "auto"
    case .cinematic:
      return "cinematic"
    case .off:
      return "off"
    case .standard:
      return "standard"
    default:
      fatalError("AVCaptureVideoStabilizationMode has unknown state.")
    }
  }
}

View File

@ -0,0 +1,137 @@
//
// AVMetadataObject.ObjectType+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 16.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
extension AVMetadataObject.ObjectType {
  /// Creates a metadata object type (barcode/body/face kinds) from its JS union-string.
  /// - Throws: `EnumParserError.unsupportedOS` for iOS-13-only types on older systems,
  ///           `EnumParserError.invalidValue` for any unrecognized string.
  init(withString string: String) throws {
    switch string {
    case "aztec":
      self = .aztec
      return
    case "cat-body":
      if #available(iOS 13.0, *) {
        self = .catBody
        return
      } else {
        throw EnumParserError.unsupportedOS(supportedOnOS: "13.0")
      }
    case "code-128":
      self = .code128
      return
    case "code-39":
      self = .code39
      return
    case "code-39-mod-43":
      self = .code39Mod43
      return
    case "code-93":
      self = .code93
      return
    case "data-matrix":
      self = .dataMatrix
      return
    case "dog-body":
      if #available(iOS 13.0, *) {
        self = .dogBody
        return
      } else {
        throw EnumParserError.unsupportedOS(supportedOnOS: "13.0")
      }
    case "ean-13":
      self = .ean13
      return
    case "ean-8":
      self = .ean8
      return
    case "face":
      self = .face
      return
    case "human-body":
      if #available(iOS 13.0, *) {
        self = .humanBody
        return
      } else {
        throw EnumParserError.unsupportedOS(supportedOnOS: "13.0")
      }
    case "interleaved-2-of-5":
      self = .interleaved2of5
      return
    case "itf-14":
      self = .itf14
      return
    case "pdf-417":
      self = .pdf417
      return
    case "qr":
      self = .qr
      return
    case "salient-object":
      if #available(iOS 13.0, *) {
        self = .salientObject
        return
      } else {
        throw EnumParserError.unsupportedOS(supportedOnOS: "13.0")
      }
    case "upce":
      self = .upce
      return
    default:
      throw EnumParserError.invalidValue
    }
  }

  /// The JS union-string representation of this metadata object type.
  /// iOS-13-only cases are matched first; the final switch covers the always-available ones.
  var descriptor: String {
    if #available(iOS 13.0, *) {
      switch self {
      case .catBody:
        return "cat-body"
      case .dogBody:
        return "dog-body"
      case .humanBody:
        return "human-body"
      case .salientObject:
        return "salient-object"
      default: break
      }
    }
    switch self {
    case .aztec:
      return "aztec"
    case .code128:
      return "code-128"
    case .code39:
      return "code-39"
    case .code39Mod43:
      return "code-39-mod-43"
    case .code93:
      return "code-93"
    case .dataMatrix:
      return "data-matrix"
    case .ean13:
      return "ean-13"
    case .ean8:
      return "ean-8"
    case .face:
      return "face"
    case .interleaved2of5:
      return "interleaved-2-of-5"
    case .itf14:
      return "itf-14"
    case .pdf417:
      return "pdf-417"
    case .qr:
      return "qr"
    case .upce:
      return "upce"
    default:
      fatalError("AVMetadataObject.ObjectType has unknown state.")
    }
  }
}

View File

@ -0,0 +1,93 @@
//
// AVVideoCodecType+descriptor.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
import Foundation
extension AVVideoCodecType {
  /// Creates a video codec type from its JS union-string.
  /// Returns nil for unrecognized strings AND for iOS-13-only codecs on older systems
  /// (unlike the throwing parsers elsewhere, OS-gated failures are indistinguishable here).
  init?(withString string: String) {
    switch string {
    case "h264":
      self = .h264
      return
    case "hevc":
      self = .hevc
      return
    case "hevc-alpha":
      if #available(iOS 13.0, *) {
        self = .hevcWithAlpha
        return
      } else {
        return nil
      }
    case "jpeg":
      self = .jpeg
      return
    case "pro-res-422":
      self = .proRes422
      return
    case "pro-res-422-hq":
      if #available(iOS 13.0, *) {
        self = .proRes422HQ
        return
      } else {
        return nil
      }
    case "pro-res-422-lt":
      if #available(iOS 13.0, *) {
        self = .proRes422LT
        return
      } else {
        return nil
      }
    case "pro-res-422-proxy":
      if #available(iOS 13.0, *) {
        self = .proRes422Proxy
        return
      } else {
        return nil
      }
    case "pro-res-4444":
      self = .proRes4444
      return
    default:
      return nil
    }
  }

  /// The JS union-string representation of this codec type.
  /// iOS-13-only codecs are matched first; the final switch covers the always-available ones.
  var descriptor: String {
    if #available(iOS 13.0, *) {
      switch self {
      case .hevcWithAlpha:
        return "hevc-alpha"
      case .proRes422HQ:
        return "pro-res-422-hq"
      case .proRes422LT:
        return "pro-res-422-lt"
      case .proRes422Proxy:
        return "pro-res-422-proxy"
      default:
        break
      }
    }
    switch self {
    case .h264:
      return "h264"
    case .hevc:
      return "hevc"
    case .jpeg:
      return "jpeg"
    case .proRes422:
      return "pro-res-422"
    case .proRes4444:
      return "pro-res-4444"
    default:
      fatalError("AVVideoCodecType has unknown state.")
    }
  }
}

View File

@ -0,0 +1,27 @@
//
// EnumParserError.swift
// Cuvent
//
// Created by Marc Rousavy on 18.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import Foundation
/**
 An error raised when the given descriptor (TypeScript string union type) cannot be parsed and converted to a Swift enum.
 */
enum EnumParserError: Error {
  /**
   Raised when the descriptor is not supported on the current OS.
   The associated value carries the minimum OS version on which it is supported.
   */
  case unsupportedOS(supportedOnOS: String)
  /**
   Raised when the descriptor does not match any of the possible values.
   */
  case invalidValue
  /**
   Raised when no descriptor for the given enum is available.
   */
  case noDescriptorAvailable
}

View File

@ -0,0 +1,69 @@
//
// PhotoCaptureDelegate.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import AVFoundation
// Strong references keeping delegates alive for the duration of a capture;
// AVCapturePhotoOutput only holds its delegate weakly.
private var delegatesReferences: [NSObject] = []

/// Receives AVFoundation photo-capture callbacks and settles the given Promise with
/// either the captured photo's file info or a CameraError.
class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
  // The JS promise to settle exactly once when the capture finishes or fails.
  private let promise: Promise

  required init(promise: Promise) {
    self.promise = promise
    super.init()
    // register self so we stay alive until a callback fires
    delegatesReferences.append(self)
  }

  /// Called when the processed photo is ready: writes it to a temp JPEG file and
  /// resolves the promise with path, dimensions (from EXIF) and metadata.
  func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    defer {
      // release the strong self-reference regardless of outcome
      delegatesReferences.removeAll(where: { $0 == self })
    }
    if let error = error {
      return promise.reject(error: .capture(.unknown(message: error.localizedDescription)), cause: error as NSError)
    }

    let error = ErrorPointer(nilLiteral: ())
    guard let tempFilePath = RCTTempFilePath("jpeg", error)
    else {
      return promise.reject(error: .capture(.createTempFileError), cause: error?.pointee)
    }
    let url = URL(string: "file://\(tempFilePath)")!

    guard let data = photo.fileDataRepresentation()
    else {
      return promise.reject(error: .capture(.fileError))
    }

    do {
      try data.write(to: url)
      // pixel dimensions come from the EXIF block; may be absent (then serialized as nil)
      let exif = photo.metadata["{Exif}"] as? [String: Any]
      let width = exif?["PixelXDimension"]
      let height = exif?["PixelYDimension"]

      return promise.resolve([
        "path": tempFilePath,
        "width": width as Any,
        "height": height as Any,
        "isRawPhoto": photo.isRawPhoto,
        "metadata": photo.metadata,
        "thumbnail": photo.embeddedThumbnailPhotoFormat as Any,
      ])
    } catch {
      return promise.reject(error: .capture(.fileError), cause: error as NSError)
    }
  }

  /// Called when the whole capture finishes; only rejects on error.
  /// NOTE(review): if this reports an error after `didFinishProcessingPhoto` already
  /// resolved, the promise is settled twice — confirm the Promise contract tolerates this.
  func photoOutput(_: AVCapturePhotoOutput, didFinishCaptureFor _: AVCaptureResolvedPhotoSettings, error: Error?) {
    defer {
      delegatesReferences.removeAll(where: { $0 == self })
    }
    if let error = error {
      return promise.reject(error: .capture(.unknown(message: error.localizedDescription)), cause: error as NSError)
    }
  }
}

View File

@ -0,0 +1,29 @@
//
// MakeReactError.swift
// Cuvent
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
/// Builds an RCTMakeError dictionary for the given CameraError, optionally embedding
/// the underlying NSError as a nested "cause" error object.
func makeReactError(_ cameraError: CameraError, cause: NSError?) -> [String: Any] {
  let causeDictionary: [String: Any]? = cause.map {
    RCTMakeError("\($0.domain): \($0.code) \($0.description)", nil, $0.userInfo)
  }
  return RCTMakeError(
    "\(cameraError.code): \(cameraError.message)",
    nil,
    [
      "code": cameraError.code,
      "message": cameraError.message,
      "cause": causeDictionary ?? NSNull(),
    ]
  )
}

/// Convenience overload without an underlying cause.
func makeReactError(_ cameraError: CameraError) -> [String: Any] {
  return makeReactError(cameraError, cause: nil)
}

65
ios/React/Promise.swift Normal file
View File

@ -0,0 +1,65 @@
//
// Promise.swift
// Cuvent
//
// Created by Marc Rousavy on 14.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import Foundation
/**
* Represents a JavaScript Promise instance. `reject()` and `resolve()` should only be called once.
*/
class Promise {
  // The underlying React Native resolve/reject blocks; each must fire at most once.
  private let onResolve: RCTPromiseResolveBlock
  private let onReject: RCTPromiseRejectBlock

  init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
    onResolve = resolver
    onReject = rejecter
  }

  /// Resolves the JS promise with the given (bridge-serializable) value.
  func resolve(_ value: Any?) {
    onResolve(value)
  }

  /// Resolves the JS promise with `undefined`.
  func resolve() {
    resolve(nil)
  }

  /// Rejects the JS promise with the CameraError's code/message and an optional cause.
  func reject(error: CameraError, cause: NSError?) {
    onReject(error.code, error.message, cause)
  }

  /// Rejects the JS promise with the CameraError and no underlying cause.
  func reject(error: CameraError) {
    reject(error: error, cause: nil)
  }
}
/**
 * Wrap a block with an automatic promise resolver and rejecter.
 *
 * The value returned by the `block` must be serializable by the React Native bridge, or `nil`.
 * The error thrown by the `block` should be a `CameraError`.
 */
func withPromise(_ promise: Promise, _ block: () throws -> Any?) {
  do {
    promise.resolve(try block())
  } catch let cameraError as CameraError {
    // Known camera errors carry their own code/message.
    promise.reject(error: cameraError)
  } catch {
    // Any other Swift Error bridges to NSError; surface it as an "unknown" camera error.
    promise.reject(error: CameraError.unknown(message: error.localizedDescription), cause: error as NSError)
  }
}
/**
 * Wrap a block with an automatic promise resolver and rejecter.
 *
 * The value returned by the `block` must be serializable by the React Native bridge, or `nil`.
 * The error thrown by the `block` should be a `CameraError`.
 */
func withPromise(resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock, _ block: () throws -> Any?) {
  withPromise(Promise(resolver: resolve, rejecter: reject), block)
}

View File

@ -0,0 +1,24 @@
//
// ReactLogger.swift
// Cuvent
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 Facebook. All rights reserved.
//
import Foundation
// Tag prepended to every log message so Camera logs are easy to filter in the console.
let context = "Camera"
enum ReactLogger {
  /// Logs a message to the native console, and optionally mirrors it to the
  /// JavaScript console as well.
  static func log(level: RCTLogLevel, message: String, alsoLogToJS: Bool = false, file: String = #file, lineNumber: Int = #line) {
    RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "\(context): \(message)")
    if alsoLogToJS {
      // Reuse the JS logger instead of duplicating the call.
      logJS(level: level, message: message, file: file, lineNumber: lineNumber)
    }
  }

  /// Logs a message to the JavaScript console only.
  static func logJS(level: RCTLogLevel, message: String, file: String = #file, lineNumber: Int = #line) {
    RCTDefaultLogFunction(level, RCTLogSource.javaScript, file, lineNumber as NSNumber, "\(context): \(message)")
  }
}

View File

@ -0,0 +1,40 @@
//
// VideoCaptureDelegate.swift
// Cuvent
//
// Created by Marc Rousavy on 14.01.21.
// Copyright © 2021 Facebook. All rights reserved.
//
import AVFoundation
// Functions like `startRecording(delegate: ...)` only maintain a weak reference on the delegates to prevent memory leaks.
// In our use case, we exit from the function which will deinit our recording delegate since no other references are being held.
// That's why we're keeping a strong reference to the delegate by appending it to the `delegateReferences` list and removing it
// once the delegate has been triggered once.
// NOTE(review): access to this list is not synchronized — presumably init and the
// recording callback run on the same queue; verify against the capture-session setup.
private var delegateReferences: [NSObject] = []
/// Recording delegate that forwards the final result of a video recording to a
/// React Native callback, keeping itself alive (via `delegateReferences`) until
/// the recording has finished.
class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDelegate {
  private let callback: RCTResponseSenderBlock // (video?, error?) => void
  private let resetTorchMode: () -> Void

  init(callback: @escaping RCTResponseSenderBlock, resetTorchMode: @escaping () -> Void) {
    self.callback = callback
    self.resetTorchMode = resetTorchMode
    super.init()
    // Keep a strong reference so this delegate survives until the callback fires.
    delegateReferences.append(self)
  }

  func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from _: [AVCaptureConnection], error: Error?) {
    // Restore the torch and release the strong self-reference on every exit path.
    defer {
      self.resetTorchMode()
      delegateReferences.removeAll(where: { $0 == self })
    }
    if let error = error {
      callback([NSNull(), makeReactError(.capture(.unknown(message: error.localizedDescription)), cause: error as NSError)])
      return
    }
    let video: [String: Any] = [
      "path": outputFileURL.absoluteString,
      "duration": CMTimeGetSeconds(output.recordedDuration),
      "size": output.recordedFileSize,
    ]
    callback([video, NSNull()])
  }
}

View File

@ -1,2 +0,0 @@
#import <React/RCTBridgeModule.h>
#import <React/RCTViewManager.h>

View File

@ -1,9 +0,0 @@
#import <React/RCTBridgeModule.h>
@interface RCT_EXTERN_MODULE(VisionCamera, NSObject)
RCT_EXTERN_METHOD(multiply:(float)a withB:(float)b
withResolver:(RCTPromiseResolveBlock)resolve
withRejecter:(RCTPromiseRejectBlock)reject)
@end

View File

@ -1,8 +0,0 @@
@objc(VisionCamera)
class VisionCamera: NSObject {
@objc(multiply:withB:withResolver:withRejecter:)
func multiply(a: Float, b: Float, resolve:RCTPromiseResolveBlock,reject:RCTPromiseRejectBlock) -> Void {
resolve(a*b)
}
}

View File

@ -7,10 +7,43 @@
objects = {
/* Begin PBXBuildFile section */
5E555C0D2413F4C50049A1A2 /* VisionCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = B3E7B5891CC2AC0600A0062D /* VisionCamera.m */; };
F4FF95D7245B92E800C19C63 /* VisionCamera.swift in Sources */ = {isa = PBXBuildFile; fileRef = F4FF95D6245B92E800C19C63 /* VisionCamera.swift */; };
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B887515F25E0102000DB86D6 /* CameraViewManager.m */; };
B887518825E0102000DB86D6 /* VideoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516025E0102000DB86D6 /* VideoCaptureDelegate.swift */; };
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516225E0102000DB86D6 /* Collection+safe.swift */; };
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */; };
B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */; };
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */; };
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */; };
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */; };
B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */; };
B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */; };
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; };
B887519225E0102000DB86D6 /* AVCaptureMovieFileOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516B25E0102000DB86D6 /* AVCaptureMovieFileOutput+mirror.swift */; };
B887519325E0102000DB86D6 /* CameraView+CodeScanning.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516C25E0102000DB86D6 /* CameraView+CodeScanning.swift */; };
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516E25E0102000DB86D6 /* MakeReactError.swift */; };
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; };
B887519625E0102000DB86D6 /* Promise.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517025E0102000DB86D6 /* Promise.swift */; };
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */; };
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517325E0102000DB86D6 /* EnumParserError.swift */; };
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */; };
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */; };
B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */; };
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */; };
B887519D25E0102000DB86D6 /* AVMetadataObject.ObjectType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517825E0102000DB86D6 /* AVMetadataObject.ObjectType+descriptor.swift */; };
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */; };
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */; };
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */; };
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */; };
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517D25E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift */; };
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */; };
B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */; };
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518025E0102000DB86D6 /* CameraView+Focus.swift */; };
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518125E0102000DB86D6 /* CameraViewManager.swift */; };
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
@ -27,11 +60,44 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B3E7B5891CC2AC0600A0062D /* VisionCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = VisionCamera.m; sourceTree = "<group>"; };
F4FF95D5245B92E700C19C63 /* VisionCamera-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "VisionCamera-Bridging-Header.h"; sourceTree = "<group>"; };
F4FF95D6245B92E800C19C63 /* VisionCamera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VisionCamera.swift; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
B887515E25E0102000DB86D6 /* CameraBridge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraBridge.h; sourceTree = "<group>"; };
B887515F25E0102000DB86D6 /* CameraViewManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraViewManager.m; sourceTree = "<group>"; };
B887516025E0102000DB86D6 /* VideoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoCaptureDelegate.swift; sourceTree = "<group>"; };
B887516225E0102000DB86D6 /* Collection+safe.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Collection+safe.swift"; sourceTree = "<group>"; };
B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+neutralZoom.swift"; sourceTree = "<group>"; };
B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+isBetterThan.swift"; sourceTree = "<group>"; };
B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+isMultiCam.swift"; sourceTree = "<group>"; };
B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+physicalDevices.swift"; sourceTree = "<group>"; };
B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVFrameRateRange+includes.swift"; sourceTree = "<group>"; };
B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput+mirror.swift"; sourceTree = "<group>"; };
B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+matchesFilter.swift"; sourceTree = "<group>"; };
B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+toDictionary.swift"; sourceTree = "<group>"; };
B887516B25E0102000DB86D6 /* AVCaptureMovieFileOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureMovieFileOutput+mirror.swift"; sourceTree = "<group>"; };
B887516C25E0102000DB86D6 /* CameraView+CodeScanning.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+CodeScanning.swift"; sourceTree = "<group>"; };
B887516E25E0102000DB86D6 /* MakeReactError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MakeReactError.swift; sourceTree = "<group>"; };
B887516F25E0102000DB86D6 /* ReactLogger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReactLogger.swift; sourceTree = "<group>"; };
B887517025E0102000DB86D6 /* Promise.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Promise.swift; sourceTree = "<group>"; };
B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+TakePhoto.swift"; sourceTree = "<group>"; };
B887517325E0102000DB86D6 /* EnumParserError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EnumParserError.swift; sourceTree = "<group>"; };
B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoStabilizationMode+descriptor.swift"; sourceTree = "<group>"; };
B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVVideoCodecType+descriptor.swift"; sourceTree = "<group>"; };
B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession.Preset+descriptor.swift"; sourceTree = "<group>"; };
B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.TorchMode+descriptor.swift"; sourceTree = "<group>"; };
B887517825E0102000DB86D6 /* AVMetadataObject.ObjectType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVMetadataObject.ObjectType+descriptor.swift"; sourceTree = "<group>"; };
B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput.QualityPrioritization+descriptor.swift"; sourceTree = "<group>"; };
B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.DeviceType+descriptor.swift"; sourceTree = "<group>"; };
B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVAuthorizationStatus+descriptor.swift"; sourceTree = "<group>"; };
B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Position+descriptor.swift"; sourceTree = "<group>"; };
B887517D25E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureColorSpace+descriptor.swift"; sourceTree = "<group>"; };
B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.FlashMode+descriptor.swift"; sourceTree = "<group>"; };
B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift"; sourceTree = "<group>"; };
B887518025E0102000DB86D6 /* CameraView+Focus.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+Focus.swift"; sourceTree = "<group>"; };
B887518125E0102000DB86D6 /* CameraViewManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraViewManager.swift; sourceTree = "<group>"; };
B887518225E0102000DB86D6 /* CameraView+Zoom.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+Zoom.swift"; sourceTree = "<group>"; };
B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -56,15 +122,72 @@
58B511D21A9E6C8500147676 = {
isa = PBXGroup;
children = (
F4FF95D6245B92E800C19C63 /* VisionCamera.swift */,
B3E7B5891CC2AC0600A0062D /* VisionCamera.m */,
F4FF95D5245B92E700C19C63 /* VisionCamera-Bridging-Header.h */,
B887515E25E0102000DB86D6 /* CameraBridge.h */,
B887518325E0102000DB86D6 /* CameraError.swift */,
B887518425E0102000DB86D6 /* CameraView.swift */,
B887516C25E0102000DB86D6 /* CameraView+CodeScanning.swift */,
B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B887516125E0102000DB86D6 /* Extensions */,
B887517225E0102000DB86D6 /* Parsers */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B887516D25E0102000DB86D6 /* React */,
B887516025E0102000DB86D6 /* VideoCaptureDelegate.swift */,
134814211AA4EA7D00B7C361 /* Products */,
);
sourceTree = "<group>";
};
B887516125E0102000DB86D6 /* Extensions */ = {
isa = PBXGroup;
children = (
B887516225E0102000DB86D6 /* Collection+safe.swift */,
B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */,
B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */,
B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */,
B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */,
B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */,
B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */,
B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */,
B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */,
B887516B25E0102000DB86D6 /* AVCaptureMovieFileOutput+mirror.swift */,
);
path = Extensions;
sourceTree = "<group>";
};
B887516D25E0102000DB86D6 /* React */ = {
isa = PBXGroup;
children = (
B887516E25E0102000DB86D6 /* MakeReactError.swift */,
B887516F25E0102000DB86D6 /* ReactLogger.swift */,
B887517025E0102000DB86D6 /* Promise.swift */,
);
path = React;
sourceTree = "<group>";
};
B887517225E0102000DB86D6 /* Parsers */ = {
isa = PBXGroup;
children = (
B887517325E0102000DB86D6 /* EnumParserError.swift */,
B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */,
B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */,
B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */,
B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */,
B887517825E0102000DB86D6 /* AVMetadataObject.ObjectType+descriptor.swift */,
B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */,
B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */,
B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */,
B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */,
B887517D25E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift */,
B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */,
B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */,
);
path = Parsers;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@ -122,10 +245,43 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
F4FF95D7245B92E800C19C63 /* VisionCamera.swift in Sources */,
B3E7B58A1CC2AC0600A0062D /* VisionCamera.m in Sources */,
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */,
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
B887519D25E0102000DB86D6 /* AVMetadataObject.ObjectType+descriptor.swift in Sources */,
B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B887519225E0102000DB86D6 /* AVCaptureMovieFileOutput+mirror.swift in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */,
B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
B887519325E0102000DB86D6 /* CameraView+CodeScanning.swift in Sources */,
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
B887518825E0102000DB86D6 /* VideoCaptureDelegate.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@ -238,11 +394,9 @@
OTHER_LDFLAGS = "-ObjC";
PRODUCT_NAME = VisionCamera;
SKIP_INSTALL = YES;
SWIFT_OBJC_BRIDGING_HEADER = "VisionCamera-Bridging-Header.h";
SWIFT_OBJC_BRIDGING_HEADER = "CameraBridge.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
};
name = Debug;
};
@ -259,10 +413,8 @@
OTHER_LDFLAGS = "-ObjC";
PRODUCT_NAME = VisionCamera;
SKIP_INSTALL = YES;
SWIFT_OBJC_BRIDGING_HEADER = "VisionCamera-Bridging-Header.h";
SWIFT_OBJC_BRIDGING_HEADER = "CameraBridge.h";
SWIFT_VERSION = 5.0;
};
name = Release;
};