feat: New Core/ library (#1975)

Moves everything Camera-related into `core/` / `Core/` so that it is better encapsulated from React Native.

Benefits:

1. Code is much better organized. It should be easier for collaborators to work with now, and it is a cleaner codebase for me.
2. Locking is now fully atomic: the session can only be configured through a lock/Mutex, and changes can be batched and overridden in one go (see the sketch below the benefits list)
    * On iOS, this makes Camera startup time **MUCH** faster; I measured a speedup from **1.5 seconds** down to only **240 milliseconds**, since we now lock/commit only once! 🚀 
    * On Android, this fixes a few out-of-sync/concurrency issues such as "Capture Request contains unconfigured Input/Output Surface!", since configuration is now a single lock operation! 💪 
3. It is easier to integrate VisionCamera outside of React Native (e.g. native iOS apps, NativeScript, Flutter, etc.)

With this PR, VisionCamera V3 is up to **7x** faster than V2.
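
To make benefit 2 concrete, here is a minimal sketch of the "configure once under a single lock, commit once" idea. The names used below (`CameraConfiguration`, `CameraSessionCore`, the `zoom`/`torch` properties) are illustrative assumptions, not the actual Core/ API:

```swift
import AVFoundation
import Foundation

// Illustrative sketch only: names and properties are assumptions, not the real Core/ API.
struct CameraConfiguration {
  var zoom: CGFloat = 1.0
  var torch: AVCaptureDevice.TorchMode = .off
}

final class CameraSessionCore {
  private let lock = NSLock()
  private let captureSession = AVCaptureSession()
  private var configuration = CameraConfiguration()

  /// Every change requested inside `block` is collected under one lock and then
  /// applied to the AVCaptureSession in a single begin/commit pair, instead of
  /// locking and committing once per property.
  func configure(_ block: (inout CameraConfiguration) -> Void) {
    lock.lock()
    defer { lock.unlock() }

    // Collect all requested changes first...
    block(&configuration)

    // ...then apply them atomically as one batch.
    captureSession.beginConfiguration()
    // (derive and apply inputs, outputs and formats from `configuration` here)
    captureSession.commitConfiguration()
  }
}
```

A caller can then batch several changes into one commit, e.g. `session.configure { $0.zoom = 2.0; $0.torch = .on }`, which is why startup only needs to lock and commit once instead of once per setting.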
Authored by Marc Rousavy on 2023-10-13 18:33:20 +02:00, committed by GitHub
parent 54871022f4
commit cd0b413706
72 changed files with 2326 additions and 1521 deletions


@@ -1,93 +1,19 @@
 //
 //  CameraView+Focus.swift
-//  mrousavy
+//  VisionCamera
 //
-//  Created by Marc Rousavy on 19.02.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
+//  Created by Marc Rousavy on 12.10.23.
+//  Copyright © 2023 mrousavy. All rights reserved.
 //

 import AVFoundation
 import Foundation

 extension CameraView {
-  private func convertPreviewCoordinatesToCameraCoordinates(_ point: CGPoint) -> CGPoint {
-    return previewView.captureDevicePointConverted(fromLayerPoint: point)
-  }
-
   func focus(point: CGPoint, promise: Promise) {
     withPromise(promise) {
-      guard let device = self.videoDeviceInput?.device else {
-        throw CameraError.session(SessionError.cameraNotReady)
-      }
-      if !device.isFocusPointOfInterestSupported {
-        throw CameraError.device(DeviceError.focusNotSupported)
-      }
-
-      // in {0..1} system
-      let normalizedPoint = convertPreviewCoordinatesToCameraCoordinates(point)
-
-      do {
-        try device.lockForConfiguration()
-        defer {
-          device.unlockForConfiguration()
-        }
-
-        // Set Focus
-        if device.isFocusPointOfInterestSupported {
-          device.focusPointOfInterest = normalizedPoint
-          device.focusMode = .autoFocus
-        }
-
-        // Set Exposure
-        if device.isExposurePointOfInterestSupported {
-          device.exposurePointOfInterest = normalizedPoint
-          device.exposureMode = .autoExpose
-        }
-
-        // Remove any existing listeners
-        NotificationCenter.default.removeObserver(self,
-                                                  name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
-                                                  object: nil)
-
-        // Listen for focus completion
-        device.isSubjectAreaChangeMonitoringEnabled = true
-        NotificationCenter.default.addObserver(self,
-                                               selector: #selector(subjectAreaDidChange),
-                                               name: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange,
-                                               object: nil)
-        return nil
-      } catch {
-        throw CameraError.device(DeviceError.configureError)
-      }
-    }
-  }
-
-  @objc
-  func subjectAreaDidChange(notification _: NSNotification) {
-    guard let device = videoDeviceInput?.device else {
-      invokeOnError(.session(.cameraNotReady))
-      return
-    }
-
-    do {
-      try device.lockForConfiguration()
-      defer {
-        device.unlockForConfiguration()
-      }
-
-      // Reset Focus to continuous/auto
-      if device.isFocusPointOfInterestSupported {
-        device.focusMode = .continuousAutoFocus
-      }
-
-      // Reset Exposure to continuous/auto
-      if device.isExposurePointOfInterestSupported {
-        device.exposureMode = .continuousAutoExposure
-      }
-
-      // Disable listeners
-      device.isSubjectAreaChangeMonitoringEnabled = false
-    } catch {
-      invokeOnError(.device(.configureError))
+      try cameraSession.focus(point: point)
+      return nil
     }
   }
 }
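
The only thing the new call site above guarantees about the Core layer is that `CameraSession` exposes a throwing `focus(point:)`. Below is a hedged sketch of how the removed AVFoundation logic could be re-homed on that type; the `videoDeviceInput` property, the reused `CameraError` cases, and where the preview-to-camera coordinate conversion now happens are assumptions carried over from the removed code, not something this diff confirms:

```swift
import AVFoundation

// Hedged sketch, not the actual Core implementation: it simply re-homes the
// AVFoundation logic removed from CameraView+Focus.swift onto CameraSession.
extension CameraSession {
  func focus(point: CGPoint) throws {
    // Assumption: CameraSession owns the active video input, as CameraView used to.
    guard let device = videoDeviceInput?.device else {
      throw CameraError.session(SessionError.cameraNotReady)
    }
    guard device.isFocusPointOfInterestSupported else {
      throw CameraError.device(DeviceError.focusNotSupported)
    }

    do {
      try device.lockForConfiguration()
      defer { device.unlockForConfiguration() }

      // Focus and exposure expect a point in the normalized {0..1} coordinate system;
      // the preview-to-camera conversion is assumed to happen before this call.
      device.focusPointOfInterest = point
      device.focusMode = .autoFocus

      if device.isExposurePointOfInterestSupported {
        device.exposurePointOfInterest = point
        device.exposureMode = .autoExpose
      }
    } catch {
      throw CameraError.device(DeviceError.configureError)
    }
  }
}
```

Moving this logic behind the Core session instead of keeping it in the React Native view is what enables benefit 3: other hosts (native iOS apps, NativeScript, Flutter) can drive focus through the same session object.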