feat: New Core/ library (#1975)
Moves everything Camera-related into `core/` / `Core/` so that it is better encapsulated from React Native. Benefits:

1. Code is much better organized. It should be easier for collaborators now, and a cleaner codebase for me.
2. Locking is fully atomic, as you can now only configure the session through a lock/Mutex whose changes are applied in a single batch (see the sketch below).
   * On iOS, this makes Camera startup time **MUCH** faster; I measured speedups from **1.5 seconds** down to only **240 milliseconds**, since we only lock/commit once! 🚀
   * On Android, this fixes a few out-of-sync/concurrency issues like "Capture Request contains unconfigured Input/Output Surface!", since it is now a single lock operation! 💪
3. It is easier to integrate VisionCamera outside of React Native (e.g. native iOS apps, NativeScript, Flutter, etc.).

With this PR, VisionCamera V3 is up to **7x** faster than V2.
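To illustrate point 2, here is a minimal sketch of a batched, mutex-guarded session configuration. The `CameraSession` class and `configure` method are illustrative assumptions, not the actual Core/ API:

```swift
import AVFoundation
import Foundation

final class CameraSession {
  private let captureSession = AVCaptureSession()
  private let lock = NSLock()

  /// Applies all configuration changes under a single lock and a single
  /// begin/commit pair, so the session is only reconfigured once per batch.
  func configure(_ changes: (AVCaptureSession) throws -> Void) rethrows {
    lock.lock()
    captureSession.beginConfiguration()
    defer {
      // Runs on every exit path: commit the batched changes, then release the lock.
      captureSession.commitConfiguration()
      lock.unlock()
    }
    try changes(captureSession)
  }
}

// Callers batch all their changes into one atomic commit:
// session.configure { session in
//   session.sessionPreset = .photo
//   // add/remove inputs and outputs here...
// }
```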
@@ -1,23 +0,0 @@
-//
-//  AVAudioSession+trySetAllowHaptics.swift
-//  VisionCamera
-//
-//  Created by Marc Rousavy on 26.03.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-extension AVAudioSession {
-  /**
-   Tries to set allowHapticsAndSystemSoundsDuringRecording and ignore errors.
-   */
-  func trySetAllowHaptics(_ allowHaptics: Bool) {
-    if #available(iOS 13.0, *) {
-      if !self.allowHapticsAndSystemSoundsDuringRecording {
-        try? self.setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
-      }
-    }
-  }
-}
@@ -8,6 +8,7 @@
 
 import AVFoundation
 import Foundation
+import UIKit
 
 extension AVCaptureConnection {
   /**
@@ -10,6 +10,8 @@ import AVFoundation
 
 extension AVCaptureDevice {
   func toDictionary() -> [String: Any] {
+    let formats = formats.map { CameraDeviceFormat(fromFormat: $0) }
+
     return [
       "id": uniqueID,
       "physicalDevices": physicalDevices.map(\.deviceType.physicalDeviceDescriptor),
@@ -25,10 +27,8 @@ extension AVCaptureDevice {
       "supportsLowLightBoost": isLowLightBoostSupported,
       "supportsFocus": isFocusPointOfInterestSupported,
       "hardwareLevel": "full",
-      "sensorOrientation": "portrait", // TODO: Sensor Orientation?
-      "formats": formats.map { format -> [String: Any] in
-        format.toDictionary()
-      },
+      "sensorOrientation": Orientation.landscapeLeft.jsValue,
+      "formats": formats.map { $0.toJSValue() },
     ]
  }
}
@@ -8,27 +8,20 @@
 
 import AVFoundation
 
-private func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
-  var modes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
-  if #available(iOS 13, *) {
-    modes.append(.cinematicExtended)
-  }
-  return modes
-}
-
 extension AVCaptureDevice.Format {
   var videoStabilizationModes: [AVCaptureVideoStabilizationMode] {
-    return getAllVideoStabilizationModes().filter { self.isVideoStabilizationModeSupported($0) }
+    let allModes = AVCaptureDevice.Format.getAllVideoStabilizationModes()
+    return allModes.filter { self.isVideoStabilizationModeSupported($0) }
   }
 
-  var minFrameRate: Float64 {
+  var minFps: Float64 {
     let maxRange = videoSupportedFrameRateRanges.max { l, r in
       return l.maxFrameRate < r.maxFrameRate
     }
     return maxRange?.minFrameRate ?? 0
   }
 
-  var maxFrameRate: Float64 {
+  var maxFps: Float64 {
     let maxRange = videoSupportedFrameRateRanges.max { l, r in
       return l.maxFrameRate < r.maxFrameRate
     }
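A quick usage sketch (assumed, not part of this PR) for the renamed `minFps`/`maxFps` helpers above, e.g. picking the device format with the highest supported frame rate:

```swift
import AVFoundation

// Picks the format whose maxFps (as defined by the extension above) is highest.
func fastestFormat(of device: AVCaptureDevice) -> AVCaptureDevice.Format? {
  return device.formats.max { $0.maxFps < $1.maxFps }
}
```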
@@ -45,52 +38,20 @@ extension AVCaptureDevice.Format {
     return hdrFormats.contains(pixelFormat)
   }
 
-  func toDictionary() -> [String: AnyHashable] {
-    let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
-    let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }
-
-    return [
-      "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
-      "autoFocusSystem": autoFocusSystem.descriptor,
-      "photoHeight": photoDimensions.height,
-      "photoWidth": photoDimensions.width,
-      "videoHeight": videoDimensions.height,
-      "videoWidth": videoDimensions.width,
-      "maxISO": maxISO,
-      "minISO": minISO,
-      "fieldOfView": videoFieldOfView,
-      "maxZoom": videoMaxZoomFactor,
-      "supportsVideoHDR": supportsVideoHDR,
-      "supportsPhotoHDR": false,
-      "minFps": minFrameRate,
-      "maxFps": maxFrameRate,
-      "pixelFormats": pixelFormats.map(\.unionValue),
-      "supportsDepthCapture": !supportedDepthDataFormats.isEmpty,
-    ]
-  }
-
-  /**
-   Compares this format to the given JS `CameraDeviceFormat`.
-   Only the most important properties (such as dimensions and FPS) are taken into consideration,
-   so this is not an exact equals, but more like a "matches filter" comparison.
-   */
-  func isEqualTo(jsFormat dict: NSDictionary) -> Bool {
-    guard dict["photoWidth"] as? Int32 == photoDimensions.width && dict["photoHeight"] as? Int32 == photoDimensions.height else {
-      return false
-    }
-
-    guard dict["videoWidth"] as? Int32 == videoDimensions.width && dict["videoHeight"] as? Int32 == videoDimensions.height else {
-      return false
-    }
-
-    guard dict["minFps"] as? Float64 == minFrameRate && dict["maxFps"] as? Float64 == maxFrameRate else {
-      return false
-    }
-
-    guard dict["supportsVideoHDR"] as? Bool == supportsVideoHDR else {
-      return false
-    }
-
-    return true
-  }
+  var supportsPhotoHDR: Bool {
+    // TODO: Supports Photo HDR on iOS?
+    return false
+  }
+
+  var supportsDepthCapture: Bool {
+    return !supportedDepthDataFormats.isEmpty
+  }
+
+  private static func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
+    var modes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
+    if #available(iOS 13, *) {
+      modes.append(.cinematicExtended)
+    }
+    return modes
+  }
 }
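The removed `isEqualTo(jsFormat:)` implements a "matches filter" comparison rather than exact equality. Here is a self-contained sketch of the same idea using only AVFoundation types; the `FormatFilter` struct is hypothetical and not part of VisionCamera:

```swift
import AVFoundation
import CoreMedia

struct FormatFilter {
  let videoWidth: Int32
  let videoHeight: Int32
  let minFps: Float64
  let maxFps: Float64
}

func findMatchingFormat(in device: AVCaptureDevice, filter: FormatFilter) -> AVCaptureDevice.Format? {
  return device.formats.first { format in
    let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
    guard dimensions.width == filter.videoWidth && dimensions.height == filter.videoHeight else {
      return false
    }
    // Accept the format if any of its frame-rate ranges covers the requested FPS window.
    return format.videoSupportedFrameRateRanges.contains { range in
      range.minFrameRate <= filter.minFps && filter.maxFps <= range.maxFrameRate
    }
  }
}
```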
package/ios/Extensions/AVCaptureOutput+mirror.swift (new file, 52 lines)
@@ -0,0 +1,52 @@
+//
+//  AVCaptureOutput+mirror.swift
+//  mrousavy
+//
+//  Created by Marc Rousavy on 18.01.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+
+extension AVCaptureOutput {
+  /**
+   Mirrors the video output if possible.
+   */
+  func mirror() {
+    connections.forEach { connection in
+      if connection.isVideoMirroringSupported {
+        connection.automaticallyAdjustsVideoMirroring = false
+        connection.isVideoMirrored = true
+      }
+    }
+  }
+
+  /**
+   Sets the target orientation of the video output.
+   This does not always physically rotate image buffers.
+
+   - For Preview, an orientation hint is used to rotate the layer/view itself.
+   - For Photos, an EXIF tag is used.
+   - For Videos, the buffers are physically rotated if available, since we use an AVCaptureVideoDataOutput instead of an AVCaptureMovieFileOutput.
+   */
+  func setOrientation(_ orientation: Orientation) {
+    // Camera Sensors are always in 90deg rotation.
+    // We are setting the target rotation here, so we need to rotate by 90deg once.
+    let cameraOrientation = orientation.rotateRight()
+
+    // Set orientation for each connection
+    connections.forEach { connection in
+      // TODO: Use this once Xcode 15 is rolled out
+      // if #available(iOS 17.0, *) {
+      //   let degrees = cameraOrientation.toDegrees()
+      //   if connection.isVideoRotationAngleSupported(degrees) {
+      //     connection.videoRotationAngle = degrees
+      //   }
+      // } else {
+      if connection.isVideoOrientationSupported {
+        connection.videoOrientation = cameraOrientation.toAVCaptureVideoOrientation()
+      }
+      // }
+    }
+  }
+}
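`setOrientation(_:)` relies on an `Orientation` type defined elsewhere in VisionCamera. The following is only a hedged sketch of the pieces used above (`rotateRight()` and the AVFoundation mapping), assuming a simple four-case enum; the exact rotation convention is an assumption:

```swift
import AVFoundation

enum Orientation: String {
  case portrait
  case landscapeLeft
  case portraitUpsideDown
  case landscapeRight

  /// Rotates by 90 degrees (direction assumed here), e.g. to compensate
  /// for the camera sensor's native rotation.
  func rotateRight() -> Orientation {
    switch self {
    case .portrait: return .landscapeLeft
    case .landscapeLeft: return .portraitUpsideDown
    case .portraitUpsideDown: return .landscapeRight
    case .landscapeRight: return .portrait
    }
  }

  /// Maps each case onto the corresponding AVFoundation orientation.
  func toAVCaptureVideoOrientation() -> AVCaptureVideoOrientation {
    switch self {
    case .portrait: return .portrait
    case .landscapeLeft: return .landscapeLeft
    case .portraitUpsideDown: return .portraitUpsideDown
    case .landscapeRight: return .landscapeRight
    }
  }
}
```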
@@ -1,20 +0,0 @@
-//
-//  AVCapturePhotoOutput+mirror.swift
-//  mrousavy
-//
-//  Created by Marc Rousavy on 18.01.21.
-//  Copyright © 2021 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-
-extension AVCapturePhotoOutput {
-  func mirror() {
-    connections.forEach { connection in
-      if connection.isVideoMirroringSupported {
-        connection.automaticallyAdjustsVideoMirroring = false
-        connection.isVideoMirrored = true
-      }
-    }
-  }
-}
@@ -0,0 +1,26 @@
+//
+//  AVCaptureVideoDataOutput+pixelFormat.swift
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 12.10.23.
+//  Copyright © 2023 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension AVCaptureVideoDataOutput {
+  /**
+   Gets or sets the PixelFormat this output streams in.
+   By default, the first item in `availableVideoPixelFormatTypes` is chosen.
+   */
+  var pixelFormat: OSType {
+    get {
+      let current = videoSettings[String(kCVPixelBufferPixelFormatTypeKey)] as? OSType
+      return current ?? availableVideoPixelFormatTypes.first!
+    }
+    set {
+      videoSettings[String(kCVPixelBufferPixelFormatTypeKey)] = newValue
+    }
+  }
+}
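A usage sketch (assumed, not part of this PR) for the `pixelFormat` accessor above, preferring a 4:2:0 bi-planar YUV format when the output supports it:

```swift
import AVFoundation

let videoOutput = AVCaptureVideoDataOutput()
let preferred = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
if videoOutput.availableVideoPixelFormatTypes.contains(preferred) {
  // Uses the extension's setter, which writes kCVPixelBufferPixelFormatTypeKey.
  videoOutput.pixelFormat = preferred
}
```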