feat: Use correct photo and video format dimensions on iOS (#1929)
* feat: Use new photo dimensions API
* Update AVCaptureDevice.Format+matchesFilter.swift
* fix: Use Pixels instead of Points for video size
* feat: Set `PhotoOutput`'s maximum photo resolution
* fix: Compare dictionaries instead
* chore: Format code
* fix: Try to use hash.... failing atm
* fix: Use rough comparison again
* fix: Also take video HDR into consideration
* chore: Format
* Use contains
* Update AVCaptureDevice.Format+toDictionary.swift
* docs: Add better docs to Camera props
* Update CameraView+AVCaptureSession.swift
* Update CameraView+AVCaptureSession.swift
parent a4448c3a7d
commit 6e72781500
@@ -507,7 +507,7 @@ PODS:
     - libwebp (~> 1.0)
     - SDWebImage/Core (~> 5.10)
   - SocketRocket (0.6.1)
-  - VisionCamera (3.3.0):
+  - VisionCamera (3.3.1):
     - React
     - React-callinvoker
     - React-Core
@@ -747,7 +747,7 @@ SPEC CHECKSUMS:
  SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d
  SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d
  SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
- VisionCamera: 94dc6dba1e9ed8669cc1b890d3541a6532d4c9d5
+ VisionCamera: f649cd0c0fa6266f1cd5e0787a7c9583ca143b3a
  Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce

PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
@@ -262,6 +262,7 @@ extension CameraView {
    do {
      try device.lockForConfiguration()
+
      // Configure FPS
      if let fps = fps?.int32Value {
        let supportsGivenFps = device.activeFormat.videoSupportedFrameRateRanges.contains { range in
          return range.includes(fps: Double(fps))
@@ -278,15 +279,15 @@ extension CameraView {
        device.activeVideoMinFrameDuration = CMTime.invalid
        device.activeVideoMaxFrameDuration = CMTime.invalid
      }

      // Configure Low-Light-Boost
      if lowLightBoost != nil {
        if lowLightBoost == true && !device.isLowLightBoostSupported {
          invokeOnError(.device(.lowLightBoostNotSupported))
          return
        }
        if device.automaticallyEnablesLowLightBoostWhenAvailable != lowLightBoost!.boolValue {
          device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue
        }
      }

      device.unlockForConfiguration()
      ReactLogger.log(level: .info, message: "Device successfully configured!")
@@ -303,8 +304,8 @@ extension CameraView {
   */
  final func configureFormat() {
    ReactLogger.log(level: .info, message: "Configuring Format...")
-   guard let filter = format else {
-     // Format Filter was null. Ignore it.
+   guard let jsFormat = format else {
+     // JS Format was null. Ignore it, use default.
      return
    }
    guard let device = videoDeviceInput?.device else {
@@ -312,22 +313,34 @@ extension CameraView {
      return
    }

-   if device.activeFormat.matchesFilter(filter) {
-     ReactLogger.log(level: .info, message: "Active format already matches filter.")
+   if device.activeFormat.isEqualTo(jsFormat: jsFormat) {
+     ReactLogger.log(level: .info, message: "Already selected active format.")
      return
    }

    // get matching format
-   let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
-   guard let format = matchingFormats.first else {
+   let format = device.formats.first { $0.isEqualTo(jsFormat: jsFormat) }
+   guard let format else {
      invokeOnError(.format(.invalidFormat))
      return
    }

    do {
      try device.lockForConfiguration()
-     device.activeFormat = format
+     defer {
+       device.unlockForConfiguration()
+     }
+
+     let shouldReconfigurePhotoOutput = device.activeFormat.photoDimensions.toCGSize() != format.photoDimensions.toCGSize()
+     device.activeFormat = format
+
+     // The Photo Output uses the smallest available Dimension by default. We need to configure it for the maximum here
+     if shouldReconfigurePhotoOutput, #available(iOS 16.0, *) {
+       if let photoOutput = photoOutput {
+         photoOutput.maxPhotoDimensions = format.photoDimensions
+       }
+     }

      ReactLogger.log(level: .info, message: "Format successfully configured!")
    } catch let error as NSError {
      invokeOnError(.device(.configureError), cause: error)
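For context, the pattern introduced above can be summarized on its own. The sketch below is illustrative only (the helper name and parameters are assumptions, not VisionCamera API): on iOS 16+, an AVCapturePhotoOutput keeps its previous, often small, maxPhotoDimensions, so after switching the device's activeFormat the output has to be told explicitly to use the format's largest supported photo size.

import AVFoundation

// Minimal sketch (assumed helper, not the library's exact code): switch the active
// format, then raise the photo output's maximum dimensions on iOS 16+ so photos are
// captured at the format's full resolution instead of the default (smallest) size.
func apply(_ format: AVCaptureDevice.Format,
           to device: AVCaptureDevice,
           photoOutput: AVCapturePhotoOutput) throws {
  try device.lockForConfiguration()
  defer { device.unlockForConfiguration() }

  device.activeFormat = format

  if #available(iOS 16.0, *) {
    // Pick the largest photo size this format supports, by pixel area.
    if let largest = format.supportedMaxPhotoDimensions.max(by: {
      $0.width * $0.height < $1.width * $1.height
    }) {
      photoOutput.maxPhotoDimensions = largest
    }
  }
}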
@@ -63,6 +63,11 @@ extension CameraView {
      photoSettings.photoQualityPrioritization = photoQualityPrioritization
    }

+   // photo size is always the one selected in the format
+   if #available(iOS 16.0, *) {
+     photoSettings.maxPhotoDimensions = photoOutput.maxPhotoDimensions
+   }
+
    // red-eye reduction
    if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
      photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction
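A rough usage sketch of the capture-side half of this change (the helper name is assumed): when building the per-capture AVCapturePhotoSettings, mirroring the output's configured maximum ensures the photo is actually taken at the selected format's size rather than the default.

import AVFoundation

// Rough illustration (assumed helper): copy the output's configured maximum into the
// per-capture settings so the request targets the selected format's photo size.
func makePhotoSettings(for photoOutput: AVCapturePhotoOutput) -> AVCapturePhotoSettings {
  let settings = AVCapturePhotoSettings()
  if #available(iOS 16.0, *) {
    settings.maxPhotoDimensions = photoOutput.maxPhotoDimensions
  }
  return settings
}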
@@ -0,0 +1,33 @@
//
// AVCaptureDevice.Format+dimensions.swift
// VisionCamera
//
// Created by Marc Rousavy on 03.08.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureDevice.Format {
  /**
   * Returns the dimensions the video pipeline is streaming at.
   */
  var videoDimensions: CMVideoDimensions {
    return CMVideoFormatDescriptionGetDimensions(formatDescription)
  }

  /**
   Returns the maximum available photo resolution this format can use.
   */
  var photoDimensions: CMVideoDimensions {
    if #available(iOS 16.0, *) {
      if let max = supportedMaxPhotoDimensions.max(by: { left, right in
        return left.width * left.height < right.width * right.height
      }) {
        return max
      }
    }
    return highResolutionStillImageDimensions
  }
}
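A hypothetical usage of the two new computed properties, just to show what they return (the device lookup here is an assumption for the example, not part of the commit):

import AVFoundation

// Hypothetical usage: print each format's video stream size and maximum photo size
// for the default back wide-angle camera.
func dumpFormats() {
  guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {
    return
  }
  for format in device.formats {
    let video = format.videoDimensions
    let photo = format.photoDimensions
    print("video: \(video.width)x\(video.height), photo: \(photo.width)x\(photo.height)")
  }
}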
@@ -1,39 +0,0 @@
//
// AVCaptureDevice.Format+isBetterThan.swift
// mrousavy
//
// Created by Marc Rousavy on 19.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format {
  /** Compares the current Format to the given format and returns true if the current format has either:
   * 1. Higher still image capture dimensions
   * 2. Higher video format dimensions (iOS 13.0)
   * 3. Higher FPS
   */
  func isBetterThan(_ other: AVCaptureDevice.Format) -> Bool {
    // compare still image dimensions
    let leftDimensions = highResolutionStillImageDimensions
    let rightDimensions = other.highResolutionStillImageDimensions
    if leftDimensions.height * leftDimensions.width > rightDimensions.height * rightDimensions.width {
      return true
    }

    // compare video dimensions
    let leftVideo = videoDimensions
    let rightVideo = other.videoDimensions
    if leftVideo.height * leftVideo.width > rightVideo.height * rightVideo.width {
      return true
    }

    // compare max fps
    if maxFrameRate > other.maxFrameRate {
      return true
    }

    return false
  }
}
@@ -1,83 +0,0 @@
//
// AVCaptureDevice.Format+matchesFilter.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format {
  /**
   * Checks whether the given filter (NSDictionary, JSON Object) matches the given AVCaptureDevice Format.
   * The `dictionary` dictionary must be of type `CameraDeviceFormat` (from `CameraDevice.d.ts`)
   */
  func matchesFilter(_ filter: NSDictionary) -> Bool {
    if let photoHeight = filter.value(forKey: "photoHeight") as? NSNumber {
      if highResolutionStillImageDimensions.height != photoHeight.intValue {
        return false
      }
    }
    if let photoWidth = filter.value(forKey: "photoWidth") as? NSNumber {
      if highResolutionStillImageDimensions.width != photoWidth.intValue {
        return false
      }
    }
    if let videoHeight = filter.value(forKey: "videoHeight") as? NSNumber {
      if videoDimensions.height != CGFloat(videoHeight.doubleValue) {
        return false
      }
    }
    if let videoWidth = filter.value(forKey: "videoWidth") as? NSNumber {
      if videoDimensions.width != CGFloat(videoWidth.doubleValue) {
        return false
      }
    }
    if let maxISO = filter.value(forKey: "maxISO") as? NSNumber {
      if self.maxISO != maxISO.floatValue {
        return false
      }
    }
    if let minISO = filter.value(forKey: "minISO") as? NSNumber {
      if self.minISO != minISO.floatValue {
        return false
      }
    }
    if let fieldOfView = filter.value(forKey: "fieldOfView") as? NSNumber {
      if videoFieldOfView != fieldOfView.floatValue {
        return false
      }
    }
    if let maxZoom = filter.value(forKey: "maxZoom") as? NSNumber {
      if videoMaxZoomFactor != CGFloat(maxZoom.doubleValue) {
        return false
      }
    }
    if let minFps = filter.value(forKey: "minFps") as? NSNumber {
      if minFrameRate != Float64(minFps.doubleValue) {
        return false
      }
    }
    if let maxFps = filter.value(forKey: "maxFps") as? NSNumber {
      if maxFrameRate != Float64(maxFps.doubleValue) {
        return false
      }
    }
    if let autoFocusSystem = filter.value(forKey: "autoFocusSystem") as? String,
       let avAutoFocusSystem = try? AVCaptureDevice.Format.AutoFocusSystem(withString: autoFocusSystem) {
      if self.autoFocusSystem != avAutoFocusSystem {
        return false
      }
    }
    if let videoStabilizationModes = filter.value(forKey: "videoStabilizationModes") as? [String] {
      let avVideoStabilizationModes = videoStabilizationModes.map { try? AVCaptureVideoStabilizationMode(withString: $0) }
      let allStabilizationModesIncluded = self.videoStabilizationModes.allSatisfy { avVideoStabilizationModes.contains($0) }
      if !allStabilizationModesIncluded {
        return false
      }
    }

    return true
  }
}
@@ -35,22 +35,32 @@ extension AVCaptureDevice.Format {
    return maxRange?.maxFrameRate ?? 0
  }

- func toDictionary() -> [String: Any] {
+ var supportsVideoHDR: Bool {
+   let pixelFormat = CMFormatDescriptionGetMediaSubType(formatDescription)
+   let hdrFormats = [
+     kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
+     kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange,
+     kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange,
+   ]
+   return hdrFormats.contains(pixelFormat)
+ }
+
+ func toDictionary() -> [String: AnyHashable] {
    let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
    let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }

    return [
      "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
      "autoFocusSystem": autoFocusSystem.descriptor,
-     "photoHeight": highResolutionStillImageDimensions.height,
-     "photoWidth": highResolutionStillImageDimensions.width,
+     "photoHeight": photoDimensions.height,
+     "photoWidth": photoDimensions.width,
      "videoHeight": videoDimensions.height,
      "videoWidth": videoDimensions.width,
      "maxISO": maxISO,
      "minISO": minISO,
      "fieldOfView": videoFieldOfView,
      "maxZoom": videoMaxZoomFactor,
-     "supportsVideoHDR": availablePixelFormats.contains(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange),
+     "supportsVideoHDR": supportsVideoHDR,
      "supportsPhotoHDR": false,
      "minFps": minFrameRate,
      "maxFps": maxFrameRate,
@@ -58,4 +68,29 @@ extension AVCaptureDevice.Format {
      "supportsDepthCapture": !supportedDepthDataFormats.isEmpty,
    ]
  }
+
+ /**
+  Compares this format to the given JS `CameraDeviceFormat`.
+  Only the most important properties (such as dimensions and FPS) are taken into consideration,
+  so this is not an exact equals, but more like a "matches filter" comparison.
+  */
+ func isEqualTo(jsFormat dict: NSDictionary) -> Bool {
+   guard dict["photoWidth"] as? Int32 == photoDimensions.width && dict["photoHeight"] as? Int32 == photoDimensions.height else {
+     return false
+   }
+
+   guard dict["videoWidth"] as? Int32 == videoDimensions.width && dict["videoHeight"] as? Int32 == videoDimensions.height else {
+     return false
+   }
+
+   guard dict["minFps"] as? Float64 == minFrameRate && dict["maxFps"] as? Float64 == maxFrameRate else {
+     return false
+   }
+
+   guard dict["supportsVideoHDR"] as? Bool == supportsVideoHDR else {
+     return false
+   }
+
+   return true
+ }
}
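To illustrate the rough comparison, here is an assumed example (all values made up) of a JS-side format dictionary being matched back to a native format; only the keys checked by isEqualTo(jsFormat:) matter:

import AVFoundation

// Example with assumed values: a dictionary in the shape produced by toDictionary()
// matches a format only if photo/video dimensions, FPS range and video HDR agree.
func findFormat(on device: AVCaptureDevice) -> AVCaptureDevice.Format? {
  let jsFormat: NSDictionary = [
    "photoWidth": Int32(4032), "photoHeight": Int32(3024),
    "videoWidth": Int32(3840), "videoHeight": Int32(2160),
    "minFps": Float64(1), "maxFps": Float64(60),
    "supportsVideoHDR": false,
  ]
  return device.formats.first { $0.isEqualTo(jsFormat: jsFormat) }
}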
@@ -1,24 +0,0 @@
//
// AVCaptureDevice.Format+videoDimensions.swift
// VisionCamera
//
// Created by Marc Rousavy on 03.08.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureDevice.Format {
  /**
   * Returns the video dimensions, adjusted to take pixel aspect ratio and/or clean
   * aperture into account.
   *
   * Pixel aspect ratio is used to adjust the width, leaving the height alone.
   */
  var videoDimensions: CGSize {
    return CMVideoFormatDescriptionGetPresentationDimensions(formatDescription,
                                                             usePixelAspectRatio: true,
                                                             useCleanAperture: true)
  }
}
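For context on the "Pixels instead of Points" part of this change, the sketch below (assumed helper name) contrasts the display-adjusted presentation dimensions the deleted helper used with the raw encoded pixel dimensions the replacement reports:

import AVFoundation

// Assumed helper: compare aspect-ratio/clean-aperture-adjusted presentation dimensions
// with the raw encoded pixel dimensions of a format's video stream.
func compareDimensions(of format: AVCaptureDevice.Format) {
  let presentation = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription,
                                                                       usePixelAspectRatio: true,
                                                                       useCleanAperture: true)
  let pixels = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
  print("presentation: \(presentation.width)x\(presentation.height), encoded pixels: \(pixels.width)x\(pixels.height)")
}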
package/ios/Extensions/CMVideoDimensions+toCGSize.swift (new file, 16 lines)
@@ -0,0 +1,16 @@
//
// CMVideoDimensions+toCGSize.swift
// VisionCamera
//
// Created by Marc Rousavy on 05.10.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CMVideoDimensions {
  func toCGSize() -> CGSize {
    return CGSize(width: Int(width), height: Int(height))
  }
}
@@ -10,7 +10,7 @@
  B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */; };
  B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; };
  B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
- B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
+ B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift */; };
  B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
  B8446E4D2ABA147C00E56077 /* CameraDevicesManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */; };
  B8446E502ABA14C900E56077 /* CameraDevicesManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B8446E4F2ABA14C900E56077 /* CameraDevicesManager.m */; };
@@ -31,12 +31,10 @@
  B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B887515F25E0102000DB86D6 /* CameraViewManager.m */; };
  B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516225E0102000DB86D6 /* Collection+safe.swift */; };
  B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */; };
- B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */; };
  B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */; };
  B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */; };
  B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */; };
  B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */; };
- B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */; };
  B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; };
  B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516E25E0102000DB86D6 /* MakeReactError.swift */; };
  B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; };
@@ -65,6 +63,7 @@
  B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
  B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
  B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */; };
+ B8F127D02ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */; };
  B8FF60AC2ACC93EF009D612F /* CameraView+CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60AB2ACC93EF009D612F /* CameraView+CodeScanner.swift */; };
  B8FF60AE2ACC9731009D612F /* CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60AD2ACC9731009D612F /* CodeScanner.swift */; };
  B8FF60B12ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60B02ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift */; };
@@ -91,7 +90,7 @@
  B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
  B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
  B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
- B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
+ B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+dimensions.swift"; sourceTree = "<group>"; };
  B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
  B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
  B8446E4C2ABA147C00E56077 /* CameraDevicesManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraDevicesManager.swift; sourceTree = "<group>"; };
@@ -115,12 +114,10 @@
  B887515F25E0102000DB86D6 /* CameraViewManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraViewManager.m; sourceTree = "<group>"; };
  B887516225E0102000DB86D6 /* Collection+safe.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Collection+safe.swift"; sourceTree = "<group>"; };
  B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+neutralZoom.swift"; sourceTree = "<group>"; };
- B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+isBetterThan.swift"; sourceTree = "<group>"; };
  B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+isMultiCam.swift"; sourceTree = "<group>"; };
  B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+physicalDevices.swift"; sourceTree = "<group>"; };
  B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVFrameRateRange+includes.swift"; sourceTree = "<group>"; };
  B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput+mirror.swift"; sourceTree = "<group>"; };
- B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+matchesFilter.swift"; sourceTree = "<group>"; };
  B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+toDictionary.swift"; sourceTree = "<group>"; };
  B887516E25E0102000DB86D6 /* MakeReactError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MakeReactError.swift; sourceTree = "<group>"; };
  B887516F25E0102000DB86D6 /* ReactLogger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReactLogger.swift; sourceTree = "<group>"; };
@@ -154,6 +151,7 @@
  B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
  B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
  B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessor.mm; sourceTree = "<group>"; };
+ B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMVideoDimensions+toCGSize.swift"; sourceTree = "<group>"; };
  B8F7DDD1266F715D00120533 /* Frame.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Frame.m; sourceTree = "<group>"; };
  B8FF60AB2ACC93EF009D612F /* CameraView+CodeScanner.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+CodeScanner.swift"; sourceTree = "<group>"; };
  B8FF60AD2ACC9731009D612F /* CodeScanner.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CodeScanner.swift; sourceTree = "<group>"; };
@@ -228,18 +226,17 @@
  B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */,
  B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */,
  B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */,
- B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */,
- B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */,
+ B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift */,
  B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */,
  B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */,
  B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */,
  B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */,
  B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */,
- B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */,
  B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */,
  B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */,
  B887516225E0102000DB86D6 /* Collection+safe.swift */,
  B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */,
+ B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */,
  );
  path = Extensions;
  sourceTree = "<group>";
@@ -400,7 +397,7 @@
  files = (
  B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */,
  B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
- B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
+ B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift in Sources */,
  B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
  B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
  B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
@@ -420,7 +417,6 @@
  B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
  B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
  B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
- B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
  B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
  B8FF60B12ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift in Sources */,
  B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
@@ -446,7 +442,6 @@
  B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */,
  B8FF60AE2ACC9731009D612F /* CodeScanner.swift in Sources */,
  B8446E502ABA14C900E56077 /* CameraDevicesManager.m in Sources */,
- B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */,
  B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
  B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
  B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */,
@@ -454,6 +449,7 @@
  B8446E4D2ABA147C00E56077 /* CameraDevicesManager.swift in Sources */,
  B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
  B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
+ B8F127D02ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift in Sources */,
  B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */,
  B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
  B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
@@ -118,7 +118,9 @@ export interface CameraDeviceFormat {
  videoStabilizationModes: VideoStabilizationMode[]
  /**
   * Specifies this format's supported pixel-formats.
-  * In most cases, this is `['native', 'yuv']`.
+  * In most cases, this is `['native', 'yuv']`, some iPhones also support `'rgb'`.
+  *
+  * Any value represented here can be used to configure the Camera to stream Frames in the given {@linkcode PixelFormat}.
   */
  pixelFormats: PixelFormat[]
}
@@ -52,8 +52,6 @@ export interface CameraProps extends ViewProps {
  photo?: boolean
  /**
   * Enables **video capture** with the `startRecording` function (see ["Recording Videos"](https://react-native-vision-camera.com/docs/guides/recording-videos))
-  *
-  * Note: If both the `photo` and `video` properties are enabled at the same time and the device is running at a `hardwareLevel` of `'legacy'` or `'limited'`, VisionCamera _might_ use a lower resolution for video capture due to hardware constraints.
   */
  video?: boolean
  /**
@@ -63,9 +61,14 @@ export interface CameraProps extends ViewProps {
  /**
   * Specifies the pixel format for the video pipeline.
   *
-  * Frames from a [Frame Processor](https://react-native-vision-camera.com/docs/guides/frame-processors) will be streamed in the pixel format specified here.
   * Make sure the given {@linkcode format} supports the given {@linkcode pixelFormat} (see {@linkcode CameraDeviceFormat.pixelFormats format.pixelFormats}).
   *
-  * While `native` and `yuv` are the most efficient formats, some ML models (such as MLKit Barcode detection) require input Frames to be in RGB colorspace, otherwise they just output nonsense.
+  * Affects:
+  * * {@linkcode frameProcessor}: The format of Frames from a [Frame Processor](https://react-native-vision-camera.com/docs/guides/frame-processors).
+  * While `'native'` and `'yuv'` are the most efficient formats, some ML models (such as TensorFlow Face Detection Models) require input Frames to be in RGB colorspace, otherwise they just output nonsense.
+  * * {@linkcode video}: The format of Frames streamed in the Video Pipeline. The format `'native'` is most efficient here.
   *
   * The following values are supported:
   *
   * - `native`: The hardware native GPU buffer format. This is the most efficient format. (`PRIVATE` on Android, sometimes YUV on iOS)
   * - `yuv`: The YUV (Y'CbCr 4:2:0 or NV21, 8-bit) format, either video- or full-range, depending on hardware capabilities. This is the second most efficient format.
@@ -80,7 +83,7 @@ export interface CameraProps extends ViewProps {
  /**
   * Set the current torch mode.
   *
-  * Note: The torch is only available on `"back"` cameras, and isn't supported by every phone.
+  * Make sure the given {@linkcode device} has a torch (see {@linkcode CameraDevice.hasTorch device.hasTorch}).
   *
   * @default "off"
   */
@@ -100,7 +103,7 @@ export interface CameraProps extends ViewProps {
  /**
   * Enables or disables the native pinch to zoom gesture.
   *
-  * If you want to implement a custom zoom gesture, see [the Zooming with Reanimated documentation](https://react-native-vision-camera.com/docs/guides/animated).
+  * If you want to implement a custom zoom gesture, see [the Zooming with Reanimated documentation](https://react-native-vision-camera.com/docs/guides/zooming).
   *
   * @default false
   */
@@ -110,6 +113,15 @@ export interface CameraProps extends ViewProps {
  //#region Format/Preset selection
  /**
   * Selects a given format. By default, the best matching format is chosen.
+  *
+  * The format defines the possible values for properties like:
+  * - {@linkcode fps}: {@linkcode CameraDeviceFormat.minFps format.minFps}...{@linkcode CameraDeviceFormat.maxFps format.maxFps}
+  * - {@linkcode hdr}: {@linkcode CameraDeviceFormat.supportsVideoHDR format.supportsVideoHDR}
+  * - {@linkcode pixelFormat}: {@linkcode CameraDeviceFormat.pixelFormats format.pixelFormats}
+  * - {@linkcode enableDepthData}: {@linkcode CameraDeviceFormat.supportsDepthCapture format.supportsDepthCapture}
+  * - {@linkcode videoStabilizationMode}: {@linkcode CameraDeviceFormat.videoStabilizationModes format.videoStabilizationModes}
+  *
+  * In other words; {@linkcode enableDepthData} can only be set to true if {@linkcode CameraDeviceFormat.supportsDepthCapture format.supportsDepthCapture} is true.
   */
  format?: CameraDeviceFormat
  /**
@@ -121,15 +133,15 @@ export interface CameraProps extends ViewProps {
   */
  resizeMode?: 'cover' | 'contain'
  /**
-  * Specify the frames per second this camera should use. Make sure the given `format` includes a frame rate range with the given `fps`.
+  * Specify the frames per second this camera should stream frames at.
   *
-  * Requires `format` to be set that supports the given `fps`.
+  * Make sure the given {@linkcode format} can stream at the target {@linkcode fps} value (see {@linkcode CameraDeviceFormat.minFps format.minFps} and {@linkcode CameraDeviceFormat.maxFps format.maxFps}).
   */
  fps?: number
  /**
-  * Enables or disables HDR on this camera device. Make sure the given `format` supports HDR mode.
+  * Enables or disables HDR streaming.
   *
-  * Requires `format` to be set that supports `photoHDR`/`videoHDR`.
+  * Make sure the given {@linkcode format} supports HDR (see {@linkcode CameraDeviceFormat.supportsVideoHDR format.supportsVideoHDR}).
   */
  hdr?: boolean
  /**
@@ -154,21 +166,23 @@ export interface CameraProps extends ViewProps {
   */
  enableBufferCompression?: boolean
  /**
-  * Enables or disables low-light boost on this camera device. Make sure the given `format` supports low-light boost.
+  * Enables or disables low-light boost on this camera device.
   *
-  * Requires a `format` to be set that supports `lowLightBoost`.
+  * Make sure the given {@linkcode device} supports low-light-boost (see {@linkcode CameraDevice.supportsLowLightBoost device.supportsLowLightBoost}).
   */
  lowLightBoost?: boolean
  /**
   * Specifies the video stabilization mode to use.
   *
-  * Requires a `format` to be set that contains the given `videoStabilizationMode`.
+  * Make sure the given {@linkcode format} supports the given {@linkcode videoStabilizationMode} (see {@linkcode CameraDeviceFormat.videoStabilizationModes format.videoStabilizationModes}).
   */
  videoStabilizationMode?: VideoStabilizationMode
  //#endregion

  /**
-  * Also captures data from depth-perception sensors. (e.g. disparity maps)
+  * Enables or disables depth data delivery for photo capture.
+  *
+  * Make sure the given {@linkcode format} supports depth data (see {@linkcode CameraDeviceFormat.supportsDepthCapture format.supportsDepthCapture}).
+  *
   * @default false
   */