feat: Correctly get videoDimensions on devices below iOS 13

Marc Rousavy 2021-08-03 10:37:48 +02:00
parent aaecb90937
commit c078cdf933
6 changed files with 50 additions and 26 deletions
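Background on the fix, not part of the diff below: the Swift accessor formatDescription.presentationDimensions() used so far is only available on iOS 13+, so the dimension checks were wrapped in #available(iOS 13.0, *) guards and were simply skipped on older devices. The C function CMVideoFormatDescriptionGetPresentationDimensions has no such restriction, and that is what the new videoDimensions property introduced here wraps. A minimal sketch of the idea (the helper name is hypothetical):

import AVFoundation

// Reading presentation dimensions through the C API also works below iOS 13,
// so callers no longer need an #available(iOS 13.0, *) guard.
func presentationSize(of format: AVCaptureDevice.Format) -> CGSize {
  return CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription,
                                                           usePixelAspectRatio: true,
                                                           useCleanAperture: true)
}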

View File

@@ -322,7 +322,7 @@ PODS:
     - React
   - RNVectorIcons (8.1.0):
     - React-Core
-  - VisionCamera (2.4.2-beta.11):
+  - VisionCamera (2.4.2-beta.12):
     - React
     - React-callinvoker
     - React-Core
@@ -490,7 +490,7 @@ SPEC CHECKSUMS:
   RNReanimated: daebbd404c0cd9df6daa248d63dd940086bea9ff
   RNStaticSafeAreaInsets: 6103cf09647fa427186d30f67b0f5163c1ae8252
   RNVectorIcons: 31cebfcf94e8cf8686eb5303ae0357da64d7a5a4
-  VisionCamera: 35bd0ff9611655b0e5506c1721da45e006f3398e
+  VisionCamera: ed40e8380f2cd139ccf78bfbf9e5a98e91781033
   Yoga: 575c581c63e0d35c9a83f4b46d01d63abc1100ac

 PODFILE CHECKSUM: 4b093c1d474775c2eac3268011e4b0b80929d3a2

View File

@@ -22,14 +22,12 @@ extension AVCaptureDevice.Format {
       return true
     }
-    if #available(iOS 13.0, *) {
     // compare video dimensions
-    let leftVideo = self.formatDescription.presentationDimensions()
-    let rightVideo = other.formatDescription.presentationDimensions()
+    let leftVideo = self.videoDimensions
+    let rightVideo = other.videoDimensions
     if leftVideo.height * leftVideo.width > rightVideo.height * rightVideo.width {
       return true
     }
-    }

     // compare max fps
     if let leftMaxFps = videoSupportedFrameRateRanges.max(by: { $0.maxFrameRate > $1.maxFrameRate }),

View File

@@ -24,24 +24,16 @@ extension AVCaptureDevice.Format {
         return false
       }
     }
-    if #available(iOS 13.0, *) {
     if let videoHeight = filter.value(forKey: "videoHeight") as? NSNumber {
-      if self.formatDescription.presentationDimensions().height != CGFloat(videoHeight.doubleValue) {
+      if videoDimensions.height != CGFloat(videoHeight.doubleValue) {
         return false
       }
     }
     if let videoWidth = filter.value(forKey: "videoWidth") as? NSNumber {
-      if self.formatDescription.presentationDimensions().width != CGFloat(videoWidth.doubleValue) {
+      if videoDimensions.width != CGFloat(videoWidth.doubleValue) {
         return false
       }
     }
-    if let isHighestPhotoQualitySupported = filter.value(forKey: "isHighestPhotoQualitySupported") as? Bool {
-      if self.isHighestPhotoQualitySupported != isHighestPhotoQualitySupported {
-        return false
-      }
-    }
-    }
     if let maxISO = filter.value(forKey: "maxISO") as? NSNumber {
       if self.maxISO != maxISO.floatValue {
         return false
@@ -98,6 +90,14 @@ extension AVCaptureDevice.Format {
       }
     }
+    if #available(iOS 13.0, *) {
+      if let isHighestPhotoQualitySupported = filter.value(forKey: "isHighestPhotoQualitySupported") as? Bool {
+        if self.isHighestPhotoQualitySupported != isHighestPhotoQualitySupported {
+          return false
+        }
+      }
+    }

     return true
   }
 }
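For illustration: the filter checked above is a plain key/value dictionary with keys such as "videoWidth", "videoHeight", "isHighestPhotoQualitySupported" and "maxISO". A self-contained, hypothetical sketch of the same matching idea (the free-function form and its name are illustrative; the real implementation is an AVCaptureDevice.Format extension method and checks many more keys):

import Foundation
import AVFoundation

// Hypothetical stand-alone version of the matching logic shown above.
func formatMatches(_ format: AVCaptureDevice.Format, filter: NSDictionary) -> Bool {
  // Presentation dimensions via the C API, available below iOS 13 as well.
  let size = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription,
                                                               usePixelAspectRatio: true,
                                                               useCleanAperture: true)
  if let videoHeight = filter.value(forKey: "videoHeight") as? NSNumber,
     size.height != CGFloat(videoHeight.doubleValue) {
    return false
  }
  if let videoWidth = filter.value(forKey: "videoWidth") as? NSNumber,
     size.width != CGFloat(videoWidth.doubleValue) {
    return false
  }
  if let maxISO = filter.value(forKey: "maxISO") as? NSNumber,
     format.maxISO != maxISO.floatValue {
    return false
  }
  return true
}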

View File

@@ -22,8 +22,6 @@ extension AVCaptureDevice.Format {
   }

   func toDictionary() -> [String: Any] {
-    let videoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
-
     var dict: [String: Any] = [
       "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
       "autoFocusSystem": autoFocusSystem.descriptor,

View File

@@ -0,0 +1,24 @@
+//
+// AVCaptureDevice.Format+videoDimensions.swift
+// VisionCamera
+//
+// Created by Marc Rousavy on 03.08.21.
+// Copyright © 2021 mrousavy. All rights reserved.
+//
+
+import Foundation
+import AVFoundation
+
+extension AVCaptureDevice.Format {
+  /**
+   * Returns the video dimensions, adjusted to take pixel aspect ratio and/or clean
+   * aperture into account.
+   *
+   * Pixel aspect ratio is used to adjust the width, leaving the height alone.
+   */
+  var videoDimensions: CGSize {
+    return CMVideoFormatDescriptionGetPresentationDimensions(formatDescription,
+                                                             usePixelAspectRatio: true,
+                                                             useCleanAperture: true)
+  }
+}
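A short usage sketch for the new property (illustrative only; the function and the selection strategy below are not part of this commit): since videoDimensions avoids the iOS 13-only Swift accessor, it can be read without an availability check.

import AVFoundation

// Illustrative: pick the device format with the largest presentation area,
// using the videoDimensions property added in this commit.
func largestFormat(of device: AVCaptureDevice) -> AVCaptureDevice.Format? {
  return device.formats.max { lhs, rhs in
    let l = lhs.videoDimensions
    let r = rhs.videoDimensions
    return l.width * l.height < r.width * r.height
  }
}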

View File

@@ -10,6 +10,7 @@
 B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.mm in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */; };
 B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
 B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */; };
+B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
 B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
 B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
 B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
@@ -84,6 +85,7 @@
 B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorUtils.mm; sourceTree = "<group>"; };
 B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorUtils.h; sourceTree = "<group>"; };
 B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
+B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
 B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
 B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "RCTBridge+runOnJS.h"; sourceTree = "<group>"; };
 B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "RCTBridge+runOnJS.mm"; sourceTree = "<group>"; };
@@ -197,6 +199,7 @@
 B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */,
 B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */,
 B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */,
+B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */,
 B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */,
 B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */,
 B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */,
@@ -369,6 +372,7 @@
 files = (
 B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */,
 B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
+B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
 B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
 B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
 B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,