From 0e9f1ca64036041621fe390e62a86ef8c503c4e8 Mon Sep 17 00:00:00 2001 From: Marc Rousavy Date: Fri, 1 Sep 2023 15:07:16 +0200 Subject: [PATCH] perf: Improve `pixelFormat` and add `CameraDevice.sensorOrientation` (#1729) * feat: Orientation * fix orientation value in manifest * Update AndroidManifest.xml * Style * fix: Set MAX_IMAGES to 3 * Pass `isMirrored` to `VideoPipeline` * Update docs about Skia FPs * Options * Add iPad target * Remove UIDevice onOrientationChanged listener * Update CameraView+AVCaptureSession.swift * Update CameraView+AVCaptureSession.swift * Update CameraView+AVCaptureSession.swift * Get available pixelFormats on iOS * format * Update CameraSession.kt * Expose `CameraDevice.sensorOrientation` * Lock orientation again --- .../camera/core/CameraDeviceDetails.kt | 3 +++ .../com/mrousavy/camera/core/CameraSession.kt | 2 +- .../com/mrousavy/camera/core/VideoPipeline.kt | 9 +++---- .../camera/core/outputs/CameraOutputs.kt | 4 +++- .../CameraDevice+createPhotoCaptureRequest.kt | 18 +++++++------- docs/docs/guides/FRAME_PROCESSORS_SKIA.mdx | 10 ++++++++ example/ios/Podfile.lock | 4 ++-- .../project.pbxproj | 16 +++++++++++-- example/ios/VisionCameraExample/Info.plist | 24 +++++++++---------- example/src/CameraPage.tsx | 2 +- ios/CameraError.swift | 3 +++ ios/CameraView+AVCaptureSession.swift | 21 ++++++++++++---- ios/CameraView.swift | 20 ++++------------ ios/CameraViewManager.swift | 1 + .../AVCaptureDevice.Format+toDictionary.swift | 10 ++++---- src/CameraDevice.ts | 6 +++++ 16 files changed, 95 insertions(+), 58 deletions(-) diff --git a/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt b/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt index 7638d37..8fe83b0 100644 --- a/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt +++ b/android/src/main/java/com/mrousavy/camera/core/CameraDeviceDetails.kt @@ -17,6 +17,7 @@ import com.mrousavy.camera.extensions.getVideoSizes import com.mrousavy.camera.parsers.PixelFormat import com.mrousavy.camera.parsers.HardwareLevel import com.mrousavy.camera.parsers.LensFacing +import com.mrousavy.camera.parsers.Orientation import com.mrousavy.camera.parsers.VideoStabilizationMode import kotlin.math.PI import kotlin.math.atan @@ -36,6 +37,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false private val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f /* 35mm default */) private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!! + private val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!! 
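// For context: the `sensorOrientation` degrees read above are converted for the JS layer via
// `Orientation.fromRotationDegrees(...)` further down in this file. A minimal sketch of such a
// parser, assuming the usual 0/90/180/270 quantization — the real implementation lives in
// com.mrousavy.camera.parsers.Orientation and its exact degree-to-value mapping may differ:
enum class Orientation(val unionValue: String) {
  PORTRAIT("portrait"),
  LANDSCAPE_RIGHT("landscape-right"),
  PORTRAIT_UPSIDE_DOWN("portrait-upside-down"),
  LANDSCAPE_LEFT("landscape-left");

  companion object {
    fun fromRotationDegrees(rotationDegrees: Int): Orientation =
      when ((rotationDegrees % 360 + 360) % 360) {
        in 45 until 135 -> LANDSCAPE_RIGHT
        in 135 until 225 -> PORTRAIT_UPSIDE_DOWN
        in 225 until 315 -> LANDSCAPE_LEFT
        else -> PORTRAIT
      }
  }
}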
private val name = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION) else null) ?: "$lensFacing (${cameraId})" @@ -202,6 +204,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val map.putDouble("maxZoom", maxZoom) map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android map.putString("hardwareLevel", hardwareLevel.unionValue) + map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue) val array = Arguments.createArray() cameraConfig.outputFormats.forEach { f -> diff --git a/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt b/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt index a71e784..5238a26 100644 --- a/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt +++ b/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt @@ -56,7 +56,7 @@ class CameraSession(private val context: Context, private const val TAG = "CameraSession" // TODO: Samsung advertises 60 FPS but only allows 30 FPS for some reason. - private val CAN_SET_FPS = Build.MANUFACTURER != "samsung" + private val CAN_SET_FPS = !Build.MANUFACTURER.equals("samsung", true) } data class CapturedPhoto(val image: Image, diff --git a/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt b/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt index c0b65fb..e83a758 100644 --- a/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt +++ b/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt @@ -24,9 +24,10 @@ import java.io.Closeable @Suppress("KotlinJniMissingFunction") class VideoPipeline(val width: Int, val height: Int, - val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable { + val format: Int = ImageFormat.PRIVATE, + private val isMirrored: Boolean = false): SurfaceTexture.OnFrameAvailableListener, Closeable { companion object { - private const val MAX_IMAGES = 5 + private const val MAX_IMAGES = 3 private const val TAG = "VideoPipeline" } @@ -98,7 +99,7 @@ class VideoPipeline(val width: Int, val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener // TODO: Get correct orientation and isMirrored - val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false) + val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, isMirrored) frame.incrementRefCount() frameProcessor?.call(frame) frame.decrementRefCount() @@ -110,7 +111,7 @@ class VideoPipeline(val width: Int, * Configures the Pipeline to also call the given [FrameProcessor]. * * If the [frameProcessor] is `null`, this output channel will be removed. * * If the [frameProcessor] is not `null`, the [VideoPipeline] will create Frames - * using an [ImageWriter] and call the [FrameProcessor] with those Frames. + * using an [ImageWriter] and call the [FrameProcessor] with those Frames. */ fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) 
{ synchronized(this) { diff --git a/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt b/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt index 0a0b0b3..f6eed2f 100644 --- a/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt +++ b/android/src/main/java/com/mrousavy/camera/core/outputs/CameraOutputs.kt @@ -1,6 +1,7 @@ package com.mrousavy.camera.core.outputs import android.graphics.ImageFormat +import android.hardware.camera2.CameraCharacteristics import android.hardware.camera2.CameraManager import android.media.Image import android.media.ImageReader @@ -91,6 +92,7 @@ class CameraOutputs(val cameraId: String, init { val characteristics = cameraManager.getCameraCharacteristics(cameraId) + val isMirrored = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT Log.i(TAG, "Preparing Outputs for Camera $cameraId...") @@ -117,7 +119,7 @@ class CameraOutputs(val cameraId: String, // Video output: High resolution repeating images (startRecording() or useFrameProcessor()) if (video != null) { val size = characteristics.getVideoSizes(cameraId, video.format).closestToOrMax(video.targetSize) - val videoPipeline = VideoPipeline(size.width, size.height, video.format) + val videoPipeline = VideoPipeline(size.width, size.height, video.format, isMirrored) Log.i(TAG, "Adding ${size.width}x${size.height} video output. (Format: ${video.format})") videoOutput = VideoPipelineOutput(videoPipeline, SurfaceOutput.OutputType.VIDEO) diff --git a/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt b/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt index 873f512..4331a64 100644 --- a/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt +++ b/android/src/main/java/com/mrousavy/camera/extensions/CameraDevice+createPhotoCaptureRequest.kt @@ -46,23 +46,23 @@ fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager, QualityPrioritization.BALANCED -> 92 QualityPrioritization.QUALITY -> 100 } - captureRequest[CaptureRequest.JPEG_QUALITY] = jpegQuality.toByte() + captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte()) captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees()) when (flashMode) { // Set the Flash Mode Flash.OFF -> { - captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON + captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON) } Flash.ON -> { - captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH + captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH) } Flash.AUTO -> { if (enableRedEyeReduction) { - captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE + captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) } else { - captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH + captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH) } } } @@ -75,17 +75,17 @@ fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager, val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) val hasOpticalStabilization = 
opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false if (hasOpticalStabilization) { - captureRequest[CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE] = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF - captureRequest[CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE] = CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON + captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF) + captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) } else if (hasDigitalStabilization) { - captureRequest[CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE] = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON + captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) } else { // no stabilization is supported. ignore it } } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { - captureRequest[CaptureRequest.CONTROL_ZOOM_RATIO] = zoom + captureRequest.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoom) } else { val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!! captureRequest.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoom)) diff --git a/docs/docs/guides/FRAME_PROCESSORS_SKIA.mdx b/docs/docs/guides/FRAME_PROCESSORS_SKIA.mdx index 619989e..1cea05f 100644 --- a/docs/docs/guides/FRAME_PROCESSORS_SKIA.mdx +++ b/docs/docs/guides/FRAME_PROCESSORS_SKIA.mdx @@ -19,6 +19,16 @@ import useBaseUrl from '@docusaurus/useBaseUrl'; Skia Frame Processors are [Frame Processors](frame-processors) that allow you to draw onto the Frame using [react-native-skia](https://github.com/Shopify/react-native-skia). +Skia Frame Processors were introduced in VisionCamera V3 RC.0, but were removed after VisionCamera V3 RC.9 because they significantly increased the complexity of the video pipeline. To keep using them, pin RC.9: + +``` +yarn add react-native-vision-camera@rc.9 +``` + +They worked well in those RCs apart from some minor inconsistencies (landscape orientation didn't work on Android), which proved the concept. If you want to learn more about Skia Frame Processors, we at [Margelo](https://margelo.io) can build a custom solution for your company that implements drawable Frame Processors (e.g. filters, blurring, masks, or colors). See [PR #1740](https://github.com/mrousavy/react-native-vision-camera/pull/1740) for more details.
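As an aside on the `SCALER_CROP_REGION` fallback in the capture-request hunk above: `zoomed(zoom)` is an extension on `android.graphics.Rect`. A minimal sketch of what such a helper presumably computes — a centered crop of the active sensor array, shrunk by the zoom factor (the actual extension lives in the repo's extensions package and may differ):

```kotlin
import android.graphics.Rect

// Hypothetical sketch: emulate zoom on API < 30 by center-cropping
// the active array Rect by the zoom factor.
fun Rect.zoomed(zoomFactor: Float): Rect {
  val croppedWidth = width() / zoomFactor
  val croppedHeight = height() / zoomFactor
  val left = centerX() - croppedWidth / 2f
  val top = centerY() - croppedHeight / 2f
  return Rect(left.toInt(), top.toInt(), (left + croppedWidth).toInt(), (top + croppedHeight).toInt())
}
```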
+ +### Documentation + For example, you might want to draw a rectangle around a user's face **without writing any native code**, while still **achieving native performance**: ```jsx diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index baea4e0..3deb873 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -501,7 +501,7 @@ PODS: - libwebp (~> 1.0) - SDWebImage/Core (~> 5.10) - SocketRocket (0.6.1) - - VisionCamera (3.0.0-rc.8): + - VisionCamera (3.0.0-rc.9): - React - React-callinvoker - React-Core @@ -733,7 +733,7 @@ SPEC CHECKSUMS: SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 - VisionCamera: 5bd7961602a7db4de21fdc3588df6ce01d693d37 + VisionCamera: 77e12500568c495e71914aacf52ccd7b9d3c5478 Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce PODFILE CHECKSUM: ab9c06b18c63e741c04349c0fd630c6d3145081c diff --git a/example/ios/VisionCameraExample.xcodeproj/project.pbxproj b/example/ios/VisionCameraExample.xcodeproj/project.pbxproj index 8008784..c706663 100644 --- a/example/ios/VisionCameraExample.xcodeproj/project.pbxproj +++ b/example/ios/VisionCameraExample.xcodeproj/project.pbxproj @@ -11,7 +11,7 @@ 13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; }; 13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; }; 81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; }; - B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */ = {isa = PBXBuildFile; }; + B8DB3BD5263DE8B7004C18D7 /* (null) in Sources */ = {isa = PBXBuildFile; }; B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */; }; B8F0E10825E0199F00586F16 /* File.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F0E10725E0199F00586F16 /* File.swift */; }; C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */; }; @@ -375,7 +375,7 @@ files = ( 13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */, B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */, - B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */, + B8DB3BD5263DE8B7004C18D7 /* (null) in Sources */, B8F0E10825E0199F00586F16 /* File.swift in Sources */, 13B07FC11A68108700A75B9A /* main.m in Sources */, ); @@ -394,6 +394,8 @@ DEVELOPMENT_TEAM = CJW62Q77E7; ENABLE_BITCODE = NO; INFOPLIST_FILE = VisionCameraExample/Info.plist; + INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera"; + INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography"; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; OTHER_LDFLAGS = ( "$(inherited)", @@ -402,9 +404,13 @@ ); PRODUCT_BUNDLE_IDENTIFIER = com.mrousavy.camera.example; PRODUCT_NAME = VisionCameraExample; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES; SWIFT_OBJC_BRIDGING_HEADER = "VisionCameraExample-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.2; + TARGETED_DEVICE_FAMILY = "1,2"; VERSIONING_SYSTEM = "apple-generic"; }; name = Debug; @@ -418,6 +424,8 @@ 
CURRENT_PROJECT_VERSION = 1; DEVELOPMENT_TEAM = CJW62Q77E7; INFOPLIST_FILE = VisionCameraExample/Info.plist; + INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera"; + INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography"; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; OTHER_LDFLAGS = ( "$(inherited)", ); PRODUCT_BUNDLE_IDENTIFIER = com.mrousavy.camera.example; PRODUCT_NAME = VisionCameraExample; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES; SWIFT_OBJC_BRIDGING_HEADER = "VisionCameraExample-Bridging-Header.h"; SWIFT_VERSION = 5.2; + TARGETED_DEVICE_FAMILY = "1,2"; VERSIONING_SYSTEM = "apple-generic"; }; name = Release; diff --git a/example/ios/VisionCameraExample/Info.plist b/example/ios/VisionCameraExample/Info.plist index c25da90..305cbdf 100644 --- a/example/ios/VisionCameraExample/Info.plist +++ b/example/ios/VisionCameraExample/Info.plist @@ -35,10 +35,21 @@ <false/> + <key>NSCameraUsageDescription</key> + <string>VisionCamera needs access to your Camera for very obvious reasons.</string> <key>NSLocationWhenInUseUsageDescription</key> <string></string> + <key>NSMicrophoneUsageDescription</key> + <string>VisionCamera needs access to your Microphone to record videos with audio.</string> + <key>NSPhotoLibraryUsageDescription</key> + <string>VisionCamera needs access to your photo library to save captured videos and photos.</string> + <key>UIAppFonts</key> + <array> + <string>Ionicons.ttf</string> + <string>MaterialCommunityIcons.ttf</string> + </array> <key>UILaunchStoryboardName</key> - <string>LaunchScreen</string> + <string>LaunchScreen.storyboard</string> <key>UIRequiredDeviceCapabilities</key> <array> <string>armv7</string> </array> @@ -49,16 +60,5 @@ <key>UIViewControllerBasedStatusBarAppearance</key> <false/> - <key>NSCameraUsageDescription</key> - <string>VisionCamera needs access to your Camera for very obvious reasons.</string> - <key>NSMicrophoneUsageDescription</key> - <string>VisionCamera needs access to your Microphone to record videos with audio.</string> - <key>NSPhotoLibraryUsageDescription</key> - <string>VisionCamera needs access to your photo library to save captured videos and photos.</string> - <key>UIAppFonts</key> - <array> - <string>Ionicons.ttf</string> - <string>MaterialCommunityIcons.ttf</string> - </array> </dict> </plist> diff --git a/example/src/CameraPage.tsx b/example/src/CameraPage.tsx index b9727e8..95c19be 100644 --- a/example/src/CameraPage.tsx +++ b/example/src/CameraPage.tsx @@ -199,7 +199,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement { const frameProcessor = useFrameProcessor((frame) => { 'worklet'; - console.log(frame.timestamp, frame.toString(), frame.pixelFormat); + console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`); examplePlugin(frame); }, []); diff --git a/ios/CameraError.swift b/ios/CameraError.swift index 8fc6be3..09ddb9b 100644 --- a/ios/CameraError.swift +++ b/ios/CameraError.swift @@ -79,6 +79,7 @@ enum DeviceError: String { case lowLightBoostNotSupported = "low-light-boost-not-supported" case focusNotSupported = "focus-not-supported" case notAvailableOnSimulator = "camera-not-available-on-simulator" + case pixelFormatNotSupported = "pixel-format-not-supported" var code: String { return rawValue @@ -102,6 +103,8 @@ enum DeviceError: String { return "The microphone was unavailable." case .notAvailableOnSimulator: return "The Camera is not available on the iOS Simulator!" + case .pixelFormatNotSupported: + return "The given pixelFormat is not supported on the given Camera Device!"
} } } diff --git a/ios/CameraView+AVCaptureSession.swift b/ios/CameraView+AVCaptureSession.swift index 0743ca7..86ca441 100644 --- a/ios/CameraView+AVCaptureSession.swift +++ b/ios/CameraView+AVCaptureSession.swift @@ -116,13 +116,24 @@ extension CameraView { videoOutput!.alwaysDiscardsLateVideoFrames = false if let pixelFormat = pixelFormat as? String { - let defaultFormat = CMFormatDescriptionGetMediaSubType(videoDeviceInput!.device.activeFormat.formatDescription) + let supportedPixelFormats = videoOutput!.availableVideoPixelFormatTypes + let defaultFormat = supportedPixelFormats.first! // first value is always the most efficient format var pixelFormatType: OSType = defaultFormat switch pixelFormat { case "yuv": - pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange + } else if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) { + pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + } else { + invokeOnError(.device(.pixelFormatNotSupported)) + } case "rgb": - pixelFormatType = kCVPixelFormatType_32BGRA + if supportedPixelFormats.contains(kCVPixelFormatType_32BGRA) { + pixelFormatType = kCVPixelFormatType_32BGRA + } else { + invokeOnError(.device(.pixelFormatNotSupported)) + } case "native": pixelFormatType = defaultFormat default: @@ -135,7 +146,9 @@ extension CameraView { captureSession.addOutput(videoOutput!) } - onOrientationChanged() + if outputOrientation != .portrait { + updateOrientation() + } invokeOnInitialized() isReady = true diff --git a/ios/CameraView.swift b/ios/CameraView.swift index 8a46f43..f4cc38b 100644 --- a/ios/CameraView.swift +++ b/ios/CameraView.swift @@ -107,8 +107,11 @@ public final class CameraView: UIView { // pragma MARK: Setup override public init(frame: CGRect) { + previewView = PreviewView(frame: frame, session: captureSession) super.init(frame: frame) + addSubview(previewView) + NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError), name: .AVCaptureSessionRuntimeError, @@ -121,13 +124,6 @@ public final class CameraView: UIView { selector: #selector(audioSessionInterrupted), name: AVAudioSession.interruptionNotification, object: AVAudioSession.sharedInstance) - NotificationCenter.default.addObserver(self, - selector: #selector(onOrientationChanged), - name: UIDevice.orientationDidChangeNotification, - object: nil) - - previewView = PreviewView(frame: frame, session: captureSession) - addSubview(previewView) } @available(*, unavailable) @@ -145,9 +141,6 @@ public final class CameraView: UIView { NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: AVAudioSession.sharedInstance) - NotificationCenter.default.removeObserver(self, - name: UIDevice.orientationDidChangeNotification, - object: nil) } override public func willMove(toSuperview newSuperview: UIView?) { @@ -250,7 +243,7 @@ public final class CameraView: UIView { } } } - + func setupFpsGraph() { #if DEBUG if enableFpsGraph { @@ -265,11 +258,6 @@ public final class CameraView: UIView { #endif } - @objc - func onOrientationChanged() { - updateOrientation() - } - // pragma MARK: Event Invokers final func invokeOnError(_ error: CameraError, cause: NSError?
= nil) { ReactLogger.log(level: .error, message: "Invoking onError(): \(error.message)") diff --git a/ios/CameraViewManager.swift b/ios/CameraViewManager.swift index 718da3c..9acf1cf 100644 --- a/ios/CameraViewManager.swift +++ b/ios/CameraViewManager.swift @@ -102,6 +102,7 @@ final class CameraViewManager: RCTViewManager { "supportsLowLightBoost": $0.isLowLightBoostSupported, "supportsFocus": $0.isFocusPointOfInterestSupported, "hardwareLevel": "full", + "sensorOrientation": "portrait", // TODO: Sensor Orientation? "formats": $0.formats.map { format -> [String: Any] in format.toDictionary() }, diff --git a/ios/Extensions/AVCaptureDevice.Format+toDictionary.swift b/ios/Extensions/AVCaptureDevice.Format+toDictionary.swift index 7c8f43c..f34f3ff 100644 --- a/ios/Extensions/AVCaptureDevice.Format+toDictionary.swift +++ b/ios/Extensions/AVCaptureDevice.Format+toDictionary.swift @@ -36,10 +36,10 @@ extension AVCaptureDevice.Format { } func toDictionary() -> [String: Any] { - let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription) - let pixelFormat = PixelFormat(mediaSubType: mediaSubType) + let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes + let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) } - var dict: [String: Any] = [ + return [ "videoStabilizationModes": videoStabilizationModes.map(\.descriptor), "autoFocusSystem": autoFocusSystem.descriptor, "photoHeight": highResolutionStillImageDimensions.height, @@ -54,9 +54,7 @@ extension AVCaptureDevice.Format { "supportsPhotoHDR": false, "minFps": minFrameRate, "maxFps": maxFrameRate, - "pixelFormats": [pixelFormat.unionValue], + "pixelFormats": pixelFormats.map(\.unionValue), ] - - return dict } } diff --git a/src/CameraDevice.ts b/src/CameraDevice.ts index c593813..1a210e8 100644 --- a/src/CameraDevice.ts +++ b/src/CameraDevice.ts @@ -1,4 +1,5 @@ import type { CameraPosition } from './CameraPosition'; +import type { Orientation } from './Orientation'; import type { PixelFormat } from './PixelFormat'; /** @@ -226,4 +227,9 @@ export interface CameraDevice { * - On iOS, all devices are `full`. */ hardwareLevel: 'legacy' | 'limited' | 'full'; + /** + * Represents the sensor's orientation relative to the phone. + * For most phones this will be landscape, as Camera sensors are usually rotated by 90 degrees (i.e. width and height are flipped). + */ + sensorOrientation: Orientation; }
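For reference on how `sensorOrientation` is typically consumed: on Android, the raw `SENSOR_ORIENTATION` degrees feed the classic Camera2 rotation formula documented on `CameraCharacteristics.SENSOR_ORIENTATION`. A minimal sketch, illustrative only and not part of this patch:

fun computeRelativeRotation(sensorOrientationDegrees: Int, displayRotationDegrees: Int, isFrontFacing: Boolean): Int {
  // Front-facing cameras are mirrored, so the display rotation is applied in the opposite direction.
  val sign = if (isFrontFacing) 1 else -1
  return (sensorOrientationDegrees - displayRotationDegrees * sign + 360) % 360
}

With the typical 90° sensor described in the TSDoc above, a back-camera device held upright (display rotation 0°) yields 90°, i.e. frames must be rotated a quarter turn to render upright.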