perf: Improve pixelFormat and add CameraDevice.sensorOrientation (#1729)

* feat: Orientation

* fix orientation value in manifest

* Update AndroidManifest.xml

* Style

* fix: Set MAX_IMAGES to 3

* Pass `isMirrored` to `VideoPipeline`

* Update docs about Skia FPs

* Options

* Add iPad target

* Remove UIDevice onOrientationChanged listener

* Update CameraView+AVCaptureSession.swift

* Update CameraView+AVCaptureSession.swift

* Update CameraView+AVCaptureSession.swift

* Get available pixelFormats on iOS

* format

* Update CameraSession.kt

* Expose `CameraDevice.sensorOrientation`

* Lock orientation again
Marc Rousavy 2023-09-01 15:07:16 +02:00 committed by GitHub
parent 01a79d63ef
commit 0e9f1ca640
16 changed files with 95 additions and 58 deletions


@@ -17,6 +17,7 @@ import com.mrousavy.camera.extensions.getVideoSizes
 import com.mrousavy.camera.parsers.PixelFormat
 import com.mrousavy.camera.parsers.HardwareLevel
 import com.mrousavy.camera.parsers.LensFacing
+import com.mrousavy.camera.parsers.Orientation
 import com.mrousavy.camera.parsers.VideoStabilizationMode
 import kotlin.math.PI
 import kotlin.math.atan
@@ -36,6 +37,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
 private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false
 private val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: floatArrayOf(35f /* 35mm default */)
 private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
+private val sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION)!!
 private val name = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION)
 else null) ?: "$lensFacing (${cameraId})"
@@ -202,6 +204,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
 map.putDouble("maxZoom", maxZoom)
 map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android
 map.putString("hardwareLevel", hardwareLevel.unionValue)
+map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue)
 val array = Arguments.createArray()
 cameraConfig.outputFormats.forEach { f ->
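
For reference, `SENSOR_ORIENTATION` is always one of 0, 90, 180 or 270 degrees, and `Orientation.fromRotationDegrees` maps it onto the `Orientation` union exposed to JS. A minimal TypeScript sketch of that mapping (the helper name and the exact landscape-left/right assignment are illustrative assumptions, not taken from this diff):

```ts
type Orientation = 'portrait' | 'portrait-upside-down' | 'landscape-left' | 'landscape-right'

// Hypothetical mirror of the native Orientation.fromRotationDegrees parser.
function orientationFromRotationDegrees(degrees: number): Orientation {
  switch (degrees) {
    case 90:
      return 'landscape-right' // assumed assignment for 90°
    case 180:
      return 'portrait-upside-down'
    case 270:
      return 'landscape-left' // assumed assignment for 270°
    default:
      return 'portrait' // 0°
  }
}
```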


@@ -56,7 +56,7 @@ class CameraSession(private val context: Context,
 private const val TAG = "CameraSession"
 // TODO: Samsung advertises 60 FPS but only allows 30 FPS for some reason.
-private val CAN_SET_FPS = Build.MANUFACTURER != "samsung"
+private val CAN_SET_FPS = !Build.MANUFACTURER.equals("samsung", true)
 }
 data class CapturedPhoto(val image: Image,


@@ -24,9 +24,10 @@ import java.io.Closeable
 @Suppress("KotlinJniMissingFunction")
 class VideoPipeline(val width: Int,
 val height: Int,
-val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
+val format: Int = ImageFormat.PRIVATE,
+private val isMirrored: Boolean = false): SurfaceTexture.OnFrameAvailableListener, Closeable {
 companion object {
-private const val MAX_IMAGES = 5
+private const val MAX_IMAGES = 3
 private const val TAG = "VideoPipeline"
 }
@@ -98,7 +99,7 @@ class VideoPipeline(val width: Int,
 val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
 // TODO: Get correct orientation and isMirrored
-val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
+val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, isMirrored)
 frame.incrementRefCount()
 frameProcessor?.call(frame)
 frame.decrementRefCount()
@@ -110,7 +111,7 @@ class VideoPipeline(val width: Int,
 * Configures the Pipeline to also call the given [FrameProcessor].
 * * If the [frameProcessor] is `null`, this output channel will be removed.
 * * If the [frameProcessor] is not `null`, the [VideoPipeline] will create Frames
-* using an [ImageWriter] and call the [FrameProcessor] with those Frames.
+* using an [ImageWriter] and call the [FrameProcessor] with those Frames.
 */
 fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
 synchronized(this) {
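
With `isMirrored` now carried on every `Frame`, a Frame Processor can react to mirrored front-camera frames. A short sketch, assuming the `isMirrored` property on the JS `Frame` type:

```ts
import { useFrameProcessor } from 'react-native-vision-camera'

export function useMirrorAwareFrameProcessor() {
  return useFrameProcessor((frame) => {
    'worklet'
    // Front-camera frames are mirrored, so coordinate-based plugins
    // (e.g. face detectors) may need to flip their results horizontally.
    if (frame.isMirrored) {
      console.log(`mirrored ${frame.width}x${frame.height} frame`)
    }
  }, [])
}
```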


@@ -1,6 +1,7 @@
 package com.mrousavy.camera.core.outputs
 import android.graphics.ImageFormat
+import android.hardware.camera2.CameraCharacteristics
 import android.hardware.camera2.CameraManager
 import android.media.Image
 import android.media.ImageReader
@@ -91,6 +92,7 @@ class CameraOutputs(val cameraId: String,
 init {
 val characteristics = cameraManager.getCameraCharacteristics(cameraId)
+val isMirrored = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT
 Log.i(TAG, "Preparing Outputs for Camera $cameraId...")
@@ -117,7 +119,7 @@ class CameraOutputs(val cameraId: String,
 // Video output: High resolution repeating images (startRecording() or useFrameProcessor())
 if (video != null) {
 val size = characteristics.getVideoSizes(cameraId, video.format).closestToOrMax(video.targetSize)
-val videoPipeline = VideoPipeline(size.width, size.height, video.format)
+val videoPipeline = VideoPipeline(size.width, size.height, video.format, isMirrored)
 Log.i(TAG, "Adding ${size.width}x${size.height} video output. (Format: ${video.format})")
 videoOutput = VideoPipelineOutput(videoPipeline, SurfaceOutput.OutputType.VIDEO)


@@ -46,23 +46,23 @@ fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager,
 QualityPrioritization.BALANCED -> 92
 QualityPrioritization.QUALITY -> 100
 }
-captureRequest[CaptureRequest.JPEG_QUALITY] = jpegQuality.toByte()
+captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte())
 captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees())
 when (flashMode) {
 // Set the Flash Mode
 Flash.OFF -> {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
 }
 Flash.ON -> {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH)
 }
 Flash.AUTO -> {
 if (enableRedEyeReduction) {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE)
 } else {
-captureRequest[CaptureRequest.CONTROL_AE_MODE] = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH
+captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)
 }
 }
 }
@@ -75,17 +75,17 @@ fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager,
 val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)
 val hasOpticalStabilization = opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false
 if (hasOpticalStabilization) {
-captureRequest[CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE] = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF
-captureRequest[CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE] = CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON
+captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON)
 } else if (hasDigitalStabilization) {
-captureRequest[CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE] = CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON
+captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON)
 } else {
 // no stabilization is supported. ignore it
 }
 }
 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
-captureRequest[CaptureRequest.CONTROL_ZOOM_RATIO] = zoom
+captureRequest.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoom)
 } else {
 val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!!
 captureRequest.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoom))


@@ -19,6 +19,16 @@ import useBaseUrl from '@docusaurus/useBaseUrl';
 Skia Frame Processors are [Frame Processors](frame-processors) that allow you to draw onto the Frame using [react-native-skia](https://github.com/Shopify/react-native-skia).
+Skia Frame Processors were introduced in VisionCamera V3 RC.0, but were removed again after VisionCamera V3 RC.9 due to the significantly increased complexity of the video pipeline in the codebase.
+```
+yarn add react-native-vision-camera@rc.9
+```
+They worked perfectly fine for those RCs with some minor inconsistencies (landscape orientation didn't work on Android), which proves the concept. If you want to learn more about Skia Frame Processors, we at [Margelo](https://margelo.io) can build a custom solution for your company to implement drawable Frame Processors (e.g. filters, blurring, masks, colors, etc). See [PR #1740](https://github.com/mrousavy/react-native-vision-camera/pull/1740) for more details.
 ### Documentation
 For example, you might want to draw a rectangle around a user's face **without writing any native code**, while still **achieving native performance**:
 ```jsx


@@ -501,7 +501,7 @@ PODS:
 - libwebp (~> 1.0)
 - SDWebImage/Core (~> 5.10)
 - SocketRocket (0.6.1)
-- VisionCamera (3.0.0-rc.8):
+- VisionCamera (3.0.0-rc.9):
 - React
 - React-callinvoker
 - React-Core
@@ -733,7 +733,7 @@ SPEC CHECKSUMS:
 SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d
 SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d
 SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
-VisionCamera: 5bd7961602a7db4de21fdc3588df6ce01d693d37
+VisionCamera: 77e12500568c495e71914aacf52ccd7b9d3c5478
 Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce
 PODFILE CHECKSUM: ab9c06b18c63e741c04349c0fd630c6d3145081c


@@ -11,7 +11,7 @@
 13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
 13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
 81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; };
-B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */ = {isa = PBXBuildFile; };
+B8DB3BD5263DE8B7004C18D7 /* (null) in Sources */ = {isa = PBXBuildFile; };
 B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BD8263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m */; };
 B8F0E10825E0199F00586F16 /* File.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F0E10725E0199F00586F16 /* File.swift */; };
 C0B129659921D2EA967280B2 /* libPods-VisionCameraExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 3CDCFE89C25C89320B98945E /* libPods-VisionCameraExample.a */; };
@@ -375,7 +375,7 @@
 files = (
 13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */,
 B8DB3BDC263DEA31004C18D7 /* ExampleFrameProcessorPlugin.m in Sources */,
-B8DB3BD5263DE8B7004C18D7 /* BuildFile in Sources */,
+B8DB3BD5263DE8B7004C18D7 /* (null) in Sources */,
 B8F0E10825E0199F00586F16 /* File.swift in Sources */,
 13B07FC11A68108700A75B9A /* main.m in Sources */,
 );
@@ -394,6 +394,8 @@
 DEVELOPMENT_TEAM = CJW62Q77E7;
 ENABLE_BITCODE = NO;
 INFOPLIST_FILE = VisionCameraExample/Info.plist;
+INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera";
+INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography";
 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 OTHER_LDFLAGS = (
 "$(inherited)",
@@ -402,9 +404,13 @@
 );
 PRODUCT_BUNDLE_IDENTIFIER = com.mrousavy.camera.example;
 PRODUCT_NAME = VisionCameraExample;
+SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
+SUPPORTS_MACCATALYST = NO;
+SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
 SWIFT_OBJC_BRIDGING_HEADER = "VisionCameraExample-Bridging-Header.h";
 SWIFT_OPTIMIZATION_LEVEL = "-Onone";
 SWIFT_VERSION = 5.2;
+TARGETED_DEVICE_FAMILY = "1,2";
 VERSIONING_SYSTEM = "apple-generic";
 };
 name = Debug;
@@ -418,6 +424,8 @@
 CURRENT_PROJECT_VERSION = 1;
 DEVELOPMENT_TEAM = CJW62Q77E7;
 INFOPLIST_FILE = VisionCameraExample/Info.plist;
+INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera";
+INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography";
 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 OTHER_LDFLAGS = (
 "$(inherited)",
@@ -426,8 +434,12 @@
 );
 PRODUCT_BUNDLE_IDENTIFIER = com.mrousavy.camera.example;
 PRODUCT_NAME = VisionCameraExample;
+SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
+SUPPORTS_MACCATALYST = NO;
+SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
 SWIFT_OBJC_BRIDGING_HEADER = "VisionCameraExample-Bridging-Header.h";
 SWIFT_VERSION = 5.2;
+TARGETED_DEVICE_FAMILY = "1,2";
 VERSIONING_SYSTEM = "apple-generic";
 };
 name = Release;


@@ -35,10 +35,21 @@
 </dict>
 </dict>
 </dict>
+<key>NSCameraUsageDescription</key>
+<string>VisionCamera needs access to your Camera for very obvious reasons.</string>
+<key>NSLocationWhenInUseUsageDescription</key>
+<string></string>
+<key>NSMicrophoneUsageDescription</key>
+<string>VisionCamera needs access to your Microphone to record videos with audio.</string>
+<key>NSPhotoLibraryUsageDescription</key>
+<string>VisionCamera needs access to your photo library to save captured videos and photos.</string>
+<key>UIAppFonts</key>
+<array>
+<string>Ionicons.ttf</string>
+<string>MaterialCommunityIcons.ttf</string>
+</array>
 <key>UILaunchStoryboardName</key>
-<string>LaunchScreen</string>
+<string>LaunchScreen.storyboard</string>
 <key>UIRequiredDeviceCapabilities</key>
 <array>
 <string>armv7</string>
@@ -49,16 +60,5 @@
 </array>
 <key>UIViewControllerBasedStatusBarAppearance</key>
 <true/>
-<key>NSCameraUsageDescription</key>
-<string>VisionCamera needs access to your Camera for very obvious reasons.</string>
-<key>NSMicrophoneUsageDescription</key>
-<string>VisionCamera needs access to your Microphone to record videos with audio.</string>
-<key>NSPhotoLibraryUsageDescription</key>
-<string>VisionCamera needs access to your photo library to save captured videos and photos.</string>
-<key>UIAppFonts</key>
-<array>
-<string>Ionicons.ttf</string>
-<string>MaterialCommunityIcons.ttf</string>
-</array>
 </dict>
 </plist>


@@ -199,7 +199,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
 const frameProcessor = useFrameProcessor((frame) => {
 'worklet';
-console.log(frame.timestamp, frame.toString(), frame.pixelFormat);
+console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`);
 examplePlugin(frame);
 }, []);


@@ -79,6 +79,7 @@ enum DeviceError: String {
 case lowLightBoostNotSupported = "low-light-boost-not-supported"
 case focusNotSupported = "focus-not-supported"
 case notAvailableOnSimulator = "camera-not-available-on-simulator"
+case pixelFormatNotSupported = "pixel-format-not-supported"
 var code: String {
 return rawValue
@@ -102,6 +103,8 @@ enum DeviceError: String {
 return "The microphone was unavailable."
 case .notAvailableOnSimulator:
 return "The Camera is not available on the iOS Simulator!"
+case .pixelFormatNotSupported:
+return "The given pixelFormat is not supported on the given Camera Device!"
 }
 }
 }


@@ -116,13 +116,24 @@ extension CameraView {
 videoOutput!.alwaysDiscardsLateVideoFrames = false
 if let pixelFormat = pixelFormat as? String {
-let defaultFormat = CMFormatDescriptionGetMediaSubType(videoDeviceInput!.device.activeFormat.formatDescription)
+let supportedPixelFormats = videoOutput!.availableVideoPixelFormatTypes
+let defaultFormat = supportedPixelFormats.first! // first value is always the most efficient format
 var pixelFormatType: OSType = defaultFormat
 switch pixelFormat {
 case "yuv":
-pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
+pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+} else if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
+pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+} else {
+invokeOnError(.device(.pixelFormatNotSupported))
+}
 case "rgb":
-pixelFormatType = kCVPixelFormatType_32BGRA
+if supportedPixelFormats.contains(kCVPixelFormatType_32BGRA) {
+pixelFormatType = kCVPixelFormatType_32BGRA
+} else {
+invokeOnError(.device(.pixelFormatNotSupported))
+}
 case "native":
 pixelFormatType = defaultFormat
 default:
@@ -135,7 +146,9 @@ extension CameraView {
 captureSession.addOutput(videoOutput!)
 }
-onOrientationChanged()
+if outputOrientation != .portrait {
+updateOrientation()
+}
 invokeOnInitialized()
 isReady = true
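
On the JS side, this selection is driven by the Camera's `pixelFormat` prop; if none of the requested formats is available on the video output, the new `pixel-format-not-supported` error fires instead of silently falling back. A usage sketch with the standard `<Camera>` props:

```tsx
import * as React from 'react'
import { Camera, CameraDevice, useFrameProcessor } from 'react-native-vision-camera'

export function YuvCamera({ device }: { device: CameraDevice }): React.ReactElement {
  const frameProcessor = useFrameProcessor((frame) => {
    'worklet'
    console.log(frame.pixelFormat) // 'yuv'
  }, [])

  return (
    <Camera
      style={{ flex: 1 }}
      device={device}
      isActive={true}
      // 'native' uses the device's most efficient format; 'yuv' now prefers
      // full-range 4:2:0 and falls back to video-range if unavailable.
      pixelFormat="yuv"
      frameProcessor={frameProcessor}
    />
  )
}
```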


@@ -107,8 +107,11 @@ public final class CameraView: UIView {
 // pragma MARK: Setup
 override public init(frame: CGRect) {
+previewView = PreviewView(frame: frame, session: captureSession)
 super.init(frame: frame)
+addSubview(previewView)
 NotificationCenter.default.addObserver(self,
 selector: #selector(sessionRuntimeError),
 name: .AVCaptureSessionRuntimeError,
@@ -121,13 +124,6 @@ public final class CameraView: UIView {
 selector: #selector(audioSessionInterrupted),
 name: AVAudioSession.interruptionNotification,
 object: AVAudioSession.sharedInstance)
-NotificationCenter.default.addObserver(self,
-selector: #selector(onOrientationChanged),
-name: UIDevice.orientationDidChangeNotification,
-object: nil)
-previewView = PreviewView(frame: frame, session: captureSession)
-addSubview(previewView)
 }
 @available(*, unavailable)
@available(*, unavailable)
@@ -145,9 +141,6 @@ public final class CameraView: UIView {
 NotificationCenter.default.removeObserver(self,
 name: AVAudioSession.interruptionNotification,
 object: AVAudioSession.sharedInstance)
-NotificationCenter.default.removeObserver(self,
-name: UIDevice.orientationDidChangeNotification,
-object: nil)
 }
 override public func willMove(toSuperview newSuperview: UIView?) {
override public func willMove(toSuperview newSuperview: UIView?) {
@@ -265,11 +258,6 @@ public final class CameraView: UIView {
 #endif
 }
-@objc
-func onOrientationChanged() {
-updateOrientation()
-}
 // pragma MARK: Event Invokers
 final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
 ReactLogger.log(level: .error, message: "Invoking onError(): \(error.message)")


@@ -102,6 +102,7 @@ final class CameraViewManager: RCTViewManager {
 "supportsLowLightBoost": $0.isLowLightBoostSupported,
 "supportsFocus": $0.isFocusPointOfInterestSupported,
 "hardwareLevel": "full",
+"sensorOrientation": "portrait", // TODO: Sensor Orientation?
 "formats": $0.formats.map { format -> [String: Any] in
 format.toDictionary()
 },


@@ -36,10 +36,10 @@ extension AVCaptureDevice.Format {
 }
 func toDictionary() -> [String: Any] {
-let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
-let pixelFormat = PixelFormat(mediaSubType: mediaSubType)
+let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
+let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }
-var dict: [String: Any] = [
+return [
 "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
 "autoFocusSystem": autoFocusSystem.descriptor,
 "photoHeight": highResolutionStillImageDimensions.height,
@@ -54,9 +54,7 @@ extension AVCaptureDevice.Format {
 "supportsPhotoHDR": false,
 "minFps": minFrameRate,
 "maxFps": maxFrameRate,
-"pixelFormats": [pixelFormat.unionValue],
+"pixelFormats": pixelFormats.map(\.unionValue),
 ]
-return dict
 }
 }
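
Because `pixelFormats` now lists every pixel format the video output can deliver (rather than only the format's own media subtype), JS-side format selection can filter on it directly. A small sketch using the `pixelFormats` field shown above:

```ts
import type { CameraDevice, CameraDeviceFormat } from 'react-native-vision-camera'

// Pick the first format that can deliver YUV frames to a Frame Processor.
function findYuvFormat(device: CameraDevice): CameraDeviceFormat | undefined {
  return device.formats.find((format) => format.pixelFormats.includes('yuv'))
}
```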


@@ -1,4 +1,5 @@
 import type { CameraPosition } from './CameraPosition';
+import { Orientation } from './Orientation';
 import type { PixelFormat } from './PixelFormat';
 /**
@@ -226,4 +227,9 @@ export interface CameraDevice {
 * - On iOS, all devices are `full`.
 */
 hardwareLevel: 'legacy' | 'limited' | 'full';
+/**
+* Represents the sensor's orientation relative to the phone.
+* For most phones this will be landscape, as Camera sensors are almost always rotated by 90 degrees (i.e. width and height are flipped).
+*/
+sensorOrientation: Orientation;
 }
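
A quick consumer-side sketch of the new field, assuming the existing `Camera.getAvailableCameraDevices()` API:

```ts
import { Camera } from 'react-native-vision-camera'

async function logSensorOrientations(): Promise<void> {
  const devices = await Camera.getAvailableCameraDevices()
  for (const device of devices) {
    // Usually 'landscape-left' or 'landscape-right', since most sensors
    // are mounted rotated by 90° relative to the device.
    console.log(`${device.name}: ${device.sensorOrientation}`)
  }
}
```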