feat: Custom Orientation (#715)

* feat: Custom Orientation

* Update CameraView.swift

* Update CameraView.swift

* Try outputRotation approach

* whoops

* fix: Refactor `VideoCapture` instance

* Update orientation in didSetProps

* Update Orientation in iOS

* expose to objc

* Fix Orientation values

* format
Marc Rousavy authored on 2022-01-04 16:57:40 +01:00, committed by GitHub
parent dbfdf82c67
commit 48da1819fc
10 changed files with 164 additions and 51 deletions
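
In short, this commit adds an `orientation` prop to the `<Camera>` view on both platforms: the preview keeps following the device's interface orientation, while the photo, video, and frame-processor outputs are rotated to the requested value (or keep following the device orientation when the prop is omitted). A minimal TypeScript usage sketch, not part of this diff, assuming the usual `useCameraDevices` setup from the example app:

```tsx
// Minimal usage sketch of the new `orientation` prop (not part of this diff).
// Assumes permissions and device selection are handled as in the example app.
import * as React from 'react';
import { StyleSheet } from 'react-native';
import { Camera, useCameraDevices } from 'react-native-vision-camera';

export function PortraitLockedCamera(): React.ReactElement | null {
  const devices = useCameraDevices();
  const device = devices.back;
  if (device == null) return null;

  return (
    <Camera
      style={StyleSheet.absoluteFill}
      device={device}
      isActive={true}
      video={true}
      // Lock photo/video/frame-processor outputs to portrait.
      // The preview still follows the device's interface orientation.
      orientation="portrait"
    />
  );
}
```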

View File

@@ -41,8 +41,8 @@ fun CameraView.startRecording(options: ReadableMap, onRecordCallback: Callback)
val file = File.createTempFile("VisionCamera-${id}", ".mp4")
val fileOptions = FileOutputOptions.Builder(file).build()
var recording = videoCapture!!
.prepareRecording(context, fileOptions)
val recorder = videoCapture!!.output
var recording = recorder.prepareRecording(context, fileOptions)
if (audio == true) {
@SuppressLint("MissingPermission")

View File

@@ -97,6 +97,7 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
var isActive = false
var torch = "off"
var zoom: Float = 1f // in "factor"
var orientation: String? = null
var enableZoomGesture = false
set(value) {
field = value
@@ -124,7 +125,7 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
internal var camera: Camera? = null
internal var imageCapture: ImageCapture? = null
internal var videoCapture: Recorder? = null
internal var videoCapture: VideoCapture<Recorder>? = null
private var imageAnalysis: ImageAnalysis? = null
private var preview: Preview? = null
@@ -141,10 +142,26 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
private val lifecycleRegistry: LifecycleRegistry
private var hostLifecycleState: Lifecycle.State
private val rotation: Int
private val inputRotation: Int
get() {
return context.displayRotation
}
private val outputRotation: Int
get() {
if (orientation != null) {
// user is overriding output orientation
return when (orientation!!) {
"portrait" -> Surface.ROTATION_0
"landscapeRight" -> Surface.ROTATION_90
"portraitUpsideDown" -> Surface.ROTATION_180
"landscapeLeft" -> Surface.ROTATION_270
else -> throw InvalidTypeScriptUnionError("orientation", orientation!!)
}
} else {
// use same as input rotation
return inputRotation
}
}
private var minZoom: Float = 1f
private var maxZoom: Float = 1f
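
For reference, the `outputRotation` getter above maps each allowed value to a fixed Android `Surface` rotation constant. Restated as a TypeScript lookup table — illustrative only, not part of this commit:

```ts
// Illustrative reference (not part of this commit): the Surface rotation the
// Android `outputRotation` getter selects for each orientation value.
const ANDROID_SURFACE_ROTATION = {
  portrait: 'Surface.ROTATION_0',          // 0°
  landscapeRight: 'Surface.ROTATION_90',   // 90°
  portraitUpsideDown: 'Surface.ROTATION_180', // 180°
  landscapeLeft: 'Surface.ROTATION_270',   // 270°
} as const;
```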
@@ -234,16 +251,17 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
})
}
@SuppressLint("RestrictedApi")
override fun onConfigurationChanged(newConfig: Configuration?) {
super.onConfigurationChanged(newConfig)
if (preview?.targetRotation != rotation) {
preview?.targetRotation = rotation
imageCapture?.targetRotation = rotation
imageAnalysis?.targetRotation = rotation
// TODO: videoCapture?.setTargetRotation(rotation)
updateOrientation()
}
@SuppressLint("RestrictedApi")
private fun updateOrientation() {
preview?.targetRotation = inputRotation
imageCapture?.targetRotation = outputRotation
videoCapture?.targetRotation = outputRotation
imageAnalysis?.targetRotation = outputRotation
}
private external fun initHybrid(): HybridData
@@ -298,6 +316,7 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
val shouldReconfigureSession = changedProps.containsAny(propsThatRequireSessionReconfiguration)
val shouldReconfigureZoom = shouldReconfigureSession || changedProps.contains("zoom")
val shouldReconfigureTorch = shouldReconfigureSession || changedProps.contains("torch")
val shouldUpdateOrientation = shouldReconfigureSession || changedProps.contains("orientation")
if (changedProps.contains("isActive")) {
updateLifecycleState()
@@ -312,6 +331,9 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
if (shouldReconfigureTorch) {
camera!!.cameraControl.enableTorch(torch == "on")
}
if (shouldUpdateOrientation) {
updateOrientation()
}
} catch (e: Throwable) {
Log.e(TAG, "update() threw: ${e.message}")
invokeOnError(e)
@@ -362,17 +384,17 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
}
val previewBuilder = Preview.Builder()
.setTargetRotation(rotation)
.setTargetRotation(inputRotation)
val imageCaptureBuilder = ImageCapture.Builder()
.setTargetRotation(rotation)
.setTargetRotation(outputRotation)
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
val videoRecorderBuilder = Recorder.Builder()
.setExecutor(cameraExecutor)
val imageAnalysisBuilder = ImageAnalysis.Builder()
.setTargetRotation(rotation)
.setTargetRotation(outputRotation)
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.setBackgroundExecutor(frameProcessorThread)
@@ -422,12 +444,9 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
}
}
val videoRecorder = videoRecorderBuilder.build()
val videoCapture = VideoCapture.withOutput(videoRecorder)
videoCapture.targetRotation = rotation
// Unbind use cases before rebinding
this.videoCapture = null
videoCapture = null
imageCapture = null
imageAnalysis = null
cameraProvider.unbindAll()
@@ -436,8 +455,11 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
val useCases = ArrayList<UseCase>()
if (video == true) {
Log.i(TAG, "Adding VideoCapture use-case...")
this.videoCapture = videoRecorder
useCases.add(videoCapture)
val videoRecorder = videoRecorderBuilder.build()
videoCapture = VideoCapture.withOutput(videoRecorder)
videoCapture!!.targetRotation = outputRotation
useCases.add(videoCapture!!)
}
if (photo == true) {
if (fallbackToSnapshot) {

View File

@@ -165,6 +165,13 @@ class CameraViewManager(reactContext: ReactApplicationContext) : SimpleViewManag
view.enableZoomGesture = enableZoomGesture
}
@ReactProp(name = "orientation")
fun setOrientation(view: CameraView, orientation: String) {
if (view.orientation != orientation)
addChangedPropToTransaction(view, "orientation")
view.orientation = orientation
}
companion object {
const val TAG = "CameraView"

View File

@@ -230,6 +230,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
video={true}
audio={hasMicrophonePermission}
frameProcessor={device.supportsParallelVideoProcessing ? frameProcessor : undefined}
orientation="portrait"
frameProcessorFps={1}
onFrameProcessorPerformanceSuggestionAvailable={onFrameProcessorSuggestionAvailable}
/>

View File

@@ -0,0 +1,61 @@
//
// CameraView+Orientation.swift
// VisionCamera
//
// Created by Marc Rousavy on 04.01.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import Foundation
import UIKit
extension CameraView {
/// Returns the current _interface_ orientation of the main window
private var windowInterfaceOrientation: UIInterfaceOrientation {
if #available(iOS 13.0, *) {
return UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .unknown
} else {
return UIApplication.shared.statusBarOrientation
}
}
/// Orientation of the input connection (preview)
private var inputOrientation: UIInterfaceOrientation {
return windowInterfaceOrientation
}
// Orientation of the output connections (photo, video, frame processor)
private var outputOrientation: UIInterfaceOrientation {
if let userOrientation = orientation as String?,
let parsedOrientation = try? UIInterfaceOrientation(withString: userOrientation) {
// user is overriding output orientation
return parsedOrientation
} else {
// use same as input orientation
return inputOrientation
}
}
internal func updateOrientation() {
// Updates the Orientation for all rotatable connections (outputs) as well as for the preview layer
DispatchQueue.main.async {
// `windowInterfaceOrientation` and `videoPreviewLayer` should only be accessed from UI thread
let isMirrored = self.videoDeviceInput?.device.position == .front
self.videoPreviewLayer.connection?.setInterfaceOrientation(self.inputOrientation)
self.cameraQueue.async {
// Run those updates on cameraQueue since they can be blocking.
self.captureSession.outputs.forEach { output in
output.connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = isMirrored
}
connection.setInterfaceOrientation(self.outputOrientation)
}
}
}
}
}
}

View File

@@ -56,6 +56,7 @@ public final class CameraView: UIView {
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
@objc var orientation: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@@ -116,15 +117,6 @@ public final class CameraView: UIView {
return captureSession.isRunning
}
/// Returns the current _interface_ orientation of the main window
private var windowInterfaceOrientation: UIInterfaceOrientation {
if #available(iOS 13.0, *) {
return UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .unknown
} else {
return UIApplication.shared.statusBarOrientation
}
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
@@ -205,6 +197,7 @@ public final class CameraView: UIView {
let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
let shouldUpdateOrientation = changedProps.contains("orientation")
if shouldReconfigure ||
shouldReconfigureAudioSession ||
@@ -213,7 +206,8 @@ public final class CameraView: UIView {
shouldUpdateZoom ||
shouldReconfigureFormat ||
shouldReconfigureDevice ||
shouldUpdateVideoStabilization {
shouldUpdateVideoStabilization ||
shouldUpdateOrientation {
cameraQueue.async {
if shouldReconfigure {
self.configureCaptureSession()
@@ -246,6 +240,10 @@ public final class CameraView: UIView {
}
}
if shouldUpdateOrientation {
self.updateOrientation()
}
// This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
if shouldUpdateTorch {
self.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
@@ -316,27 +314,7 @@ public final class CameraView: UIView {
@objc
func onOrientationChanged() {
// Updates the Orientation for all rotable connections (outputs) as well as for the preview layer
DispatchQueue.main.async {
// `windowInterfaceOrientation` and `videoPreviewLayer` should only be accessed from UI thread
let isMirrored = self.videoDeviceInput?.device.position == .front
let orientation = self.windowInterfaceOrientation
self.videoPreviewLayer.connection?.setInterfaceOrientation(orientation)
self.cameraQueue.async {
// Run those updates on cameraQueue since they can be blocking.
self.captureSession.outputs.forEach { output in
output.connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = isMirrored
}
connection.setInterfaceOrientation(orientation)
}
}
}
}
updateOrientation()
}
// pragma MARK: Event Invokers

View File

@@ -45,6 +45,7 @@ RCT_EXPORT_VIEW_PROPERTY(preset, NSString);
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSString);
// Camera View Events
RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock);

View File

@@ -0,0 +1,31 @@
//
// UIInterfaceOrientation+descriptor.swift
// VisionCamera
//
// Created by Marc Rousavy on 04.01.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import Foundation
import UIKit
extension UIInterfaceOrientation {
init(withString string: String) throws {
switch string {
case "portrait":
self = .portrait
return
case "portraitUpsideDown":
self = .portraitUpsideDown
return
case "landscapeLeft":
self = .landscapeLeft
return
case "landscapeRight":
self = .landscapeRight
return
default:
throw EnumParserError.invalidValue
}
}
}

View File

@@ -14,6 +14,8 @@
B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */; };
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86400512784A23400E9D2CA /* CameraView+Orientation.swift */; };
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
@@ -93,6 +95,8 @@
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
B84C10592694A182006EFA70 /* MakeJSIRuntime.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MakeJSIRuntime.h; sourceTree = "<group>"; };
B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
@@ -180,6 +184,7 @@
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
@@ -248,6 +253,7 @@
B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */,
B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */,
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */,
B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */,
);
path = Parsers;
sourceTree = "<group>";
@@ -391,12 +397,14 @@
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */,
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */,
B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */,
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,

View File

@@ -156,6 +156,10 @@ export interface CameraProps extends ViewProps {
* @default false
*/
enableHighQualityPhotos?: boolean;
/**
* Represents the orientation of the Camera and all of its outputs. If this value is not set, VisionCamera uses the device orientation by default.
*/
orientation?: 'portrait' | 'portraitUpsideDown' | 'landscapeLeft' | 'landscapeRight';
//#region Events
/**
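
Both native sides reject values outside this union (`InvalidTypeScriptUnionError` on Android, `EnumParserError` on iOS), so when the orientation comes from user input or persisted settings it can be worth narrowing it on the JS side first. A small hypothetical helper, not part of this commit:

```ts
// Hypothetical helper (not part of this commit): narrow an arbitrary string to
// the orientation union before passing it to <Camera>, falling back to
// `undefined` so the native side keeps following the device orientation.
type Orientation = 'portrait' | 'portraitUpsideDown' | 'landscapeLeft' | 'landscapeRight';

const ORIENTATIONS: readonly Orientation[] = [
  'portrait',
  'portraitUpsideDown',
  'landscapeLeft',
  'landscapeRight',
];

export function toOrientationProp(value: string): Orientation | undefined {
  return (ORIENTATIONS as readonly string[]).includes(value) ? (value as Orientation) : undefined;
}
```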