Compare commits

...

13 Commits

Author SHA1 Message Date
c64516693c Merge pull request 'Fix Preview View Aspect Ratio Orientation Issues in android' (#8) from ivan/fix-android-preview-view-aspect-ratio-orientation-issues into main
Reviewed-on: #8
2024-10-12 16:21:18 -06:00
e9f08ef488 Fix Preview View Aspect Ratio Orientation Issues in android 2024-10-12 16:20:23 -06:00
bf122db919 Merge pull request 'Ensure custom exposure mode is supported' (#7) from ivan/ensure-capture-mode-is-supported into main
Reviewed-on: #7
2024-10-10 15:18:12 -06:00
3319e48f7d Ensure custom exposure mode is supported 2024-10-10 15:17:55 -06:00
58714f9dac Merge pull request 'iOS Camera Settings' (#6) from volodymyr/ios-camera-settings into main
Reviewed-on: #6
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2024-10-10 15:12:32 -06:00
8991779851 iOS Camera Settings 2024-10-08 15:53:47 +02:00
f8efa172ba Merge pull request 'Lock exposure on start recording' (#4) from volodymyr/ios-lock-exposure into main
Reviewed-on: #4
2024-09-27 11:52:36 -06:00
66f840eecb Lock exposure on start recording 2024-09-27 10:35:29 +02:00
fcf5fe70f3 Merge branch 'ivan/fix-android-orientation' 2024-08-14 01:28:00 -06:00
Rui Rodrigues
3a20c44a31 fix preview and recording orientation
- add onCameraConfigurationChanged to CameraSessionDelegate to notify CameraView when configuration changes
- when orientation changes, update the CameraView.PreviewView.videoPreviewLayer.connection orientation value
2024-08-02 14:39:23 +01:00
0329e7976d Account for orientation in PreviewView 2024-07-29 00:02:24 -06:00
7c162fecb1 Remove trailing whitespace 2024-07-28 16:37:20 -06:00
b28a152471 Fix last segment issue 2024-07-24 21:00:27 -06:00
17 changed files with 209 additions and 58 deletions

View File

@@ -72,6 +72,10 @@ class CameraView(context: Context) :
   var zoom: Float = 1f // in "factor"
   var exposure: Double = 1.0
   var orientation: Orientation = Orientation.PORTRAIT
+    set(value) {
+      field = value
+      previewView.orientation = value
+    }
   var enableZoomGesture = false
     set(value) {
       field = value

View File

@@ -10,6 +10,7 @@ import com.mrousavy.camera.types.CodeScannerOptions
 import com.mrousavy.camera.types.Orientation
 import com.mrousavy.camera.types.PixelFormat
 import com.mrousavy.camera.types.ResizeMode
+import android.util.Log
 import com.mrousavy.camera.types.Torch
 import com.mrousavy.camera.types.VideoStabilizationMode
@@ -182,6 +183,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
   fun setOrientation(view: CameraView, orientation: String?) {
     if (orientation != null) {
       val newMode = Orientation.fromUnionValue(orientation)
+      Log.i(TAG, "Orientation set to: $newMode")
       view.orientation = newMode
     } else {
       view.orientation = Orientation.PORTRAIT

View File

@@ -312,7 +312,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
         enableHdr
       )
       outputs.add(output)
+      // Size is usually landscape, so we flip it here
       previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation)
     }

View File

@@ -83,7 +83,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   }
   // Muxer specific
-  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat) {
+  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat, val callbacks: CameraSession.Callback,) {
     val videoTrack: Int = muxer.addTrack(encodedFormat)
     val startTimeUs: Long = startTimeUs
@@ -95,16 +95,14 @@
     fun finish() {
       muxer.stop()
       muxer.release()
+      callbacks.onVideoChunkReady(filepath, chunkIndex)
     }
   }
   private var muxerContext: MuxerContext? = null
   private fun createNextMuxer(bufferInfo: BufferInfo) {
-    muxerContext?.let {
-      it.finish()
-      this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex)
-    }
+    muxerContext?.finish()
     chunkIndex++
     val newFileName = "$chunkIndex.mp4"
@@ -116,7 +114,7 @@
     )
     muxer.setOrientationHint(orientationHint)
     muxerContext = MuxerContext(
-      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!
+      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
     )
   }

View File

@@ -1,6 +1,7 @@
 package com.mrousavy.camera.core
 import android.annotation.SuppressLint
+import android.content.res.Configuration
 import android.content.Context
 import android.graphics.Point
 import android.util.Log
@@ -36,6 +37,13 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
       updateLayout()
     }
   }
+  var orientation: Orientation = Orientation.PORTRAIT
+    set(value) {
+      if (field != value) {
+        Log.i(TAG, "View Orientation changed: $field -> $value")
+        field = value
+      }
+    }
   private var inputOrientation: Orientation = Orientation.LANDSCAPE_LEFT
     set(value) {
       if (field != value) {
@@ -90,17 +98,14 @@
     }
   }
-  override fun requestLayout() {
-    super.requestLayout()
-    // Manually trigger measure & layout, as RN on Android skips those.
-    // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
-    post {
-      measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
-      layout(left, top, right, bottom)
-    }
-  }
   private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
+    var contentSize = contentSize
+    var androidOrientation = context.getResources().getConfiguration().orientation;
+    if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
+      contentSize = Size(contentSize.height, contentSize.width)
+    }
     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
     if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {
@@ -128,11 +133,11 @@
   override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
     super.onMeasure(widthMeasureSpec, heightMeasureSpec)
-    val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
+    val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
     val surfaceSize = size.rotatedBy(inputOrientation)
-    val fittedSize = getSize(surfaceSize, viewSize, resizeMode)
-    Log.i(TAG, "PreviewView is $viewSize, rendering $surfaceSize content ($inputOrientation). Resizing to: $fittedSize ($resizeMode)")
+    val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode)
+    Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)")
     setMeasuredDimension(fittedSize.width, fittedSize.height)
   }

View File

@@ -50,4 +50,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
   func resumeRecording(promise: Promise) {
     cameraSession.resumeRecording(promise: promise)
   }
+
+  func lockExposure(promise: Promise) {
+    cameraSession.lockCurrentExposure(promise: promise)
+  }
+
+  func unlockExposure(promise: Promise) {
+    cameraSession.unlockCurrentExposure(promise: promise)
+  }
 }

View File

@@ -304,6 +304,15 @@ public final class CameraView: UIView, CameraSessionDelegate {
     onInitialized([:])
   }
+
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
+    guard let configuration, let difference else { return }
+
+    if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
+      let videoPreviewLayer = previewView.videoPreviewLayer
+      connection.setOrientation(configuration.orientation)
+    }
+  }
+
   func onCameraStarted() {
     ReactLogger.log(level: .info, message: "Camera started!")
     guard let onStarted = onStarted else {

View File

@@ -86,5 +86,13 @@ RCT_EXTERN_METHOD(focus
                  : (NSDictionary*)point resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
+RCT_EXTERN_METHOD(lockCurrentExposure
+                 : (nonnull NSNumber*)node resolve
+                 : (RCTPromiseResolveBlock)resolve reject
+                 : (RCTPromiseRejectBlock)reject);
+RCT_EXTERN_METHOD(unlockCurrentExposure
+                 : (nonnull NSNumber*)node resolve
+                 : (RCTPromiseResolveBlock)resolve reject
+                 : (RCTPromiseRejectBlock)reject);
 @end

View File

@@ -111,6 +111,18 @@ final class CameraViewManager: RCTViewManager {
     }
   }
+
+  @objc
+  final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
+    let component = getCameraView(withTag: node)
+    component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
+  }
+
+  @objc
+  final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
+    let component = getCameraView(withTag: node)
+    component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
+  }
+
   // MARK: Private
   private func getCameraView(withTag tag: NSNumber) -> CameraView {

View File

@@ -191,4 +191,68 @@ extension CameraSession {
       }
     }
   }
+
+  func lockCurrentExposure(promise: Promise) {
+    CameraQueues.cameraQueue.async {
+      withPromise(promise) {
+        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
+          print("No capture device available")
+          return
+        }
+
+        guard captureDevice.isExposureModeSupported(.custom) else {
+          ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
+          return
+        }
+
+        do {
+          // Lock the device for configuration
+          try captureDevice.lockForConfiguration()
+
+          // Get the current exposure duration and ISO
+          let currentExposureDuration = captureDevice.exposureDuration
+          let currentISO = captureDevice.iso
+
+          // Check if the device supports custom exposure settings
+          if captureDevice.isExposureModeSupported(.custom) {
+            // Lock the current exposure and ISO by setting custom exposure mode
+            captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
+            ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
+          } else {
+            ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
+          }
+
+          // Unlock the device after configuration
+          captureDevice.unlockForConfiguration()
+        } catch {
+          ReactLogger.log(level: .warning, message: "Error locking exposure: \(error)")
+        }
+
+        return nil
+      }
+    }
+  }
+
+  func unlockCurrentExposure(promise: Promise) {
+    CameraQueues.cameraQueue.async {
+      withPromise(promise) {
+        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
+          print("No capture device available")
+          return
+        }
+
+        do {
+          if captureDevice.isExposureModeSupported(.autoExpose) {
+            try captureDevice.lockForConfiguration()
+            captureDevice.exposureMode = .continuousAutoExposure
+            captureDevice.unlockForConfiguration()
+          }
+        } catch {
+          ReactLogger.log(level: .warning, message: "Error unlocking exposure: \(error)")
+        }
+
+        return nil
+      }
+    }
+  }
 }

View File

@@ -195,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
         self.delegate?.onSessionInitialized()
       }
+      self.delegate?.onCameraConfigurationChanged(config, difference)
       // After configuring, set this to the new configuration.
       self.configuration = config
     } catch {

View File

@@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject {
   Called when the [CameraSession] successfully initializes
   */
  func onSessionInitialized()
+
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
  /**
   Called when the [CameraSession] starts streaming frames. (isActive=true)
   */

View File

@@ -32,6 +32,15 @@ extension AVCaptureOutput {
   func setOrientation(_ orientation: Orientation) {
     // Set orientation for each connection
     for connection in connections {
+      connection.setOrientation(orientation)
+    }
+  }
+}
+
+extension AVCaptureConnection {
+  func setOrientation(_ orientation: Orientation) {
     #if swift(>=5.9)
       if #available(iOS 17.0, *) {
         // Camera Sensors are always in landscape rotation (90deg).
@@ -41,19 +50,18 @@
       // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
       // Does that work when we flip the camera?
-      if connection.isVideoRotationAngleSupported(degrees) {
-        connection.videoRotationAngle = degrees
+      if isVideoRotationAngleSupported(degrees) {
+        videoRotationAngle = degrees
       }
     } else {
-      if connection.isVideoOrientationSupported {
-        connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+      if isVideoOrientationSupported {
+        videoOrientation = orientation.toAVCaptureVideoOrientation()
      }
     }
     #else
-    if connection.isVideoOrientationSupported {
-      connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+    if isVideoOrientationSupported {
+      videoOrientation = orientation.toAVCaptureVideoOrientation()
     }
     #endif
-    }
   }
 }

View File

@@ -113,5 +113,19 @@ class ViewController: UIViewController {
     }
   }
+
+  override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
+    switch UIDevice.current.orientation {
+    case .landscapeLeft:
+      cameraView.orientation = "landscape-right"
+    case .landscapeRight:
+      cameraView.orientation = "landscape-left"
+    default:
+      cameraView.orientation = "portrait"
+    }
+
+    cameraView.didSetProps([])
+    super.viewWillTransition(to: size, with: coordinator)
+  }
 }
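
For apps that drive the camera from React Native rather than from this native example controller, the same union values ('portrait', 'landscape-left', 'landscape-right') can be supplied through the Camera component's orientation prop, which CameraViewManager.setOrientation handles earlier in this diff. A minimal sketch; the useCameraDevice hook, the device/isActive props, and the package name are assumed from the upstream react-native-vision-camera API and are not part of this change set:

```tsx
import React from 'react'
import { useWindowDimensions } from 'react-native'
import { Camera, useCameraDevice } from 'react-native-vision-camera'

// Sketch only: derive a coarse orientation from the window size.
// Telling landscape-left from landscape-right apart needs a device-orientation
// listener, as the Swift example above does with UIDevice.current.orientation.
export function OrientationAwareCamera(): React.ReactElement | null {
  const device = useCameraDevice('back')
  const { width, height } = useWindowDimensions()
  const orientation = width > height ? 'landscape-left' : 'portrait'

  if (device == null) return null
  return <Camera style={{ flex: 1 }} device={device} isActive orientation={orientation} />
}
```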

View File

@@ -319,6 +319,22 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
       throw tryParseNativeCameraError(e)
     }
   }
+
+  public async lockCurrentExposure(): Promise<void> {
+    try {
+      return await CameraModule.lockCurrentExposure(this.handle)
+    } catch (e) {
+      throw tryParseNativeCameraError(e)
+    }
+  }
+
+  public async unlockCurrentExposure(): Promise<void> {
+    try {
+      return await CameraModule.unlockCurrentExposure(this.handle)
+    } catch (e) {
+      throw tryParseNativeCameraError(e)
+    }
+  }
+
   //#endregion

   //#region Static Functions (NativeModule)
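
A usage sketch for the exposure-lock methods added above, called from app code that holds a Camera ref. The startRecording/stopRecording calls follow the existing public Camera API; everything else here (the hook name, the recording options) is an assumption for illustration, not part of this diff:

```tsx
import { useCallback, useRef } from 'react'
import { Camera } from 'react-native-vision-camera'

// Hypothetical hook: freeze exposure/ISO for the duration of a recording
// using the lockCurrentExposure()/unlockCurrentExposure() methods added above.
export function useRecordingWithLockedExposure() {
  const camera = useRef<Camera>(null)

  const start = useCallback(async () => {
    // Lock exposure at its current values before the first frame is written.
    await camera.current?.lockCurrentExposure()
    camera.current?.startRecording({
      onRecordingFinished: (video) => console.log('recorded:', video.path),
      onRecordingError: (error) => console.error(error),
    })
  }, [])

  const stop = useCallback(async () => {
    await camera.current?.stopRecording()
    // Return to continuous auto-exposure once the clip is finished.
    await camera.current?.unlockCurrentExposure()
  }, [])

  return { camera, start, stop }
}
```

The returned camera ref is intended to be attached to the rendered element (`<Camera ref={camera} ... />`) so the calls reach the native view added in this change set.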