5 Commits

Author SHA1 Message Date
e16c25c96c Decrease exposure on white balance issue 2024-09-27 09:42:30 +02:00
fcf5fe70f3 Merge branch 'ivan/fix-android-orientation' 2024-08-14 01:28:00 -06:00
Rui Rodrigues 3a20c44a31 fix preview and recording orientation 2024-08-02 14:39:23 +01:00
- add onCameraConfigurationChanged to CameraSessionDelegate to notify CameraView when the configuration changes
- when the orientation changes, update the orientation of CameraView.PreviewView.videoPreviewLayer.connection
0329e7976d Account for orientation in PreviewView 2024-07-29 00:02:24 -06:00
7c162fecb1 Remove trailing whitespace 2024-07-28 16:37:20 -06:00
11 changed files with 191 additions and 41 deletions

View File

@@ -72,6 +72,10 @@ class CameraView(context: Context) :
var zoom: Float = 1f // in "factor"
var exposure: Double = 1.0
var orientation: Orientation = Orientation.PORTRAIT
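// Keep the PreviewView in sync so it can account for the new orientation when measuring.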
set(value) {
field = value
previewView.orientation = value
}
var enableZoomGesture = false
set(value) {
field = value

View File

@@ -10,6 +10,7 @@ import com.mrousavy.camera.types.CodeScannerOptions
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.PixelFormat
import com.mrousavy.camera.types.ResizeMode
import android.util.Log
import com.mrousavy.camera.types.Torch
import com.mrousavy.camera.types.VideoStabilizationMode
@@ -182,6 +183,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
fun setOrientation(view: CameraView, orientation: String?) {
if (orientation != null) {
val newMode = Orientation.fromUnionValue(orientation)
Log.i(TAG, "Orientation set to: $newMode")
view.orientation = newMode
} else {
view.orientation = Orientation.PORTRAIT

View File

@@ -312,7 +312,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
enableHdr
)
outputs.add(output)
// Size is usually landscape, so we flip it here
previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation)
}

View File

@@ -36,6 +36,13 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
updateLayout()
}
}
var orientation: Orientation = Orientation.PORTRAIT
set(value) {
if (field != value) {
Log.i(TAG, "View Orientation changed: $field -> $value")
field = value
}
}
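// Read by getSize() below to decide whether the content's width and height need to be swapped.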
private var inputOrientation: Orientation = Orientation.LANDSCAPE_LEFT
set(value) {
if (field != value) {
@@ -101,6 +108,11 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
}
private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
var contentSize = contentSize
// Swap dimensions if orientation is landscape
if (orientation.isLandscape()) {
contentSize = Size(contentSize.height, contentSize.width)
}
val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {
@@ -128,11 +140,11 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec)
val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
val surfaceSize = size.rotatedBy(inputOrientation)
val fittedSize = getSize(surfaceSize, viewSize, resizeMode)
val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode)
Log.i(TAG, "PreviewView is $viewSize, rendering $surfaceSize content ($inputOrientation). Resizing to: $fittedSize ($resizeMode)")
Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)")
setMeasuredDimension(fittedSize.width, fittedSize.height)
}

View File

@@ -87,6 +87,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
var pinchGestureRecognizer: UIPinchGestureRecognizer?
var pinchScaleOffset: CGFloat = 1.0
private var currentConfigureCall: DispatchTime?
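// Timestamp of the last frame run through the white-balance check (used to throttle processFrameIfNeeded).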
var lastProcessedTime: Date?
var previewView: PreviewView
#if DEBUG
@@ -303,6 +304,15 @@ public final class CameraView: UIView, CameraSessionDelegate {
}
onInitialized([:])
}
func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
guard let configuration, let difference else { return }
if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
connection.setOrientation(configuration.orientation)
}
}
func onCameraStarted() {
ReactLogger.log(level: .info, message: "Camera started!")
@@ -321,6 +331,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
}
func onFrame(sampleBuffer: CMSampleBuffer) {
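// Lightweight white-balance check; throttled inside processFrameIfNeeded (see the extension below).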
processFrameIfNeeded(sampleBuffer)
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
if let frameProcessor = frameProcessor {
// Call Frame Processor
@@ -395,3 +406,99 @@ public final class CameraView: UIView, CameraSessionDelegate {
}
}
}
extension CameraView {
func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
let currentTime = Date()
if let lastTime = lastProcessedTime {
if currentTime.timeIntervalSince(lastTime) >= 10.0 {
processCapturedFrame(sampleBuffer)
lastProcessedTime = currentTime
}
} else {
// Process the first frame immediately
processCapturedFrame(sampleBuffer)
lastProcessedTime = currentTime
}
}
func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
ReactLogger.log(level: .info, message: "processCapturedFrame")
// Your existing processing logic
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
// Analyze for white balance
let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
if isWhiteBalanceIssue {
ReactLogger.log(level: .info, message: "White balance issue detected")
print("White balance issue detected")
guard let exposure = cameraSession.configuration?.exposure else {
updateExposure(0.5)
return
}
updateExposure(exposure - 0.2)
ReactLogger.log(level: .info, message: "Exposure = \(exposure)")
} else {
ReactLogger.log(level: .info, message: "White balance is okay")
print("White balance is okay. Exposure = \(cameraSession.configuration?.exposure)")
}
}
func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
let extent = ciImage.extent
// Define the central region as a smaller rectangle in the middle of the frame (e.g., 1/4 the size)
let centerRect = CGRect(
x: extent.origin.x + extent.size.width * 0.25,
y: extent.origin.y + extent.size.height * 0.25,
width: extent.size.width * 0.5,
height: extent.size.height * 0.5
)
// Crop the image to the centerRect
let croppedImage = ciImage.cropped(to: centerRect)
let averageColorFilter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: croppedImage, kCIInputExtentKey: CIVector(cgRect: centerRect)])!
guard let outputImage = averageColorFilter.outputImage else {
ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance guard")
return false
}
var bitmap = [UInt8](repeating: 0, count: 4)
let context = CIContext()
context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
let red = Float(bitmap[0]) / 255.0
let green = Float(bitmap[1]) / 255.0
let blue = Float(bitmap[2]) / 255.0
ReactLogger.log(level: .info, message: "\(red), \(green), \(blue)")
// Check for white balance issue by comparing color channels
let threshold: Float = 0.25
if abs(red - green) > threshold
|| abs(blue - green) > threshold
|| abs(1 - red) < threshold
|| abs(1 - green) < threshold
|| abs(1 - blue) < threshold {
print("White balance issue detected")
return true
}
return false
}
func updateExposure(_ exposure: Float) {
ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
cameraSession.configure { config in
config.exposure = exposure
}
}
}
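
The centre-crop plus CIAreaAverage sampling that analyzeFrameForWhiteBalance relies on can also be exercised in isolation. A minimal sketch, assuming only CoreImage; the averageRGB helper and the blue test colour are hypothetical and not part of this codebase:

import CoreImage

// Average the RGB channels of `image` inside `rect` via CIAreaAverage (same approach as above).
func averageRGB(of image: CIImage, in rect: CGRect) -> (Float, Float, Float)? {
    guard let filter = CIFilter(name: "CIAreaAverage",
                                parameters: [kCIInputImageKey: image.cropped(to: rect),
                                             kCIInputExtentKey: CIVector(cgRect: rect)]),
          let output = filter.outputImage else { return nil }
    var bitmap = [UInt8](repeating: 0, count: 4)
    CIContext().render(output, toBitmap: &bitmap, rowBytes: 4,
                       bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                       format: .RGBA8, colorSpace: nil)
    return (Float(bitmap[0]) / 255, Float(bitmap[1]) / 255, Float(bitmap[2]) / 255)
}

// A strongly blue-tinted image should exceed the same 0.25 channel-difference threshold.
let blueish = CIImage(color: CIColor(red: 0.2, green: 0.3, blue: 0.9))
    .cropped(to: CGRect(x: 0, y: 0, width: 100, height: 100))
if let (r, g, b) = averageRGB(of: blueish, in: blueish.extent) {
    let threshold: Float = 0.25
    print(abs(r - g) > threshold || abs(b - g) > threshold) // expected: true
}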

View File

@@ -195,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
self.delegate?.onSessionInitialized()
}
self.delegate?.onCameraConfigurationChanged(config, difference)
// After configuring, set this to the new configuration.
self.configuration = config
} catch {

View File

@@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject {
Called when the [CameraSession] successfully initializes
*/
func onSessionInitialized()
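/**
Called after the [CameraSession] has applied a new configuration (e.g. an orientation change).
*/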
func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
/**
Called when the [CameraSession] starts streaming frames. (isActive=true)
*/

View File

@@ -11,7 +11,7 @@ import AVFoundation
class ChunkedRecorder: NSObject {
enum ChunkType {
case initialization
case data(index: UInt64, duration: CMTime?)
@@ -21,12 +21,12 @@ class ChunkedRecorder: NSObject {
let url: URL
let type: ChunkType
}
let outputURL: URL
let onChunkReady: ((Chunk) -> Void)
private var chunkIndex: UInt64 = 0
init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
self.outputURL = outputURL
self.onChunkReady = onChunkReady
@@ -34,16 +34,16 @@ class ChunkedRecorder: NSObject {
throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
}
}
}
extension ChunkedRecorder: AVAssetWriterDelegate {
func assetWriter(_ writer: AVAssetWriter,
didOutputSegmentData segmentData: Data,
segmentType: AVAssetSegmentType,
segmentReport: AVAssetSegmentReport?) {
switch segmentType {
case .initialization:
saveInitSegment(segmentData)
@@ -53,13 +53,13 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
fatalError("Unknown AVAssetSegmentType!") fatalError("Unknown AVAssetSegmentType!")
} }
} }
private func saveInitSegment(_ data: Data) { private func saveInitSegment(_ data: Data) {
let url = outputURL.appendingPathComponent("init.mp4") let url = outputURL.appendingPathComponent("init.mp4")
save(data: data, url: url) save(data: data, url: url)
onChunkReady(url: url, type: .initialization) onChunkReady(url: url, type: .initialization)
} }
private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) { private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
let name = "\(chunkIndex).mp4" let name = "\(chunkIndex).mp4"
let url = outputURL.appendingPathComponent(name) let url = outputURL.appendingPathComponent(name)
@@ -72,7 +72,7 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
chunkIndex += 1
}
private func save(data: Data, url: URL) {
do {
try data.write(to: url)
@@ -80,9 +80,9 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
}
}
private func onChunkReady(url: URL, type: ChunkType) {
onChunkReady(Chunk(url: url, type: type))
}
}

View File

@@ -84,7 +84,7 @@ class RecordingSession {
assetWriter.shouldOptimizeForNetworkUse = false
assetWriter.outputFileTypeProfile = .mpeg4AppleHLS
assetWriter.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
/*
Apple HLS fMP4 does not have an Edit List Box ('elst') in an initialization segment to remove
audio priming duration which advanced audio formats like AAC have, since the sample tables
@@ -95,7 +95,7 @@ class RecordingSession {
*/
let startTimeOffset = CMTime(value: 10, timescale: 1)
assetWriter.initialSegmentStartTime = startTimeOffset
assetWriter.delegate = recorder
} catch let error as NSError {
throw CameraError.capture(.createRecorderError(message: error.description))

View File

@@ -32,28 +32,36 @@ extension AVCaptureOutput {
func setOrientation(_ orientation: Orientation) {
// Set orientation for each connection
for connection in connections {
connection.setOrientation(orientation)
}
}
}
extension AVCaptureConnection {
func setOrientation(_ orientation: Orientation) {
#if swift(>=5.9)
if #available(iOS 17.0, *) {
// Camera Sensors are always in landscape rotation (90deg).
// We are setting the target rotation here, so we need to rotate by landscape once.
let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
let degrees = cameraOrientation.toDegrees()
// TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
// Does that work when we flip the camera?
if isVideoRotationAngleSupported(degrees) {
videoRotationAngle = degrees
}
} else {
if isVideoOrientationSupported {
videoOrientation = orientation.toAVCaptureVideoOrientation()
}
}
#else
if isVideoOrientationSupported {
videoOrientation = orientation.toAVCaptureVideoOrientation()
}
#endif
}
}
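
Moving the per-connection logic into an AVCaptureConnection extension is what lets CameraView reuse it for the preview layer's connection in onCameraConfigurationChanged above. A minimal usage sketch, assuming a preview layer and an Orientation value are already in scope:

// Sketch only: apply the same helper to a preview layer's connection.
if let connection = previewLayer.connection {
    connection.setOrientation(orientation)
}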

View File

@@ -113,5 +113,19 @@ class ViewController: UIViewController {
}
}
override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
switch UIDevice.current.orientation {
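// UIDeviceOrientation describes the physical device rotation, which is mirrored relative to
// the interface orientation in landscape, hence the left/right swap below.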
case .landscapeLeft:
cameraView.orientation = "landscape-right"
case .landscapeRight:
cameraView.orientation = "landscape-left"
default:
cameraView.orientation = "portrait"
}
cameraView.didSetProps([])
super.viewWillTransition(to: size, with: coordinator)
}
}