3 Commits

Author          SHA1        Message                                       Date
                e16c25c96c  Decrease exposure on white balance issue     2024-09-27 09:42:30 +02:00
                fcf5fe70f3  Merge branch 'ivan/fix-android-orientation'  2024-08-14 01:28:00 -06:00
Rui Rodrigues   3a20c44a31  fix preview and recording orientation        2024-08-02 14:39:23 +01:00

    - add onCameraConfigurationChanged to CameraSessionDelegate to notify CameraView when the configuration changes
    - when the orientation changes, update the orientation value on CameraView.previewView.videoPreviewLayer.connection
5 changed files with 154 additions and 22 deletions
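
Taken together, the change wires orientation updates from CameraSession through a new delegate callback into the preview layer's AVCaptureConnection. As a minimal standalone sketch of the preview-layer update the diffs below implement (the function name and the precomputed degrees value are illustrative; on iOS 17+ videoRotationAngle replaces the deprecated videoOrientation):

import AVFoundation

// Illustrative sketch: apply a rotation to a preview layer's connection.
// `degrees` is assumed to be derived from the current interface orientation,
// folding in the camera sensor's native landscape mounting.
func applyPreviewRotation(_ degrees: CGFloat, to previewLayer: AVCaptureVideoPreviewLayer) {
  guard let connection = previewLayer.connection else { return }
  if #available(iOS 17.0, *) {
    if connection.isVideoRotationAngleSupported(degrees) {
      connection.videoRotationAngle = degrees
    }
  } else if connection.isVideoOrientationSupported {
    // Pre-iOS 17 fallback using the discrete orientation cases.
    connection.videoOrientation = .portrait // map `degrees` to a case as needed
  }
}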

View File

@@ -87,6 +87,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   var pinchGestureRecognizer: UIPinchGestureRecognizer?
   var pinchScaleOffset: CGFloat = 1.0
   private var currentConfigureCall: DispatchTime?
+  var lastProcessedTime: Date?
   var previewView: PreviewView
   #if DEBUG
@@ -304,6 +305,15 @@ public final class CameraView: UIView, CameraSessionDelegate {
     onInitialized([:])
   }
 
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
+    guard let configuration, let difference else { return }
+
+    if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
+      connection.setOrientation(configuration.orientation)
+    }
+  }
   func onCameraStarted() {
     ReactLogger.log(level: .info, message: "Camera started!")
     guard let onStarted = onStarted else {
@@ -321,6 +331,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   }
 
   func onFrame(sampleBuffer: CMSampleBuffer) {
+    processFrameIfNeeded(sampleBuffer)
     #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
       if let frameProcessor = frameProcessor {
         // Call Frame Processor
@@ -395,3 +406,99 @@ public final class CameraView: UIView, CameraSessionDelegate {
       }
     }
   }
+
+extension CameraView {
+  func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
+    let currentTime = Date()
+    if let lastTime = lastProcessedTime {
+      // Throttle: only analyze one frame every 10 seconds
+      if currentTime.timeIntervalSince(lastTime) >= 10.0 {
+        processCapturedFrame(sampleBuffer)
+        lastProcessedTime = currentTime
+      }
+    } else {
+      // Process the first frame immediately
+      processCapturedFrame(sampleBuffer)
+      lastProcessedTime = currentTime
+    }
+  }
+
+  func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
+    ReactLogger.log(level: .info, message: "processCapturedFrame")
+    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
+
+    // Analyze the frame for a white balance issue and lower exposure if one is found
+    let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
+    if isWhiteBalanceIssue {
+      ReactLogger.log(level: .info, message: "White balance issue detected")
+      guard let exposure = cameraSession.configuration?.exposure else {
+        updateExposure(0.5)
+        return
+      }
+      updateExposure(exposure - 0.2)
+      ReactLogger.log(level: .info, message: "Exposure = \(exposure)")
+    } else {
+      ReactLogger.log(level: .info, message: "White balance is okay. Exposure = \(String(describing: cameraSession.configuration?.exposure))")
+    }
+  }
+
+  func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
+    ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
+    let extent = ciImage.extent
+
+    // Define the central region as a smaller rectangle in the middle of the frame (1/4 of the area)
+    let centerRect = CGRect(
+      x: extent.origin.x + extent.size.width * 0.25,
+      y: extent.origin.y + extent.size.height * 0.25,
+      width: extent.size.width * 0.5,
+      height: extent.size.height * 0.5
+    )
+
+    // Crop to the center region and compute its average color
+    let croppedImage = ciImage.cropped(to: centerRect)
+    guard let averageColorFilter = CIFilter(name: "CIAreaAverage",
+                                            parameters: [kCIInputImageKey: croppedImage,
+                                                         kCIInputExtentKey: CIVector(cgRect: centerRect)]),
+          let outputImage = averageColorFilter.outputImage else {
+      ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance: CIAreaAverage failed")
+      return false
+    }
+
+    // Render the 1x1 average-color output into a 4-byte RGBA bitmap
+    var bitmap = [UInt8](repeating: 0, count: 4)
+    let context = CIContext()
+    context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
+
+    let red = Float(bitmap[0]) / 255.0
+    let green = Float(bitmap[1]) / 255.0
+    let blue = Float(bitmap[2]) / 255.0
+    ReactLogger.log(level: .info, message: "\(red), \(green), \(blue)")
+
+    // Flag a white balance issue if the channels diverge too much
+    // or any single channel is close to saturation
+    let threshold: Float = 0.25
+    if abs(red - green) > threshold
+      || abs(blue - green) > threshold
+      || abs(1 - red) < threshold
+      || abs(1 - green) < threshold
+      || abs(1 - blue) < threshold {
+      return true
+    }
+    return false
+  }
+
+  func updateExposure(_ exposure: Float) {
+    ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
+    cameraSession.configure { config in
+      config.exposure = exposure
+    }
+  }
+}
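
The detector keys off the CIAreaAverage of the frame's center crop. A quick way to sanity-check the 0.25 threshold is to run the same averaging on a synthetic solid-color image; a strongly tinted frame should be flagged while a neutral gray should not (helper name and test values are illustrative):

import CoreImage

// Illustrative harness: average a CIImage with CIAreaAverage, as the
// analyzeFrameForWhiteBalance logic above does for the center crop.
func averageRGB(of image: CIImage) -> (Float, Float, Float)? {
  let filter = CIFilter(name: "CIAreaAverage",
                        parameters: [kCIInputImageKey: image,
                                     kCIInputExtentKey: CIVector(cgRect: image.extent)])
  guard let output = filter?.outputImage else { return nil }
  var bitmap = [UInt8](repeating: 0, count: 4)
  CIContext().render(output, toBitmap: &bitmap, rowBytes: 4,
                     bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                     format: .RGBA8, colorSpace: nil)
  return (Float(bitmap[0]) / 255.0, Float(bitmap[1]) / 255.0, Float(bitmap[2]) / 255.0)
}

let tinted = CIImage(color: CIColor(red: 0.9, green: 0.5, blue: 0.4))
  .cropped(to: CGRect(x: 0, y: 0, width: 100, height: 100))
if let (r, g, b) = averageRGB(of: tinted) {
  // abs(r - g) = 0.4 > 0.25, so this frame would be flagged.
  print("avg:", r, g, b)
}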

View File

@@ -195,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
           self.delegate?.onSessionInitialized()
         }
+        self.delegate?.onCameraConfigurationChanged(config, difference)
 
         // After configuring, set this to the new configuration.
         self.configuration = config
       } catch {

View File

@@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject {
    Called when the [CameraSession] successfully initializes
    */
   func onSessionInitialized()
+  /**
+   Called whenever the [CameraSession] configuration changes
+   */
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
   /**
    Called when the [CameraSession] starts streaming frames. (isActive=true)
    */

View File

@@ -32,6 +32,15 @@ extension AVCaptureOutput {
   func setOrientation(_ orientation: Orientation) {
     // Set orientation for each connection
     for connection in connections {
+      connection.setOrientation(orientation)
+    }
+  }
+}
+
+extension AVCaptureConnection {
+  func setOrientation(_ orientation: Orientation) {
     #if swift(>=5.9)
       if #available(iOS 17.0, *) {
         // Camera Sensors are always in landscape rotation (90deg).
@@ -41,19 +50,18 @@ extension AVCaptureOutput {
         // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
         // Does that work when we flip the camera?
-        if connection.isVideoRotationAngleSupported(degrees) {
-          connection.videoRotationAngle = degrees
+        if isVideoRotationAngleSupported(degrees) {
+          videoRotationAngle = degrees
         }
       } else {
-        if connection.isVideoOrientationSupported {
-          connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+        if isVideoOrientationSupported {
+          videoOrientation = orientation.toAVCaptureVideoOrientation()
         }
       }
     #else
-      if connection.isVideoOrientationSupported {
-        connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+      if isVideoOrientationSupported {
+        videoOrientation = orientation.toAVCaptureVideoOrientation()
       }
     #endif
-    }
   }
 }
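
The degrees value used in the iOS 17 branch has to fold in the sensor's native landscape mounting mentioned in the comment above. An illustrative mapping (the enum is a stand-in and the exact angles are assumptions, not the library's actual conversion):

import CoreGraphics

// Stand-in for the library's Orientation type.
enum DemoOrientation {
  case portrait, landscapeLeft, portraitUpsideDown, landscapeRight

  // Possible rotation angles for videoRotationAngle (iOS 17+), assuming the
  // sensor's native orientation corresponds to landscape-right (0 degrees).
  var degrees: CGFloat {
    switch self {
    case .landscapeRight: return 0
    case .portrait: return 90
    case .landscapeLeft: return 180
    case .portraitUpsideDown: return 270
    }
  }
}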

View File

@@ -113,5 +113,19 @@ class ViewController: UIViewController {
     }
   }
 
+  override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
+    // UIDeviceOrientation is mirrored relative to the interface orientation in
+    // landscape: a device rotated .landscapeLeft presents its UI landscape-right.
+    switch UIDevice.current.orientation {
+    case .landscapeLeft:
+      cameraView.orientation = "landscape-right"
+    case .landscapeRight:
+      cameraView.orientation = "landscape-left"
+    default:
+      cameraView.orientation = "portrait"
+    }
+    cameraView.didSetProps([])
+    super.viewWillTransition(to: size, with: coordinator)
+  }
 }