From e16c25c96cf29679fc5ca015921e584339985b3b Mon Sep 17 00:00:00 2001
From: Volodymyr Smolianinov
Date: Fri, 27 Sep 2024 09:42:30 +0200
Subject: [PATCH] Decrease exposure on white balance issue

---
 package/ios/CameraView.swift | 98 ++++++++++++++++++++++++++++++++++++
 1 file changed, 98 insertions(+)

diff --git a/package/ios/CameraView.swift b/package/ios/CameraView.swift
index ffbfbc9..3b46ea5 100644
--- a/package/ios/CameraView.swift
+++ b/package/ios/CameraView.swift
@@ -87,6 +87,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   var pinchGestureRecognizer: UIPinchGestureRecognizer?
   var pinchScaleOffset: CGFloat = 1.0
   private var currentConfigureCall: DispatchTime?
+  var lastProcessedTime: Date?

   var previewView: PreviewView
   #if DEBUG
@@ -330,6 +331,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   }

   func onFrame(sampleBuffer: CMSampleBuffer) {
+    processFrameIfNeeded(sampleBuffer)
     #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
       if let frameProcessor = frameProcessor {
         // Call Frame Processor
@@ -404,3 +406,99 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
   }
 }
+
+extension CameraView {
+
+  func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
+    let currentTime = Date()
+    if let lastTime = lastProcessedTime {
+      // Re-check the white balance at most once every 10 seconds
+      if currentTime.timeIntervalSince(lastTime) >= 10.0 {
+        processCapturedFrame(sampleBuffer)
+        lastProcessedTime = currentTime
+      }
+    } else {
+      // Process the first frame immediately
+      processCapturedFrame(sampleBuffer)
+      lastProcessedTime = currentTime
+    }
+  }
+
+  func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
+    ReactLogger.log(level: .info, message: "processCapturedFrame")
+    // Extract the pixel buffer for analysis
+    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+
+    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
+
+    // Analyze the frame for a white balance issue
+    let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
+
+    if isWhiteBalanceIssue {
+      ReactLogger.log(level: .info, message: "White balance issue detected")
+      guard let exposure = cameraSession.configuration?.exposure else {
+        updateExposure(0.5)
+        return
+      }
+      updateExposure(exposure - 0.2)
+      ReactLogger.log(level: .info, message: "Exposure decreased from \(exposure) to \(exposure - 0.2)")
+    } else {
+      ReactLogger.log(level: .info, message: "White balance is okay")
+      print("White balance is okay. Exposure = \(String(describing: cameraSession.configuration?.exposure))")
+    }
+  }
+
+  func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
+    ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
+    let extent = ciImage.extent
+
+    // Define the central region as a rectangle covering the middle of the frame (half the width and height, i.e. a quarter of the area)
+    let centerRect = CGRect(
+      x: extent.origin.x + extent.size.width * 0.25,
+      y: extent.origin.y + extent.size.height * 0.25,
+      width: extent.size.width * 0.5,
+      height: extent.size.height * 0.5
+    )
+
+    // Crop the image to the centerRect
+    let croppedImage = ciImage.cropped(to: centerRect)
+
+    let averageColorFilter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: croppedImage, kCIInputExtentKey: CIVector(cgRect: centerRect)])!
+
+    guard let outputImage = averageColorFilter.outputImage else {
+      ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance: CIAreaAverage produced no output image")
+      return false
+    }
+
+    // CIAreaAverage reduces the region to a single pixel; read it back as RGBA8
+    var bitmap = [UInt8](repeating: 0, count: 4)
+    let context = CIContext()
+    context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
+
+    let red = Float(bitmap[0]) / 255.0
+    let green = Float(bitmap[1]) / 255.0
+    let blue = Float(bitmap[2]) / 255.0
+
+    ReactLogger.log(level: .info, message: "Average RGB: \(red), \(green), \(blue)")
+
+    // Flag a white balance issue when the channels diverge or any channel is close to full brightness
+    let threshold: Float = 0.25
+    if abs(red - green) > threshold
+      || abs(blue - green) > threshold
+      || abs(1 - red) < threshold
+      || abs(1 - green) < threshold
+      || abs(1 - blue) < threshold {
+      return true
+    }
+
+    return false
+  }
+
+  func updateExposure(_ exposure: Float) {
+    ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
+
+    cameraSession.configure { config in
+      config.exposure = exposure
+    }
+  }
+}
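
Note (reviewer sketch, not part of the patch): the heuristic above can be exercised in isolation to tune the 0.25 threshold before wiring it into the camera pipeline. The snippet below is a minimal, self-contained Swift version of the same center-crop CIAreaAverage sampling, assuming only CoreImage; averageRGB(of:) is a hypothetical helper name and is not part of the VisionCamera API.

import CoreImage

// Minimal sketch: same 50% center crop + CIAreaAverage sampling as
// analyzeFrameForWhiteBalance in the patch. averageRGB(of:) is a hypothetical helper.
func averageRGB(of image: CIImage, context: CIContext = CIContext()) -> (red: Float, green: Float, blue: Float)? {
  let extent = image.extent
  // Central region: half the width and height of the frame
  let centerRect = CGRect(x: extent.midX - extent.width * 0.25,
                          y: extent.midY - extent.height * 0.25,
                          width: extent.width * 0.5,
                          height: extent.height * 0.5)
  guard let filter = CIFilter(name: "CIAreaAverage",
                              parameters: [kCIInputImageKey: image.cropped(to: centerRect),
                                           kCIInputExtentKey: CIVector(cgRect: centerRect)]),
        let output = filter.outputImage else {
    return nil
  }
  // CIAreaAverage reduces the region to a single pixel; read it back as RGBA8
  var bitmap = [UInt8](repeating: 0, count: 4)
  context.render(output, toBitmap: &bitmap, rowBytes: 4,
                 bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                 format: .RGBA8, colorSpace: nil)
  return (red: Float(bitmap[0]) / 255.0, green: Float(bitmap[1]) / 255.0, blue: Float(bitmap[2]) / 255.0)
}

// Example (e.g. in a playground or unit test): a solid near-white image should trip
// the "channel close to 1.0" branch of the threshold check, while a mid-gray image should not.
let white = CIImage(color: CIColor(red: 0.95, green: 0.95, blue: 0.95))
  .cropped(to: CGRect(x: 0, y: 0, width: 100, height: 100))
if let rgb = averageRGB(of: white) {
  let threshold: Float = 0.25
  let hasIssue = abs(rgb.red - rgb.green) > threshold
    || abs(rgb.blue - rgb.green) > threshold
    || abs(1 - rgb.red) < threshold
    || abs(1 - rgb.green) < threshold
    || abs(1 - rgb.blue) < threshold
  print("white balance issue: \(hasIssue)")  // expected: true
}

With this in place, the threshold behavior can be checked against known solid-color inputs before testing on live camera frames.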