Compare commits


1 Commit

Author SHA1 Message Date
e16c25c96c Decrease exposure on white balance issue 2024-09-27 09:42:30 +02:00
7 changed files with 111 additions and 114 deletions

View File

@@ -1,7 +1,6 @@
 package com.mrousavy.camera.core

 import android.annotation.SuppressLint
-import android.content.res.Configuration
 import android.content.Context
 import android.graphics.Point
 import android.util.Log
@@ -98,14 +97,22 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
     }
   }

+  override fun requestLayout() {
+    super.requestLayout()
+    // Manually trigger measure & layout, as RN on Android skips those.
+    // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
+    post {
+      measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
+      layout(left, top, right, bottom)
+    }
+  }
+
   private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
     var contentSize = contentSize
-    var androidOrientation = context.getResources().getConfiguration().orientation;
-
-    if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
+    // Swap dimensions if orientation is landscape
+    if (orientation.isLandscape()) {
       contentSize = Size(contentSize.height, contentSize.width)
     }

     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height

     if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {

View File

@@ -50,12 +50,4 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
   func resumeRecording(promise: Promise) {
     cameraSession.resumeRecording(promise: promise)
   }
-
-  func lockExposure(promise: Promise) {
-    cameraSession.lockCurrentExposure(promise: promise)
-  }
-
-  func unlockExposure(promise: Promise) {
-    cameraSession.unlockCurrentExposure(promise: promise)
-  }
 }

View File

@@ -87,6 +87,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   var pinchGestureRecognizer: UIPinchGestureRecognizer?
   var pinchScaleOffset: CGFloat = 1.0
   private var currentConfigureCall: DispatchTime?
+  var lastProcessedTime: Date?

   var previewView: PreviewView
   #if DEBUG
@@ -330,6 +331,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
   }

   func onFrame(sampleBuffer: CMSampleBuffer) {
+    processFrameIfNeeded(sampleBuffer)
     #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
       if let frameProcessor = frameProcessor {
         // Call Frame Processor
@@ -404,3 +406,99 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
   }
 }
+
+extension CameraView {
+  func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
+    let currentTime = Date()
+    if let lastTime = lastProcessedTime {
+      // Re-analyze at most once every 10 seconds.
+      if currentTime.timeIntervalSince(lastTime) >= 10.0 {
+        processCapturedFrame(sampleBuffer)
+        lastProcessedTime = currentTime
+      }
+    } else {
+      // Process the first frame immediately.
+      processCapturedFrame(sampleBuffer)
+      lastProcessedTime = currentTime
+    }
+  }
+
+  func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
+    ReactLogger.log(level: .info, message: "processCapturedFrame")
+    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
+
+    // Analyze the frame for a white-balance issue and nudge exposure down if one is found.
+    let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
+    if isWhiteBalanceIssue {
+      ReactLogger.log(level: .info, message: "White balance issue detected")
+      guard let exposure = cameraSession.configuration?.exposure else {
+        updateExposure(0.5)
+        return
+      }
+      updateExposure(exposure - 0.2)
+      ReactLogger.log(level: .info, message: "Exposure = \(exposure)")
+    } else {
+      ReactLogger.log(level: .info, message: "White balance is okay. Exposure = \(String(describing: cameraSession.configuration?.exposure))")
+    }
+  }
+
+  func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
+    ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
+    let extent = ciImage.extent
+
+    // Define the central region as a smaller rectangle in the middle of the frame (1/4 the area).
+    let centerRect = CGRect(
+      x: extent.origin.x + extent.size.width * 0.25,
+      y: extent.origin.y + extent.size.height * 0.25,
+      width: extent.size.width * 0.5,
+      height: extent.size.height * 0.5
+    )
+
+    // Crop the image to the center region and average its color.
+    let croppedImage = ciImage.cropped(to: centerRect)
+    let averageColorFilter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: croppedImage, kCIInputExtentKey: CIVector(cgRect: centerRect)])!
+    guard let outputImage = averageColorFilter.outputImage else {
+      ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance guard")
+      return false
+    }
+
+    // Render the 1x1 averaged output into a 4-byte RGBA bitmap.
+    var bitmap = [UInt8](repeating: 0, count: 4)
+    let context = CIContext()
+    context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
+
+    let red = Float(bitmap[0]) / 255.0
+    let green = Float(bitmap[1]) / 255.0
+    let blue = Float(bitmap[2]) / 255.0
+    ReactLogger.log(level: .info, message: "\(red), \(green), \(blue)")
+
+    // Flag a white-balance issue when color channels diverge strongly or any channel is close to clipping.
+    let threshold: Float = 0.25
+    if abs(red - green) > threshold
+      || abs(blue - green) > threshold
+      || abs(1 - red) < threshold
+      || abs(1 - green) < threshold
+      || abs(1 - blue) < threshold {
+      return true
+    }
+    return false
+  }
+
+  func updateExposure(_ exposure: Float) {
+    ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
+    cameraSession.configure { config in
+      config.exposure = exposure
+    }
+  }
+}
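
The white-balance check above hinges on CIAreaAverage collapsing the center crop to a single averaged pixel. Below is a minimal standalone sketch of that sampling step, useful for sanity-checking the 0.25 threshold against a known solid color; the averageColor(of:in:) helper is illustrative and not part of this diff:

import CoreImage

// Illustrative helper mirroring the CIAreaAverage sampling in analyzeFrameForWhiteBalance.
func averageColor(of image: CIImage, in rect: CGRect) -> (r: Float, g: Float, b: Float)? {
  let filter = CIFilter(name: "CIAreaAverage",
                        parameters: [kCIInputImageKey: image.cropped(to: rect),
                                     kCIInputExtentKey: CIVector(cgRect: rect)])
  guard let output = filter?.outputImage else { return nil }

  // Render the 1x1 averaged output into a 4-byte RGBA bitmap.
  var bitmap = [UInt8](repeating: 0, count: 4)
  let context = CIContext()
  context.render(output, toBitmap: &bitmap, rowBytes: 4,
                 bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                 format: .RGBA8, colorSpace: nil)
  return (Float(bitmap[0]) / 255, Float(bitmap[1]) / 255, Float(bitmap[2]) / 255)
}

// A strongly blue test image should trip the same channel-difference threshold.
let testImage = CIImage(color: CIColor(red: 0.1, green: 0.2, blue: 0.9))
  .cropped(to: CGRect(x: 0, y: 0, width: 100, height: 100))
if let avg = averageColor(of: testImage, in: testImage.extent) {
  print(abs(avg.b - avg.g) > 0.25) // expected: true
}

Note that rendering with colorSpace: nil skips color management, so the channel values come back in the context's working space, which matches what the code in the diff does.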

View File

@@ -86,13 +86,5 @@ RCT_EXTERN_METHOD(focus
                   : (NSDictionary*)point resolve
                   : (RCTPromiseResolveBlock)resolve reject
                   : (RCTPromiseRejectBlock)reject);
-
-RCT_EXTERN_METHOD(lockCurrentExposure
-                  : (nonnull NSNumber*)node resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject);
-
-RCT_EXTERN_METHOD(unlockCurrentExposure
-                  : (nonnull NSNumber*)node resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject);

 @end

View File

@@ -111,18 +111,6 @@ final class CameraViewManager: RCTViewManager {
     }
   }

-  @objc
-  final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
-    let component = getCameraView(withTag: node)
-    component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
-  }
-
-  @objc
-  final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
-    let component = getCameraView(withTag: node)
-    component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
-  }
-
   // MARK: Private

   private func getCameraView(withTag tag: NSNumber) -> CameraView {

View File

@@ -191,68 +191,4 @@ extension CameraSession {
       }
     }
   }
-
-  func lockCurrentExposure(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-          print("No capture device available")
-          return
-        }
-
-        guard captureDevice.isExposureModeSupported(.custom) else {
-          ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
-          return
-        }
-
-        do {
-          // Lock the device for configuration
-          try captureDevice.lockForConfiguration()
-
-          // Get the current exposure duration and ISO
-          let currentExposureDuration = captureDevice.exposureDuration
-          let currentISO = captureDevice.iso
-
-          // Check if the device supports custom exposure settings
-          if captureDevice.isExposureModeSupported(.custom) {
-            // Lock the current exposure and ISO by setting custom exposure mode
-            captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
-            ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
-          } else {
-            ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
-          }
-
-          // Unlock the device after configuration
-          captureDevice.unlockForConfiguration()
-        } catch {
-          ReactLogger.log(level: .warning, message: "Error locking exposure: \(error)")
-        }
-
-        return nil
-      }
-    }
-  }
-
-  func unlockCurrentExposure(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-          print("No capture device available")
-          return
-        }
-
-        do {
-          if captureDevice.isExposureModeSupported(.autoExpose) {
-            try captureDevice.lockForConfiguration()
-            captureDevice.exposureMode = .continuousAutoExposure
-            captureDevice.unlockForConfiguration()
-          }
-        } catch {
-          ReactLogger.log(level: .warning, message: "Error unlocking exposure: \(error)")
-        }
-
-        return nil
-      }
-    }
-  }
 }
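
One caveat worth noting in the removed implementation: AVCaptureDevice.default(for: .video) returns the system default camera, which is not necessarily the device attached to the running session (for instance when the front camera is selected). Below is a hedged sketch of locking exposure on the session's own video input instead; the lockExposure(on:) helper and its session parameter are illustrative, not part of this diff:

import AVFoundation

// Illustrative sketch: lock exposure on the device actually feeding `session`,
// instead of whatever AVCaptureDevice.default(for: .video) returns.
func lockExposure(on session: AVCaptureSession) {
  // Find the video device among the session's inputs.
  let device = session.inputs
    .compactMap { $0 as? AVCaptureDeviceInput }
    .first { $0.device.hasMediaType(.video) }?
    .device
  guard let device = device, device.isExposureModeSupported(.custom) else { return }

  do {
    try device.lockForConfiguration()
    // Freeze the current duration and ISO by switching to custom exposure mode.
    device.setExposureModeCustom(duration: device.exposureDuration,
                                 iso: device.iso,
                                 completionHandler: nil)
    device.unlockForConfiguration()
  } catch {
    print("Could not lock exposure: \(error)")
  }
}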

View File

@@ -319,22 +319,6 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
       throw tryParseNativeCameraError(e)
     }
   }
-
-  public async lockCurrentExposure(): Promise<void> {
-    try {
-      return await CameraModule.lockCurrentExposure(this.handle)
-    } catch (e) {
-      throw tryParseNativeCameraError(e)
-    }
-  }
-
-  public async unlockCurrentExposure(): Promise<void> {
-    try {
-      return await CameraModule.unlockCurrentExposure(this.handle)
-    } catch (e) {
-      throw tryParseNativeCameraError(e)
-    }
-  }

   //#endregion

   //#region Static Functions (NativeModule)