Compare commits: 695e317a77...volodymyr/

1 commit

| Author | SHA1 | Date |
|---|---|---|
| | e16c25c96c | |
Gradle build script (Android Gradle Plugin version and Java compatibility):

```diff
@@ -16,7 +16,7 @@ buildscript {
   }
 
   dependencies {
-    classpath "com.android.tools.build:gradle:8.5.2"
+    classpath "com.android.tools.build:gradle:7.4.2"
     classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
   }
 }
@@ -133,8 +133,8 @@ android {
   }
 
   compileOptions {
-    sourceCompatibility JavaVersion.VERSION_17
-    targetCompatibility JavaVersion.VERSION_17
+    sourceCompatibility JavaVersion.VERSION_1_8
+    targetCompatibility JavaVersion.VERSION_1_8
   }
 
   externalNativeBuild {
```
Gradle wrapper properties:

```diff
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
```
Android manifest:

```diff
@@ -1,3 +1,4 @@
-<manifest xmlns:android="http://schemas.android.com/apk/res/android">
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          package="com.mrousavy.camera">
 
 </manifest>
```
VisionCameraProxy (Kotlin):

```diff
@@ -7,14 +7,12 @@ import com.facebook.jni.HybridData
 import com.facebook.proguard.annotations.DoNotStrip
 import com.facebook.react.bridge.ReactApplicationContext
 import com.facebook.react.bridge.UiThreadUtil
-import com.facebook.react.common.annotations.FrameworkAPI
 import com.facebook.react.turbomodule.core.CallInvokerHolderImpl
 import com.facebook.react.uimanager.UIManagerHelper
 import com.mrousavy.camera.CameraView
 import com.mrousavy.camera.core.ViewNotFoundError
 import java.lang.ref.WeakReference
 
-@OptIn(FrameworkAPI::class)
 @Suppress("KotlinJniMissingFunction") // we use fbjni.
 class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
   companion object {
```
CameraView (Swift), new property and frame callback hook:

```diff
@@ -87,6 +87,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   var pinchGestureRecognizer: UIPinchGestureRecognizer?
   var pinchScaleOffset: CGFloat = 1.0
   private var currentConfigureCall: DispatchTime?
+  var lastProcessedTime: Date?
 
   var previewView: PreviewView
   #if DEBUG
@@ -330,6 +331,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   }
 
   func onFrame(sampleBuffer: CMSampleBuffer) {
+    processFrameIfNeeded(sampleBuffer)
     #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
       if let frameProcessor = frameProcessor {
         // Call Frame Processor
```
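The one-line addition to `onFrame` routes every camera frame through `processFrameIfNeeded`, and the extension in the next hunk only lets the analysis run when at least ten seconds have passed since the previously processed frame (the first frame is processed immediately). As a minimal sketch of that throttling pattern in isolation, here is an illustrative `FrameThrottler` helper; the type and its names are not part of the commit:

```swift
import Foundation

// Illustrative helper (not in the commit): the same time-based throttle
// that processFrameIfNeeded implements inline on CameraView.
final class FrameThrottler {
  private var lastProcessedTime: Date?
  private let interval: TimeInterval

  init(interval: TimeInterval = 10.0) {
    self.interval = interval
  }

  // Returns true when the caller should process this frame: either it is the
  // first frame, or at least `interval` seconds have elapsed since the last one.
  func shouldProcess(at now: Date = Date()) -> Bool {
    if let last = lastProcessedTime, now.timeIntervalSince(last) < interval {
      return false
    }
    lastProcessedTime = now
    return true
  }
}

// Usage sketch: if throttler.shouldProcess() { processCapturedFrame(sampleBuffer) }
```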
CameraView (Swift), new extension with the throttled white-balance check:

```diff
@@ -404,3 +406,99 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
   }
 }
+
+extension CameraView {
+  func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
+    let currentTime = Date()
+    if let lastTime = lastProcessedTime {
+      if currentTime.timeIntervalSince(lastTime) >= 10.0 {
+        processCapturedFrame(sampleBuffer)
+        lastProcessedTime = currentTime
+      }
+    } else {
+      // Process the first frame immediately
+      processCapturedFrame(sampleBuffer)
+      lastProcessedTime = currentTime
+    }
+  }
+
+  func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
+    ReactLogger.log(level: .info, message: "processCapturedFrame")
+    // Your existing processing logic
+    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+
+    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
+
+    // Analyze for white balance
+    let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
+
+    if isWhiteBalanceIssue {
+      ReactLogger.log(level: .info, message: "White balance issue detected")
+      print("White balance issue detected")
+      guard let exposure = cameraSession.configuration?.exposure else {
+        updateExposure(0.5)
+        return
+      }
+      updateExposure(exposure - 0.2)
+      ReactLogger.log(level: .info, message: "Exposure = \(exposure)")
+    } else {
+      ReactLogger.log(level: .info, message: "White balance is okay")
+      print("White balance is okay. Exposure = \(cameraSession.configuration?.exposure)")
+    }
+  }
+
+  func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
+    ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
+    let extent = ciImage.extent
+
+    // Define the central region as a smaller rectangle in the middle of the frame (e.g., 1/4 the size)
+    let centerRect = CGRect(
+      x: extent.origin.x + extent.size.width * 0.25,
+      y: extent.origin.y + extent.size.height * 0.25,
+      width: extent.size.width * 0.5,
+      height: extent.size.height * 0.5
+    )
+
+    // Crop the image to the centerRect
+    let croppedImage = ciImage.cropped(to: centerRect)
+
+    let averageColorFilter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: croppedImage, kCIInputExtentKey: CIVector(cgRect: centerRect)])!
+
+    guard let outputImage = averageColorFilter.outputImage else {
+      ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance guard")
+      return false
+    }
+
+    var bitmap = [UInt8](repeating: 0, count: 4)
+    let context = CIContext()
+    context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
+
+    let red = Float(bitmap[0]) / 255.0
+    let green = Float(bitmap[1]) / 255.0
+    let blue = Float(bitmap[2]) / 255.0
+
+    ReactLogger.log(level: .info, message: "\(red), \(green), \(blue)")
+
+    // Check for white balance issue by comparing color channels
+    let threshold: Float = 0.25
+    if abs(red - green) > threshold
+        || abs(blue - green) > threshold
+        || abs(1 - red) < threshold
+        || abs(1 - green) < threshold
+        || abs(1 - blue) < threshold {
+      print("White balance issue detected")
+      return true
+    }
+
+    return false
+  }
+
+  func updateExposure(_ exposure: Float) {
+    ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
+
+    cameraSession.configure { config in
+      config.exposure = exposure
+    }
+  }
+}
```
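For reference, `analyzeFrameForWhiteBalance` crops the frame to its central 50 percent, averages it down to a single pixel with Core Image's `CIAreaAverage` filter, and flags a white-balance issue when the color channels diverge by more than 0.25 or any channel is close to full intensity. The sketch below isolates that average-color read so the thresholds can be sanity-checked against a solid-color test image; the standalone `averageRGB` function and the test color are illustrative, not part of the commit:

```swift
import CoreImage

// Illustrative standalone version of the average-color read used above.
func averageRGB(of image: CIImage) -> (red: Float, green: Float, blue: Float)? {
  guard let filter = CIFilter(name: "CIAreaAverage",
                              parameters: [kCIInputImageKey: image,
                                           kCIInputExtentKey: CIVector(cgRect: image.extent)]),
        let output = filter.outputImage else {
    return nil
  }

  // CIAreaAverage reduces the whole extent to one pixel; render it into a 4-byte RGBA buffer.
  var bitmap = [UInt8](repeating: 0, count: 4)
  CIContext().render(output,
                     toBitmap: &bitmap,
                     rowBytes: 4,
                     bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                     format: .RGBA8,
                     colorSpace: nil)
  return (Float(bitmap[0]) / 255.0, Float(bitmap[1]) / 255.0, Float(bitmap[2]) / 255.0)
}

// Example: a strongly red-tinted image trips the 0.25 channel-difference threshold.
let testImage = CIImage(color: CIColor(red: 0.9, green: 0.4, blue: 0.4))
  .cropped(to: CGRect(x: 0, y: 0, width: 64, height: 64))
if let rgb = averageRGB(of: testImage) {
  let threshold: Float = 0.25
  let looksOff = abs(rgb.red - rgb.green) > threshold || abs(rgb.blue - rgb.green) > threshold
  print("average:", rgb, "white balance issue:", looksOff)  // true for this input
}
```

Rendering the filter output into a 1x1 RGBA8 bitmap is the same technique the commit uses to read the averaged pixel before comparing channels.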