Compare commits
1 Commit
volodymyr/...
eyenov/pre

Author | SHA1 | Date
---|---|---
 | c994506abc |
@@ -107,47 +107,6 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
     }
   }
 
-  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
-    var contentSize = contentSize
-    // Swap dimensions if orientation is landscape
-    if (orientation.isLandscape()) {
-      contentSize = Size(contentSize.height, contentSize.width)
-    }
-    val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
-    val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
-    if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {
-      // One of the aspect ratios is 0 or NaN, maybe the view hasn't been laid out yet.
-      return contentSize
-    }
-
-    val widthOverHeight = when (resizeMode) {
-      ResizeMode.COVER -> contentAspectRatio > containerAspectRatio
-      ResizeMode.CONTAIN -> contentAspectRatio < containerAspectRatio
-    }
-
-    return if (widthOverHeight) {
-      // Scale by width to cover height
-      val scaledWidth = containerSize.height * contentAspectRatio
-      Size(scaledWidth.roundToInt(), containerSize.height)
-    } else {
-      // Scale by height to cover width
-      val scaledHeight = containerSize.width / contentAspectRatio
-      Size(containerSize.width, scaledHeight.roundToInt())
-    }
-  }
-
-  @SuppressLint("DrawAllocation")
-  override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
-    super.onMeasure(widthMeasureSpec, heightMeasureSpec)
-
-    val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
-    val surfaceSize = size.rotatedBy(inputOrientation)
-    val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode)
-
-    Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)")
-    setMeasuredDimension(fittedSize.width, fittedSize.height)
-  }
-
   companion object {
     private const val TAG = "PreviewView"
   }
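Note: the removed getSize above is standard aspect-fit/aspect-fill math: compare the content and container aspect ratios, then match the container's height (letting width overflow or underflow) or its width, depending on the resize mode. A minimal standalone sketch of the same calculation in Swift follows; the function and enum names here are illustrative, not part of the codebase.

  import CoreGraphics

  enum ResizeMode { case cover, contain }

  // Scale `content` to `container`, preserving its aspect ratio.
  // .cover fills the container (content may overflow); .contain fits inside it.
  func fittedSize(content: CGSize, container: CGSize, mode: ResizeMode) -> CGSize {
    let contentRatio = content.width / content.height
    let containerRatio = container.width / container.height
    // A zero or NaN ratio means a dimension is unknown (e.g., not laid out yet).
    guard contentRatio > 0, containerRatio > 0 else { return content }

    // For .cover, match the container's height when the content is proportionally
    // wider, so the extra width spills past the container; .contain inverts the test.
    let matchHeight = mode == .cover
      ? contentRatio > containerRatio
      : contentRatio < containerRatio
    return matchHeight
      ? CGSize(width: container.height * contentRatio, height: container.height)
      : CGSize(width: container.width, height: container.width / contentRatio)
  }

For example, fitting 1920x1080 content into a 1080x1920 portrait container with .cover yields roughly 3413x1920: the height matches the container and the extra width is cropped by the view.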
@@ -87,7 +87,6 @@ public final class CameraView: UIView, CameraSessionDelegate {
   var pinchGestureRecognizer: UIPinchGestureRecognizer?
   var pinchScaleOffset: CGFloat = 1.0
   private var currentConfigureCall: DispatchTime?
-  var lastProcessedTime: Date?
 
   var previewView: PreviewView
   #if DEBUG
@@ -331,7 +330,6 @@ public final class CameraView: UIView, CameraSessionDelegate {
   }
 
   func onFrame(sampleBuffer: CMSampleBuffer) {
-    processFrameIfNeeded(sampleBuffer)
     #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
       if let frameProcessor = frameProcessor {
         // Call Frame Processor
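The call removed here was a time-based throttle (its implementation is deleted in the next hunk): analyze at most one frame every 10 seconds, processing the very first frame immediately. A generic sketch of that pattern, assuming nothing beyond Foundation; the Throttler name is made up for illustration.

  import Foundation

  // Runs `work` at most once per `interval`; the first call runs immediately.
  final class Throttler {
    private let interval: TimeInterval
    private var lastRun: Date?

    init(interval: TimeInterval) { self.interval = interval }

    func run(_ work: () -> Void) {
      let now = Date()
      if let lastRun = lastRun, now.timeIntervalSince(lastRun) < interval { return }
      lastRun = now
      work()
    }
  }

  // Usage: create one Throttler(interval: 10) and call
  // throttler.run { analyze(frame) } for every incoming frame.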
@@ -406,99 +404,3 @@ public final class CameraView: UIView, CameraSessionDelegate {
       }
     }
   }
-
-extension CameraView {
-
-  func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
-    let currentTime = Date()
-    if let lastTime = lastProcessedTime {
-      if currentTime.timeIntervalSince(lastTime) >= 10.0 {
-        processCapturedFrame(sampleBuffer)
-        lastProcessedTime = currentTime
-      }
-    } else {
-      // Process the first frame immediately
-      processCapturedFrame(sampleBuffer)
-      lastProcessedTime = currentTime
-    }
-  }
-
-  func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
-    ReactLogger.log(level: .info, message: "processCapturedFrame")
-    // Your existing processing logic
-    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
-
-    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
-
-    // Analyze for white balance
-    let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
-
-    if isWhiteBalanceIssue {
-      ReactLogger.log(level: .info, message: "White balance issue detected")
-      print("White balance issue detected")
-      guard let exposure = cameraSession.configuration?.exposure else {
-        updateExposure(0.5)
-        return
-      }
-      updateExposure(exposure - 0.2)
-      ReactLogger.log(level: .info, message: "Exposure = \(exposure)")
-    } else {
-      ReactLogger.log(level: .info, message: "White balance is okay")
-      print("White balance is okay. Exposure = \(cameraSession.configuration?.exposure)")
-    }
-  }
-
-  func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
-    ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
-    let extent = ciImage.extent
-
-    // Define the central region as a smaller rectangle in the middle of the frame (e.g., 1/4 the size)
-    let centerRect = CGRect(
-      x: extent.origin.x + extent.size.width * 0.25,
-      y: extent.origin.y + extent.size.height * 0.25,
-      width: extent.size.width * 0.5,
-      height: extent.size.height * 0.5
-    )
-
-    // Crop the image to the centerRect
-    let croppedImage = ciImage.cropped(to: centerRect)
-
-    let averageColorFilter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: croppedImage, kCIInputExtentKey: CIVector(cgRect: centerRect)])!
-
-    guard let outputImage = averageColorFilter.outputImage else {
-      ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance guard")
-      return false
-    }
-
-    var bitmap = [UInt8](repeating: 0, count: 4)
-    let context = CIContext()
-    context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
-
-    let red = Float(bitmap[0]) / 255.0
-    let green = Float(bitmap[1]) / 255.0
-    let blue = Float(bitmap[2]) / 255.0
-
-    ReactLogger.log(level: .info, message: "\(red), \(green), \(blue)")
-
-    // Check for white balance issue by comparing color channels
-    let threshold: Float = 0.25
-    if abs(red - green) > threshold
-      || abs(blue - green) > threshold
-      || abs(1 - red) < threshold
-      || abs(1 - green) < threshold
-      || abs(1 - blue) < threshold {
-      print("White balance issue detected")
-      return true
-    }
-
-    return false
-  }
-
-  func updateExposure(_ exposure: Float) {
-    ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
-
-    cameraSession.configure { config in
-      config.exposure = exposure
-    }
-  }
-}
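For reference, the core of the deleted extension is the CIAreaAverage trick: reduce the central quarter of the frame to a single averaged RGBA pixel, then flag a white-balance problem when the channels diverge from each other or sit near saturation. Below is a self-contained sketch of just that sampling step, using the same 0.25 threshold as the removed code; the function name is illustrative, and ReactLogger and cameraSession in the diff are project-specific types not used here.

  import CoreImage

  // Average the central region of `image` to one pixel and check channel balance.
  func looksUnbalanced(_ image: CIImage, threshold: Float = 0.25) -> Bool {
    let e = image.extent
    let center = CGRect(x: e.midX - e.width / 4, y: e.midY - e.height / 4,
                        width: e.width / 2, height: e.height / 2)
    guard let filter = CIFilter(name: "CIAreaAverage", parameters: [
            kCIInputImageKey: image.cropped(to: center),
            kCIInputExtentKey: CIVector(cgRect: center),
          ]),
          let output = filter.outputImage else { return false }

    // Render the 1x1 averaged output into a 4-byte RGBA buffer.
    var px = [UInt8](repeating: 0, count: 4)
    CIContext().render(output, toBitmap: &px, rowBytes: 4,
                       bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                       format: .RGBA8, colorSpace: nil)
    let (r, g, b) = (Float(px[0]) / 255, Float(px[1]) / 255, Float(px[2]) / 255)

    // Large channel gaps or near-saturated channels suggest a color cast.
    return abs(r - g) > threshold || abs(b - g) > threshold
      || 1 - r < threshold || 1 - g < threshold || 1 - b < threshold
  }

One design note: the removed code created a new CIContext for every analyzed frame; CIContext is expensive to construct, so a production version would cache a single instance.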