Compare commits: ivan/fix-a...volodymyr/
3 commits: e16c25c96c, fcf5fe70f3, 3a20c44a31
@@ -87,6 +87,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   var pinchGestureRecognizer: UIPinchGestureRecognizer?
   var pinchScaleOffset: CGFloat = 1.0
   private var currentConfigureCall: DispatchTime?
+  var lastProcessedTime: Date?
 
   var previewView: PreviewView
   #if DEBUG
@@ -303,6 +304,15 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
     onInitialized([:])
   }
 
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
+    guard let configuration, let difference else { return }
+
+    if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
+      let videoPreviewLayer = previewView.videoPreviewLayer
+      connection.setOrientation(configuration.orientation)
+    }
+  }
+
   func onCameraStarted() {
     ReactLogger.log(level: .info, message: "Camera started!")
@@ -321,6 +331,7 @@ public final class CameraView: UIView, CameraSessionDelegate {
   }
 
   func onFrame(sampleBuffer: CMSampleBuffer) {
+    processFrameIfNeeded(sampleBuffer)
     #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
     if let frameProcessor = frameProcessor {
       // Call Frame Processor
@@ -395,3 +406,99 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
   }
 }
+
+extension CameraView {
+
+  func processFrameIfNeeded(_ sampleBuffer: CMSampleBuffer) {
+    let currentTime = Date()
+    if let lastTime = lastProcessedTime {
+      if currentTime.timeIntervalSince(lastTime) >= 10.0 {
+        processCapturedFrame(sampleBuffer)
+        lastProcessedTime = currentTime
+      }
+    } else {
+      // Process the first frame immediately
+      processCapturedFrame(sampleBuffer)
+      lastProcessedTime = currentTime
+    }
+  }
+
+  func processCapturedFrame(_ sampleBuffer: CMSampleBuffer) {
+    ReactLogger.log(level: .info, message: "processCapturedFrame")
+    // Your existing processing logic
+    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+
+    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
+
+    // Analyze for white balance
+    let isWhiteBalanceIssue = analyzeFrameForWhiteBalance(ciImage: ciImage)
+
+    if isWhiteBalanceIssue {
+      ReactLogger.log(level: .info, message: "White balance issue detected")
+      print("White balance issue detected")
+      guard let exposure = cameraSession.configuration?.exposure else {
+        updateExposure(0.5)
+        return
+      }
+      updateExposure(exposure - 0.2)
+      ReactLogger.log(level: .info, message: "Exposure = \(exposure)")
+    } else {
+      ReactLogger.log(level: .info, message: "White balance is okay")
+      print("White balance is okay. Exposure = \(String(describing: cameraSession.configuration?.exposure))")
+    }
+  }
+
+  func analyzeFrameForWhiteBalance(ciImage: CIImage) -> Bool {
+    ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance")
+    let extent = ciImage.extent
+
+    // Define the central region as a smaller rectangle in the middle of the frame (half the width and height, i.e. 1/4 the area)
+    let centerRect = CGRect(
+      x: extent.origin.x + extent.size.width * 0.25,
+      y: extent.origin.y + extent.size.height * 0.25,
+      width: extent.size.width * 0.5,
+      height: extent.size.height * 0.5
+    )
+
+    // Crop the image to the centerRect
+    let croppedImage = ciImage.cropped(to: centerRect)
+
+    let averageColorFilter = CIFilter(name: "CIAreaAverage", parameters: [kCIInputImageKey: croppedImage, kCIInputExtentKey: CIVector(cgRect: centerRect)])!
+
+    guard let outputImage = averageColorFilter.outputImage else {
+      ReactLogger.log(level: .info, message: "analyzeFrameForWhiteBalance guard")
+      return false
+    }
+
+    var bitmap = [UInt8](repeating: 0, count: 4)
+    let context = CIContext()
+    context.render(outputImage, toBitmap: &bitmap, rowBytes: 4, bounds: CGRect(x: 0, y: 0, width: 1, height: 1), format: .RGBA8, colorSpace: nil)
+
+    let red = Float(bitmap[0]) / 255.0
+    let green = Float(bitmap[1]) / 255.0
+    let blue = Float(bitmap[2]) / 255.0
+
+    ReactLogger.log(level: .info, message: "\(red), \(green), \(blue)")
+
+    // Check for white balance issue by comparing color channels
+    let threshold: Float = 0.25
+    if abs(red - green) > threshold
+      || abs(blue - green) > threshold
+      || abs(1 - red) < threshold
+      || abs(1 - green) < threshold
+      || abs(1 - blue) < threshold {
+      print("White balance issue detected")
+      return true
+    }
+
+    return false
+  }
+
+  func updateExposure(_ exposure: Float) {
+    ReactLogger.log(level: .info, message: "Updating exposure: [\(exposure)]")
+
+    cameraSession.configure { config in
+      config.exposure = exposure
+    }
+  }
+}
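The processFrameIfNeeded addition is a plain time-based throttle: the first frame is processed immediately, then frames are dropped until 10 seconds have elapsed since the last processed one. A minimal standalone sketch of the same pattern, using nothing beyond Foundation (the Throttle name is illustrative, not part of the diff):

import Foundation

/// Runs work at most once per `interval`, letting the very first call through.
final class Throttle {
  private let interval: TimeInterval
  private var last: Date?

  init(interval: TimeInterval) { self.interval = interval }

  func run(_ work: () -> Void) {
    let now = Date()
    // Drop the call if the previous run was less than `interval` ago.
    if let last, now.timeIntervalSince(last) < interval { return }
    last = now
    work()
  }
}

// With a 10-second interval, only the first call executes:
let throttle = Throttle(interval: 10.0)
throttle.run { print("processed") }
throttle.run { print("skipped") }  // dropped: within 10s of the first run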
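analyzeFrameForWhiteBalance leans on CIAreaAverage, which collapses the sampled region to a 1x1 image whose single pixel holds the mean color; rendering that pixel into a 4-byte RGBA bitmap yields the per-channel averages the threshold check inspects. A self-contained sketch of that sampling step (the averageColor helper is illustrative, not part of the diff):

import CoreImage

/// Mean color of `rect` within `image`, via the CIAreaAverage filter.
func averageColor(of image: CIImage, in rect: CGRect) -> (red: Float, green: Float, blue: Float)? {
  guard let filter = CIFilter(name: "CIAreaAverage",
                              parameters: [kCIInputImageKey: image,
                                           kCIInputExtentKey: CIVector(cgRect: rect)]),
        let output = filter.outputImage else { return nil }

  // CIAreaAverage outputs a 1x1 image; read its single RGBA pixel.
  var bitmap = [UInt8](repeating: 0, count: 4)
  CIContext().render(output, toBitmap: &bitmap, rowBytes: 4,
                     bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
                     format: .RGBA8, colorSpace: nil)
  return (Float(bitmap[0]) / 255, Float(bitmap[1]) / 255, Float(bitmap[2]) / 255)
}

// A solid red frame averages to roughly (1, 0, 0), which trips the
// abs(red - green) > 0.25 channel-difference check above.
let red = CIImage(color: .red).cropped(to: CGRect(x: 0, y: 0, width: 64, height: 64))
if let avg = averageColor(of: red, in: red.extent) {
  print(avg)  // ≈ (red: 1.0, green: 0.0, blue: 0.0)
}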
@@ -195,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
       self.delegate?.onSessionInitialized()
     }
 
+    self.delegate?.onCameraConfigurationChanged(config, difference)
     // After configuring, set this to the new configuration.
     self.configuration = config
   } catch {
@@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject {
   Called when the [CameraSession] successfully initializes
   */
  func onSessionInitialized()
+
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
  /**
   Called when the [CameraSession] starts streaming frames. (isActive=true)
   */
@@ -32,28 +32,36 @@ extension AVCaptureOutput {
   func setOrientation(_ orientation: Orientation) {
     // Set orientation for each connection
     for connection in connections {
-      #if swift(>=5.9)
-        if #available(iOS 17.0, *) {
-          // Camera Sensors are always in landscape rotation (90deg).
-          // We are setting the target rotation here, so we need to rotate by landscape once.
-          let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
-          let degrees = cameraOrientation.toDegrees()
-
-          // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
-          // Does that work when we flip the camera?
-          if connection.isVideoRotationAngleSupported(degrees) {
-            connection.videoRotationAngle = degrees
-          }
-        } else {
-          if connection.isVideoOrientationSupported {
-            connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
-          }
-        }
-      #else
-        if connection.isVideoOrientationSupported {
-          connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
-        }
-      #endif
+      connection.setOrientation(orientation)
     }
   }
 }
+
+
+
+extension AVCaptureConnection {
+  func setOrientation(_ orientation: Orientation) {
+    #if swift(>=5.9)
+      if #available(iOS 17.0, *) {
+        // Camera Sensors are always in landscape rotation (90deg).
+        // We are setting the target rotation here, so we need to rotate by landscape once.
+        let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
+        let degrees = cameraOrientation.toDegrees()
+
+        // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
+        // Does that work when we flip the camera?
+        if isVideoRotationAngleSupported(degrees) {
+          videoRotationAngle = degrees
+        }
+      } else {
+        if isVideoOrientationSupported {
+          videoOrientation = orientation.toAVCaptureVideoOrientation()
+        }
+      }
+    #else
+      if isVideoOrientationSupported {
+        videoOrientation = orientation.toAVCaptureVideoOrientation()
+      }
+    #endif
+  }
+}
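This refactor moves the availability-gated rotation logic onto AVCaptureConnection so capture outputs and the preview layer's connection share one code path. The underlying pattern: iOS 17 deprecates videoOrientation in favor of videoRotationAngle (an angle in degrees), and the `#if swift(>=5.9)` wrapper keeps the file compiling on older toolchains that don't know the iOS 17 symbols. A minimal sketch of just that gate, independent of the project's Orientation type (applyRotation is illustrative, not part of the diff):

import AVFoundation

/// Applies a rotation to a capture connection, preferring the iOS 17 API.
func applyRotation(degrees: CGFloat,
                   legacy orientation: AVCaptureVideoOrientation,
                   to connection: AVCaptureConnection) {
  if #available(iOS 17.0, *) {
    // iOS 17+: rotation is expressed as an angle in degrees.
    if connection.isVideoRotationAngleSupported(degrees) {
      connection.videoRotationAngle = degrees
    }
  } else {
    // Pre-iOS 17: rotation is expressed as a videoOrientation case.
    if connection.isVideoOrientationSupported {
      connection.videoOrientation = orientation
    }
  }
}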
@@ -113,5 +113,19 @@ class ViewController: UIViewController {
     }
   }
 
+  override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
+    switch UIDevice.current.orientation {
+    case .landscapeLeft:
+      cameraView.orientation = "landscape-right"
+    case .landscapeRight:
+      cameraView.orientation = "landscape-left"
+    default:
+      cameraView.orientation = "portrait"
+    }
+
+    cameraView.didSetProps([])
+    super.viewWillTransition(to: size, with: coordinator)
+  }
+
 }
 
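The landscape cases are deliberately swapped: UIDeviceOrientation describes the hardware, while the preview must match the interface, and the two are mirrored in landscape (a device rotated left presents a landscape-right interface). A small sketch isolating that mapping (the function name is illustrative; the strings are the orientation values the sample assigns to CameraView):

import UIKit

/// Maps the hardware orientation to the matching interface-orientation string.
func interfaceOrientation(for device: UIDeviceOrientation) -> String {
  switch device {
  case .landscapeLeft:  return "landscape-right"  // device left ⇒ interface right
  case .landscapeRight: return "landscape-left"   // device right ⇒ interface left
  default:              return "portrait"
  }
}

Note that .faceUp, .faceDown, .portraitUpsideDown, and .unknown all fall through to "portrait" here, which is why the sample resets the preview to portrait when the device lies flat.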