12 Commits

Author SHA1 Message Date
5b52acda26 Gross hack to make things sort of work 2024-11-10 17:51:15 -07:00
17f675657e WIP 2024-11-09 19:52:05 -07:00
c64516693c Merge pull request 'Fix Preview View Aspect Ratio Orientation Issues in android' (#8) from ivan/fix-android-preview-view-aspect-ratio-orientation-issues into main
Reviewed-on: #8
2024-10-12 16:21:18 -06:00
e9f08ef488 Fix Preview View Aspect Ratio Orientation Issues in android 2024-10-12 16:20:23 -06:00
bf122db919 Merge pull request 'Ensure custom exposure mode is supported' (#7) from ivan/ensure-capture-mode-is-supported into main
Reviewed-on: #7
2024-10-10 15:18:12 -06:00
3319e48f7d Ensure custom exposure mode is supported 2024-10-10 15:17:55 -06:00
58714f9dac Merge pull request 'iOS Camera Settings' (#6) from volodymyr/ios-camera-settings into main
Reviewed-on: #6
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2024-10-10 15:12:32 -06:00
8991779851 iOS Camera Settings 2024-10-08 15:53:47 +02:00
f8efa172ba Merge pull request 'Lock exposure on start recording' (#4) from volodymyr/ios-lock-exposure into main
Reviewed-on: #4
2024-09-27 11:52:36 -06:00
66f840eecb Lock exposure on start recording 2024-09-27 10:35:29 +02:00
fcf5fe70f3 Merge branch 'ivan/fix-android-orientation' 2024-08-14 01:28:00 -06:00
Rui Rodrigues
3a20c44a31 fix preview and recording orientation
- add onCameraConfigurationChanged to CameraSessionDelegate to notify CameraView when configuration changes
- when orientation changes, update the CameraView.PreviewView.videoPreviewLayer.connection orientation value
2024-08-02 14:39:23 +01:00
14 changed files with 300 additions and 87 deletions

View File

@@ -13,69 +13,36 @@ import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.core.InsufficientStorageError
import com.mrousavy.camera.utils.FileUtils
import com.mrousavy.camera.types.Flash
import com.mrousavy.camera.types.QualityPrioritization
import com.mrousavy.camera.utils.*
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import kotlinx.coroutines.*
private const val TAG = "CameraView.takePhoto"
private const val TAG = "CameraView.takeSnapshot"
@SuppressLint("UnsafeOptInUsageError")
suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap {
val options = optionsMap.toHashMap()
Log.i(TAG, "Taking photo... Options: $options")
Log.i(TAG, "Taking snapshot... Options: $options")
val bitmap = previewView.getBitmap() ?: throw Error()
val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced"
val flash = options["flash"] as? String ?: "off"
val enableAutoStabilization = options["enableAutoStabilization"] == true
val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true
val enablePrecapture = options["enablePrecapture"] as? Boolean ?: false
val file = FileUtils.createTempFile(context, "png");
// TODO: Implement Red Eye Reduction
options["enableAutoRedEyeReduction"]
// Write snapshot to .jpg file
FileUtils.writeBitmapTofile(bitmap, file, 100)
val flashMode = Flash.fromUnionValue(flash)
val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization)
Log.i(TAG, "Successfully saved snapshot to file!")
val photo = cameraSession.takePhoto(
qualityPrioritizationMode,
flashMode,
enableShutterSound,
enableAutoStabilization,
enablePrecapture,
orientation
)
photo.use {
Log.i(TAG, "Successfully captured ${photo.image.width} x ${photo.image.height} photo!")
val cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId!!)
val path = try {
savePhotoToFile(context, cameraCharacteristics, photo)
} catch (e: IOException) {
if (e.message?.contains("no space left", true) == true) {
throw InsufficientStorageError()
} else {
throw e
}
}
Log.i(TAG, "Successfully saved photo to file! $path")
val map = Arguments.createMap()
map.putString("path", path)
map.putInt("width", photo.image.width)
map.putInt("height", photo.image.height)
map.putString("orientation", photo.orientation.unionValue)
map.putBoolean("isRawPhoto", photo.format == ImageFormat.RAW_SENSOR)
map.putBoolean("isMirrored", photo.isMirrored)
return map
}
// Parse output data
val map = Arguments.createMap()
map.putString("path", file.absolutePath)
map.putInt("width", bitmap.width)
map.putInt("height", bitmap.height)
map.putBoolean("isMirrored", false)
return map
}
private fun writePhotoToFile(photo: CameraSession.CapturedPhoto, file: File) {

View File

@@ -102,7 +102,7 @@ class CameraView(context: Context) :
// session
internal val cameraSession: CameraSession
private val previewView: PreviewView
val previewView: PreviewView
private var currentConfigureCall: Long = System.currentTimeMillis()
internal var frameProcessor: FrameProcessor? = null

View File

@@ -2,9 +2,13 @@ package com.mrousavy.camera.core
import android.annotation.SuppressLint
import android.content.Context
import android.content.res.Configuration
import android.graphics.Point
import android.os.Handler
import android.os.Looper
import android.util.Log
import android.util.Size
import android.view.PixelCopy
import android.view.SurfaceHolder
import android.view.SurfaceView
import com.facebook.react.bridge.UiThreadUtil
@@ -12,9 +16,69 @@ import com.mrousavy.camera.extensions.resize
import com.mrousavy.camera.extensions.rotatedBy
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.ResizeMode
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.math.roundToInt
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlinx.coroutines.withContext
import android.graphics.Bitmap
import android.graphics.Matrix
fun rotateBitmap90CounterClockwise(source: Bitmap): Bitmap {
val width = source.width
val height = source.height
// Create a new Bitmap with swapped width and height
val rotatedBitmap = Bitmap.createBitmap(height, width, source.config)
for (y in 0 until height) {
for (x in 0 until width) {
// Set the pixel in the new position
rotatedBitmap.setPixel(y, width - 1 - x, source.getPixel(x, y))
}
}
return rotatedBitmap
}
fun Bitmap.transformBitmap(orientation: Orientation): Bitmap {
return when (orientation) {
Orientation.PORTRAIT -> this // No transformation needed
Orientation.LANDSCAPE_LEFT -> {
// Transpose (swap width and height)
val transposedBitmap = Bitmap.createBitmap(height, width, config)
for (y in 0 until height) {
for (x in 0 until width) {
transposedBitmap.setPixel(y, width - 1 - x, getPixel(x, y))
}
}
transposedBitmap
}
Orientation.PORTRAIT_UPSIDE_DOWN -> {
// Invert vertically and horizontally (180-degree rotation)
val invertedBitmap = Bitmap.createBitmap(width, height, config)
for (y in 0 until height) {
for (x in 0 until width) {
invertedBitmap.setPixel(width - 1 - x, height - 1 - y, getPixel(x, y))
}
}
invertedBitmap
}
Orientation.LANDSCAPE_RIGHT -> {
// Transpose (swap width and height) and invert vertically
val transposedBitmap = Bitmap.createBitmap(height, width, config)
for (y in 0 until height) {
for (x in 0 until width) {
transposedBitmap.setPixel(height - 1 - y, x, getPixel(x, y))
}
}
transposedBitmap
}
}
}
@SuppressLint("ViewConstructor")
class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
@@ -80,6 +144,34 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
}
}
suspend fun getBitmap(): Bitmap? = withContext(Dispatchers.Main) {
val frame = holder.getSurfaceFrame()
val width = frame.width()
val height = frame.height()
val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
// Use a coroutine to suspend until the PixelCopy request is complete
suspendCancellableCoroutine<Bitmap?> { continuation ->
PixelCopy.request(
holder.surface,
bitmap,
{ copyResult ->
if (copyResult == PixelCopy.SUCCESS) {
continuation.resume(rotateBitmap90CounterClockwise(bitmap))
} else {
continuation.resumeWithException(
RuntimeException("PixelCopy failed with error code $copyResult")
)
}
},
Handler(Looper.getMainLooper())
)
}
}
fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point {
val sensorOrientation = cameraDeviceDetails.sensorOrientation
val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height())
@@ -97,22 +189,14 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
}
}
override fun requestLayout() {
super.requestLayout()
// Manually trigger measure & layout, as RN on Android skips those.
// See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
post {
measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
layout(left, top, right, bottom)
}
}
private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
var contentSize = contentSize
// Swap dimensions if orientation is landscape
if (orientation.isLandscape()) {
val androidOrientation = context.resources.configuration.orientation
if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
contentSize = Size(contentSize.height, contentSize.width)
}
val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {

View File

@@ -1,13 +1,33 @@
package com.mrousavy.camera.utils
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.util.Size
import java.io.File
import java.io.FileOutputStream
class FileUtils {
companion object {
fun createTempFile(context: Context, extension: String): File =
File.createTempFile("mrousavy", extension, context.cacheDir).also {
it.deleteOnExit()
fun writeBitmapTofile(bitmap: Bitmap, file: File, quality: Int) {
FileOutputStream(file).use { stream ->
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, stream)
}
}
fun getImageSize(imagePath: String): Size {
val bitmapOptions = BitmapFactory.Options().also {
it.inJustDecodeBounds = true
}
BitmapFactory.decodeFile(imagePath, bitmapOptions)
val width = bitmapOptions.outWidth
val height = bitmapOptions.outHeight
return Size(width, height)
}
fun createTempFile(context: Context, extension: String): File =
File.createTempFile("mrousavy", extension, context.cacheDir).also {
it.deleteOnExit()
}
}
}
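A brief usage sketch for the FileUtils helpers above, assuming a bitmap and a context are in scope; the "jpg" extension and quality value of 90 are illustrative choices, not taken from the PR.

// Sketch: save a bitmap via the helpers added in this diff and read its dimensions back.
val file = FileUtils.createTempFile(context, "jpg")   // temp file in the app cache dir, deleted on exit
FileUtils.writeBitmapTofile(bitmap, file, 90)         // JPEG quality 90 (illustrative value)
val size = FileUtils.getImageSize(file.absolutePath)  // decodes bounds only, not the full image
android.util.Log.i("FileUtils", "Saved ${size.width}x${size.height} image to ${file.absolutePath}")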

View File

@@ -50,4 +50,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
func resumeRecording(promise: Promise) {
cameraSession.resumeRecording(promise: promise)
}
func lockExposure(promise: Promise) {
cameraSession.lockCurrentExposure(promise: promise)
}
func unlockExposure(promise: Promise) {
cameraSession.unlockCurrentExposure(promise: promise)
}
}

View File

@@ -303,6 +303,15 @@ public final class CameraView: UIView, CameraSessionDelegate {
}
onInitialized([:])
}
func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
guard let configuration, let difference else { return }
if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
let videoPreviewLayer = previewView.videoPreviewLayer
connection.setOrientation(configuration.orientation)
}
}
func onCameraStarted() {
ReactLogger.log(level: .info, message: "Camera started!")

View File

@@ -86,5 +86,13 @@ RCT_EXTERN_METHOD(focus
: (NSDictionary*)point resolve
: (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(lockCurrentExposure
: (nonnull NSNumber*)node resolve
: (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(unlockCurrentExposure
: (nonnull NSNumber*)node resolve
: (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject);
@end

View File

@@ -110,6 +110,18 @@ final class CameraViewManager: RCTViewManager {
resolve(result.descriptor)
}
}
@objc
final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
let component = getCameraView(withTag: node)
component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
}
@objc
final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
let component = getCameraView(withTag: node)
component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
}
// MARK: Private

View File

@@ -34,7 +34,7 @@ extension CameraSession {
}
let enableAudio = self.configuration?.audio != .disabled
// Callback for when new chunks are ready
let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
guard let delegate = self.delegate else {
@@ -191,4 +191,68 @@ extension CameraSession {
}
}
}
func lockCurrentExposure(promise: Promise) {
CameraQueues.cameraQueue.async {
withPromise(promise) {
guard let captureDevice = AVCaptureDevice.default(for: .video) else {
print("No capture device available")
return nil
}
guard captureDevice.isExposureModeSupported(.custom) else {
ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
return
}
do {
// Lock the device for configuration
try captureDevice.lockForConfiguration()
// Get the current exposure duration and ISO
let currentExposureDuration = captureDevice.exposureDuration
let currentISO = captureDevice.iso
// Check if the device supports custom exposure settings
if captureDevice.isExposureModeSupported(.custom) {
// Lock the current exposure and ISO by setting custom exposure mode
captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
} else {
ReactLogger.log(level: .info, message:"Custom exposure mode not supported")
}
// Unlock the device after configuration
captureDevice.unlockForConfiguration()
} catch {
ReactLogger.log(level: .warning, message:"Error locking exposure: \(error)")
}
return nil
}
}
}
func unlockCurrentExposure(promise: Promise) {
CameraQueues.cameraQueue.async {
withPromise(promise) {
guard let captureDevice = AVCaptureDevice.default(for: .video) else {
print("No capture device available")
return nil
}
do {
if captureDevice.isExposureModeSupported(.autoExpose) {
try captureDevice.lockForConfiguration()
captureDevice.exposureMode = .continuousAutoExposure
captureDevice.unlockForConfiguration()
}
} catch {
ReactLogger.log(level: .warning, message:"Error unlocking exposure: \(error)")
}
return nil
}
}
}
}

View File

@@ -195,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
self.delegate?.onSessionInitialized()
}
self.delegate?.onCameraConfigurationChanged(config, difference)
// After configuring, set this to the new configuration.
self.configuration = config
} catch {

View File

@@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject {
Called when the [CameraSession] successfully initializes
*/
func onSessionInitialized()
func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
/**
Called when the [CameraSession] starts streaming frames. (isActive=true)
*/

View File

@@ -32,28 +32,36 @@ extension AVCaptureOutput {
func setOrientation(_ orientation: Orientation) {
// Set orientation for each connection
for connection in connections {
#if swift(>=5.9)
if #available(iOS 17.0, *) {
// Camera Sensors are always in landscape rotation (90deg).
// We are setting the target rotation here, so we need to rotate by landscape once.
let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
let degrees = cameraOrientation.toDegrees()
// TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
// Does that work when we flip the camera?
if connection.isVideoRotationAngleSupported(degrees) {
connection.videoRotationAngle = degrees
}
} else {
if connection.isVideoOrientationSupported {
connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
}
}
#else
if connection.isVideoOrientationSupported {
connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
}
#endif
connection.setOrientation(orientation)
}
}
}
extension AVCaptureConnection {
func setOrientation(_ orientation: Orientation) {
#if swift(>=5.9)
if #available(iOS 17.0, *) {
// Camera Sensors are always in landscape rotation (90deg).
// We are setting the target rotation here, so we need to rotate by landscape once.
let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
let degrees = cameraOrientation.toDegrees()
// TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
// Does that work when we flip the camera?
if isVideoRotationAngleSupported(degrees) {
videoRotationAngle = degrees
}
} else {
if isVideoOrientationSupported {
videoOrientation = orientation.toAVCaptureVideoOrientation()
}
}
#else
if isVideoOrientationSupported {
videoOrientation = orientation.toAVCaptureVideoOrientation()
}
#endif
}
}

View File

@@ -113,5 +113,19 @@ class ViewController: UIViewController {
}
}
override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
switch UIDevice.current.orientation {
case .landscapeLeft:
cameraView.orientation = "landscape-right"
case .landscapeRight:
cameraView.orientation = "landscape-left"
default:
cameraView.orientation = "portrait"
}
cameraView.didSetProps([])
super.viewWillTransition(to: size, with: coordinator)
}
}

View File

@@ -319,6 +319,22 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
throw tryParseNativeCameraError(e)
}
}
public async lockCurrentExposure(): Promise<void> {
try {
return await CameraModule.lockCurrentExposure(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e)
}
}
public async unlockCurrentExposure(): Promise<void> {
try {
return await CameraModule.unlockCurrentExposure(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e)
}
}
//#endregion
//#region Static Functions (NativeModule)