Compare commits


No commits in common. "main" and "ivan/attempt-merge" have entirely different histories.

17 changed files with 57 additions and 208 deletions

View File

@@ -72,10 +72,6 @@ class CameraView(context: Context) :
  var zoom: Float = 1f // in "factor"
  var exposure: Double = 1.0
  var orientation: Orientation = Orientation.PORTRAIT
-    set(value) {
-      field = value
-      previewView.orientation = value
-    }
  var enableZoomGesture = false
    set(value) {
      field = value

View File

@@ -10,7 +10,6 @@ import com.mrousavy.camera.types.CodeScannerOptions
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.PixelFormat
import com.mrousavy.camera.types.ResizeMode
-import android.util.Log
import com.mrousavy.camera.types.Torch
import com.mrousavy.camera.types.VideoStabilizationMode
@@ -183,7 +182,6 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
  fun setOrientation(view: CameraView, orientation: String?) {
    if (orientation != null) {
      val newMode = Orientation.fromUnionValue(orientation)
-      Log.i(TAG, "Orientation set to: $newMode")
      view.orientation = newMode
    } else {
      view.orientation = Orientation.PORTRAIT

View File

@@ -312,7 +312,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
        enableHdr
      )
      outputs.add(output)
-      // Size is usually landscape, so we flip it here
+      previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation)
    }

View File

@@ -83,7 +83,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
  }

  // Muxer specific
-  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat, val callbacks: CameraSession.Callback,) {
+  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat) {
    val videoTrack: Int = muxer.addTrack(encodedFormat)
    val startTimeUs: Long = startTimeUs
@@ -95,14 +95,16 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
    fun finish() {
      muxer.stop()
      muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex)
    }
  }

  private var muxerContext: MuxerContext? = null

  private fun createNextMuxer(bufferInfo: BufferInfo) {
-    muxerContext?.finish()
+    muxerContext?.let {
+      it.finish()
+      this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex)
+    }
    chunkIndex++
    val newFileName = "$chunkIndex.mp4"
@@ -114,7 +116,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
    )
    muxer.setOrientationHint(orientationHint)
    muxerContext = MuxerContext(
-      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
+      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!
    )
  }
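
Note on the hunks above: the chunk-ready callback moves out of `MuxerContext.finish()` and into the caller, so the muxer is fully stopped and released before consumers are told the file is ready. A self-contained sketch of that ordering (Swift, hypothetical types; not code from this PR):

```swift
import Foundation

// Hypothetical sketch of "finalize first, then notify": the chunk file is only
// guaranteed complete after the writer is stopped, so the callback fires after.
final class ChunkRotator {
  private(set) var chunkIndex = 0
  private var currentFile: URL?
  private let onChunkReady: (URL, Int) -> Void

  init(onChunkReady: @escaping (URL, Int) -> Void) {
    self.onChunkReady = onChunkReady
  }

  func startNextChunk(file: URL) {
    if let finished = currentFile {
      // A real implementation would stop/release its muxer here first...
      onChunkReady(finished, chunkIndex) // ...then notify, as in this PR.
    }
    chunkIndex += 1
    currentFile = file
  }
}
```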

View File

@@ -1,7 +1,6 @@
package com.mrousavy.camera.core

import android.annotation.SuppressLint
-import android.content.res.Configuration
import android.content.Context
import android.graphics.Point
import android.util.Log
@@ -37,13 +36,6 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
      updateLayout()
    }
  }

-  var orientation: Orientation = Orientation.PORTRAIT
-    set(value) {
-      if (field != value) {
-        Log.i(TAG, "View Orientation changed: $field -> $value")
-        field = value
-      }
-    }
  private var inputOrientation: Orientation = Orientation.LANDSCAPE_LEFT
    set(value) {
      if (field != value) {
@@ -98,14 +90,17 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
    }
  }

-  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
-    var contentSize = contentSize
-    var androidOrientation = context.getResources().getConfiguration().orientation;
-    if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
-      contentSize = Size(contentSize.height, contentSize.width)
-    }
+  override fun requestLayout() {
+    super.requestLayout()
+    // Manually trigger measure & layout, as RN on Android skips those.
+    // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
+    post {
+      measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
+      layout(left, top, right, bottom)
+    }
+  }
+
+  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
    val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
    val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
    if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {
@@ -133,11 +128,11 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
  override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
    super.onMeasure(widthMeasureSpec, heightMeasureSpec)
-    val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
+    val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
    val surfaceSize = size.rotatedBy(inputOrientation)
-    val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode)
+    val fittedSize = getSize(surfaceSize, viewSize, resizeMode)

-    Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)")
+    Log.i(TAG, "PreviewView is $viewSize, rendering $surfaceSize content ($inputOrientation). Resizing to: $fittedSize ($resizeMode)")
    setMeasuredDimension(fittedSize.width, fittedSize.height)
  }
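
The body of `getSize` is truncated in this view; for reference, the contain/cover math it implements looks roughly like the following sketch (Swift, hypothetical names; the PR's Kotlin version operates on its own `Size`/`ResizeMode` types):

```swift
import CoreGraphics

// Sketch (not from this PR): aspect-ratio fitting of content into a container.
// "cover" fills the container (content may be cropped); "contain" fits inside it.
enum FitMode { case cover, contain }

func fittedSize(content: CGSize, container: CGSize, mode: FitMode) -> CGSize {
  let widthRatio = container.width / content.width
  let heightRatio = container.height / content.height
  let scale = (mode == .cover) ? max(widthRatio, heightRatio) : min(widthRatio, heightRatio)
  return CGSize(width: content.width * scale, height: content.height * scale)
}
```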

View File

@@ -50,12 +50,4 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
  func resumeRecording(promise: Promise) {
    cameraSession.resumeRecording(promise: promise)
  }
-
-  func lockExposure(promise: Promise) {
-    cameraSession.lockCurrentExposure(promise: promise)
-  }
-
-  func unlockExposure(promise: Promise) {
-    cameraSession.unlockCurrentExposure(promise: promise)
-  }
}

View File

@@ -303,15 +303,6 @@ public final class CameraView: UIView, CameraSessionDelegate {
    }
    onInitialized([:])
  }

-  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
-    guard let configuration, let difference else { return }
-    if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
-      let videoPreviewLayer = previewView.videoPreviewLayer
-      connection.setOrientation(configuration.orientation)
-    }
-  }
-
  func onCameraStarted() {
    ReactLogger.log(level: .info, message: "Camera started!")
View File

@@ -86,13 +86,5 @@ RCT_EXTERN_METHOD(focus
                  : (NSDictionary*)point resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);

-RCT_EXTERN_METHOD(lockCurrentExposure
-                  : (nonnull NSNumber*)node resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject);
-RCT_EXTERN_METHOD(unlockCurrentExposure
-                  : (nonnull NSNumber*)node resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject);

@end

View File

@@ -110,18 +110,6 @@ final class CameraViewManager: RCTViewManager {
      resolve(result.descriptor)
    }
  }

-  @objc
-  final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
-    let component = getCameraView(withTag: node)
-    component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
-  }
-
-  @objc
-  final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
-    let component = getCameraView(withTag: node)
-    component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
-  }
-
  // MARK: Private

View File

@@ -34,7 +34,7 @@ extension CameraSession {
}
let enableAudio = self.configuration?.audio != .disabled
// Callback for when new chunks are ready
let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
guard let delegate = self.delegate else {
@@ -191,68 +191,4 @@ extension CameraSession {
        }
      }
    }
-
-  func lockCurrentExposure(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-          print("No capture device available")
-          return
-        }
-
-        guard captureDevice.isExposureModeSupported(.custom) else {
-          ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
-          return
-        }
-
-        do {
-          // Lock the device for configuration
-          try captureDevice.lockForConfiguration()
-
-          // Get the current exposure duration and ISO
-          let currentExposureDuration = captureDevice.exposureDuration
-          let currentISO = captureDevice.iso
-
-          // Check if the device supports custom exposure settings
-          if captureDevice.isExposureModeSupported(.custom) {
-            // Lock the current exposure and ISO by setting custom exposure mode
-            captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
-            ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
-          } else {
-            ReactLogger.log(level: .info, message:"Custom exposure mode not supported")
-          }
-
-          // Unlock the device after configuration
-          captureDevice.unlockForConfiguration()
-        } catch {
-          ReactLogger.log(level: .warning, message:"Error locking exposure: \(error)")
-        }
-
-        return nil
-      }
-    }
-  }
-
-  func unlockCurrentExposure(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-          print("No capture device available")
-          return
-        }
-
-        do {
-          if captureDevice.isExposureModeSupported(.autoExpose) {
-            try captureDevice.lockForConfiguration()
-            captureDevice.exposureMode = .continuousAutoExposure
-            captureDevice.unlockForConfiguration()
-          }
-        } catch {
-          ReactLogger.log(level: .warning, message:"Error unlocking exposure: \(error)")
-        }
-
-        return nil
-      }
-    }
-  }
}
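
Side note: the removed implementation above queries `AVCaptureDevice.default(for: .video)` rather than the session's active device. A condensed sketch of the same lock/unlock flow written against an explicitly-passed device (hypothetical helper, not part of this PR):

```swift
import AVFoundation

// Hypothetical helper (not from this PR): lock or release exposure on an
// explicitly-passed device instead of AVCaptureDevice.default(for: .video).
func setExposureLocked(_ locked: Bool, on device: AVCaptureDevice) throws {
  try device.lockForConfiguration()
  defer { device.unlockForConfiguration() }
  if locked, device.isExposureModeSupported(.custom) {
    // Freeze the current duration/ISO by switching to custom exposure mode.
    device.setExposureModeCustom(duration: device.exposureDuration,
                                 iso: device.iso,
                                 completionHandler: nil)
  } else if !locked, device.isExposureModeSupported(.continuousAutoExposure) {
    device.exposureMode = .continuousAutoExposure
  }
}
```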

View File

@@ -195,7 +195,6 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
          self.delegate?.onSessionInitialized()
        }

-        self.delegate?.onCameraConfigurationChanged(config, difference)
        // After configuring, set this to the new configuration.
        self.configuration = config
      } catch {

View File

@@ -21,8 +21,6 @@ protocol CameraSessionDelegate: AnyObject {
   Called when the [CameraSession] successfully initializes
   */
  func onSessionInitialized()
-
-  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
  /**
   Called when the [CameraSession] starts streaming frames. (isActive=true)
   */

View File

@@ -11,7 +11,7 @@ import AVFoundation
class ChunkedRecorder: NSObject {
enum ChunkType {
case initialization
case data(index: UInt64, duration: CMTime?)
@@ -21,12 +21,12 @@ class ChunkedRecorder: NSObject {
let url: URL
let type: ChunkType
}
let outputURL: URL
let onChunkReady: ((Chunk) -> Void)
private var chunkIndex: UInt64 = 0
init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
self.outputURL = outputURL
self.onChunkReady = onChunkReady
@@ -34,16 +34,16 @@ class ChunkedRecorder: NSObject {
throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
}
}
}
extension ChunkedRecorder: AVAssetWriterDelegate {
-  func assetWriter(_ writer: AVAssetWriter,
+  func assetWriter(_ writer: AVAssetWriter,
didOutputSegmentData segmentData: Data,
segmentType: AVAssetSegmentType,
segmentReport: AVAssetSegmentReport?) {
switch segmentType {
case .initialization:
saveInitSegment(segmentData)
@@ -53,13 +53,13 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
fatalError("Unknown AVAssetSegmentType!")
}
}
private func saveInitSegment(_ data: Data) {
let url = outputURL.appendingPathComponent("init.mp4")
save(data: data, url: url)
onChunkReady(url: url, type: .initialization)
}
private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
let name = "\(chunkIndex).mp4"
let url = outputURL.appendingPathComponent(name)
@@ -72,7 +72,7 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
chunkIndex += 1
}
private func save(data: Data, url: URL) {
do {
try data.write(to: url)
@@ -80,9 +80,9 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
}
}
private func onChunkReady(url: URL, type: ChunkType) {
onChunkReady(Chunk(url: url, type: type))
}
}
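
Since the recorder emits an fMP4 initialization segment (`init.mp4`) followed by numbered data segments, a finished recording can be reassembled by simple concatenation. A minimal sketch (hypothetical helper, assuming the naming scheme above; not code from this PR):

```swift
import Foundation

// Sketch (not from this PR): fMP4 data segments become playable once the
// initialization segment is prepended, so a finished recording can be
// reassembled by concatenating init.mp4 + 0.mp4 + 1.mp4 + ... in order.
func assembleRecording(in directory: URL, chunkCount: Int, to output: URL) throws {
  var data = try Data(contentsOf: directory.appendingPathComponent("init.mp4"))
  for index in 0..<chunkCount {
    data.append(try Data(contentsOf: directory.appendingPathComponent("\(index).mp4")))
  }
  try data.write(to: output)
}
```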

View File

@@ -84,7 +84,7 @@ class RecordingSession {
assetWriter.shouldOptimizeForNetworkUse = false
assetWriter.outputFileTypeProfile = .mpeg4AppleHLS
assetWriter.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
/*
Apple HLS fMP4 does not have an Edit List Box ('elst') in an initialization segment to remove
audio priming duration which advanced audio formats like AAC have, since the sample tables
@@ -95,7 +95,7 @@ class RecordingSession {
*/
let startTimeOffset = CMTime(value: 10, timescale: 1)
assetWriter.initialSegmentStartTime = startTimeOffset
assetWriter.delegate = recorder
} catch let error as NSError {
throw CameraError.capture(.createRecorderError(message: error.description))

View File

@@ -32,36 +32,28 @@ extension AVCaptureOutput {
  func setOrientation(_ orientation: Orientation) {
    // Set orientation for each connection
    for connection in connections {
-      connection.setOrientation(orientation)
+      #if swift(>=5.9)
+        if #available(iOS 17.0, *) {
+          // Camera Sensors are always in landscape rotation (90deg).
+          // We are setting the target rotation here, so we need to rotate by landscape once.
+          let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
+          let degrees = cameraOrientation.toDegrees()
+
+          // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
+          //       Does that work when we flip the camera?
+          if connection.isVideoRotationAngleSupported(degrees) {
+            connection.videoRotationAngle = degrees
+          }
+        } else {
+          if connection.isVideoOrientationSupported {
+            connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+          }
+        }
+      #else
+        if connection.isVideoOrientationSupported {
+          connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+        }
+      #endif
    }
  }
}
-
-extension AVCaptureConnection {
-  func setOrientation(_ orientation: Orientation) {
-    #if swift(>=5.9)
-      if #available(iOS 17.0, *) {
-        // Camera Sensors are always in landscape rotation (90deg).
-        // We are setting the target rotation here, so we need to rotate by landscape once.
-        let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
-        let degrees = cameraOrientation.toDegrees()
-
-        // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
-        //       Does that work when we flip the camera?
-        if isVideoRotationAngleSupported(degrees) {
-          videoRotationAngle = degrees
-        }
-      } else {
-        if isVideoOrientationSupported {
-          videoOrientation = orientation.toAVCaptureVideoOrientation()
-        }
-      }
-    #else
-      if isVideoOrientationSupported {
-        videoOrientation = orientation.toAVCaptureVideoOrientation()
-      }
-    #endif
-  }
-}
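
Hypothetical usage of the inlined `setOrientation(_:)` above, re-applying orientation to every output on a running session (a sketch; `Orientation` is the project's own type):

```swift
import AVFoundation

// Sketch (not from this PR): propagate a UI rotation to all attached outputs.
func updateOrientation(_ orientation: Orientation, on session: AVCaptureSession) {
  for output in session.outputs {
    output.setOrientation(orientation)
  }
}
```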

View File

@@ -113,19 +113,5 @@ class ViewController: UIViewController {
    }
  }

-  override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
-    switch UIDevice.current.orientation {
-    case .landscapeLeft:
-      cameraView.orientation = "landscape-right"
-    case .landscapeRight:
-      cameraView.orientation = "landscape-left"
-    default:
-      cameraView.orientation = "portrait"
-    }
-
-    cameraView.didSetProps([])
-    super.viewWillTransition(to: size, with: coordinator)
-  }
}

View File

@@ -319,22 +319,6 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
      throw tryParseNativeCameraError(e)
    }
  }

-  public async lockCurrentExposure(): Promise<void> {
-    try {
-      return await CameraModule.lockCurrentExposure(this.handle)
-    } catch (e) {
-      throw tryParseNativeCameraError(e)
-    }
-  }
-
-  public async unlockCurrentExposure(): Promise<void> {
-    try {
-      return await CameraModule.unlockCurrentExposure(this.handle)
-    } catch (e) {
-      throw tryParseNativeCameraError(e)
-    }
-  }
-
  //#endregion

  //#region Static Functions (NativeModule)