Compare commits

main..ivan/fix-last-segment-issue

No commits in common. "main" and "ivan/fix-last-segment-issue" have entirely different histories.

16 changed files with 51 additions and 204 deletions

View File

@@ -72,10 +72,6 @@ class CameraView(context: Context) :
   var zoom: Float = 1f // in "factor"
   var exposure: Double = 1.0
   var orientation: Orientation = Orientation.PORTRAIT
-    set(value) {
-      field = value
-      previewView.orientation = value
-    }
   var enableZoomGesture = false
     set(value) {
       field = value

View File

@@ -10,7 +10,6 @@ import com.mrousavy.camera.types.CodeScannerOptions
 import com.mrousavy.camera.types.Orientation
 import com.mrousavy.camera.types.PixelFormat
 import com.mrousavy.camera.types.ResizeMode
-import android.util.Log
 import com.mrousavy.camera.types.Torch
 import com.mrousavy.camera.types.VideoStabilizationMode

@@ -183,7 +182,6 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
   fun setOrientation(view: CameraView, orientation: String?) {
     if (orientation != null) {
       val newMode = Orientation.fromUnionValue(orientation)
-      Log.i(TAG, "Orientation set to: $newMode")
       view.orientation = newMode
     } else {
       view.orientation = Orientation.PORTRAIT

View File

@@ -312,7 +312,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
         enableHdr
       )
       outputs.add(output)
-      // Size is usually landscape, so we flip it here
       previewView?.setSurfaceSize(size.width, size.height, deviceDetails.sensorOrientation)
     }

View File

@@ -1,7 +1,6 @@
 package com.mrousavy.camera.core

 import android.annotation.SuppressLint
-import android.content.res.Configuration
 import android.content.Context
 import android.graphics.Point
 import android.util.Log
@@ -37,13 +36,6 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
       updateLayout()
     }
   }
-  var orientation: Orientation = Orientation.PORTRAIT
-    set(value) {
-      if (field != value) {
-        Log.i(TAG, "View Orientation changed: $field -> $value")
-        field = value
-      }
-    }
   private var inputOrientation: Orientation = Orientation.LANDSCAPE_LEFT
     set(value) {
       if (field != value) {
@@ -98,14 +90,17 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
     }
   }

-  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
-    var contentSize = contentSize
-    var androidOrientation = context.getResources().getConfiguration().orientation;
-
-    if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
-      contentSize = Size(contentSize.height, contentSize.width)
-    }
-
+  override fun requestLayout() {
+    super.requestLayout()
+    // Manually trigger measure & layout, as RN on Android skips those.
+    // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
+    post {
+      measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
+      layout(left, top, right, bottom)
+    }
+  }
+
+  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
     val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
     val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
     if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {
@@ -133,11 +128,11 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
   override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
     super.onMeasure(widthMeasureSpec, heightMeasureSpec)
-    val measuredViewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
+    val viewSize = Size(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec))
     val surfaceSize = size.rotatedBy(inputOrientation)
-    val fittedSize = getSize(surfaceSize, measuredViewSize, resizeMode)
-    Log.i(TAG, "PreviewView is $measuredViewSize rendering $surfaceSize orientation ($orientation). Resizing to: $fittedSize ($resizeMode)")
+    val fittedSize = getSize(surfaceSize, viewSize, resizeMode)
+    Log.i(TAG, "PreviewView is $viewSize, rendering $surfaceSize content ($inputOrientation). Resizing to: $fittedSize ($resizeMode)")
     setMeasuredDimension(fittedSize.width, fittedSize.height)
   }

View File

@@ -50,12 +50,4 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
   func resumeRecording(promise: Promise) {
     cameraSession.resumeRecording(promise: promise)
   }
-
-  func lockExposure(promise: Promise) {
-    cameraSession.lockCurrentExposure(promise: promise)
-  }
-
-  func unlockExposure(promise: Promise) {
-    cameraSession.unlockCurrentExposure(promise: promise)
-  }
 }

View File

@@ -303,15 +303,6 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
     onInitialized([:])
   }
-
-  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
-    guard let configuration, let difference else { return }
-
-    if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
-      let videoPreviewLayer = previewView.videoPreviewLayer
-      connection.setOrientation(configuration.orientation)
-    }
-  }

   func onCameraStarted() {
     ReactLogger.log(level: .info, message: "Camera started!")

View File

@@ -86,13 +86,5 @@ RCT_EXTERN_METHOD(focus
                   : (NSDictionary*)point resolve
                   : (RCTPromiseResolveBlock)resolve reject
                   : (RCTPromiseRejectBlock)reject);
-RCT_EXTERN_METHOD(lockCurrentExposure
-                  : (nonnull NSNumber*)node resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject);
-RCT_EXTERN_METHOD(unlockCurrentExposure
-                  : (nonnull NSNumber*)node resolve
-                  : (RCTPromiseResolveBlock)resolve reject
-                  : (RCTPromiseRejectBlock)reject);

 @end

View File

@@ -110,18 +110,6 @@ final class CameraViewManager: RCTViewManager {
       resolve(result.descriptor)
     }
   }
-
-  @objc
-  final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
-    let component = getCameraView(withTag: node)
-    component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
-  }
-
-  @objc
-  final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
-    let component = getCameraView(withTag: node)
-    component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
-  }

   // MARK: Private

View File

@@ -34,7 +34,7 @@ extension CameraSession {
     }
     let enableAudio = self.configuration?.audio != .disabled

     // Callback for when new chunks are ready
     let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
       guard let delegate = self.delegate else {
@@ -191,68 +191,4 @@ extension CameraSession {
       }
     }
   }
-
-  func lockCurrentExposure(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-          print("No capture device available")
-          return
-        }
-
-        guard captureDevice.isExposureModeSupported(.custom) else {
-          ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
-          return
-        }
-        do {
-          // Lock the device for configuration
-          try captureDevice.lockForConfiguration()
-
-          // Get the current exposure duration and ISO
-          let currentExposureDuration = captureDevice.exposureDuration
-          let currentISO = captureDevice.iso
-
-          // Check if the device supports custom exposure settings
-          if captureDevice.isExposureModeSupported(.custom) {
-            // Lock the current exposure and ISO by setting custom exposure mode
-            captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
-            ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
-          } else {
-            ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
-          }
-
-          // Unlock the device after configuration
-          captureDevice.unlockForConfiguration()
-        } catch {
-          ReactLogger.log(level: .warning, message: "Error locking exposure: \(error)")
-        }
-
-        return nil
-      }
-    }
-  }
-
-  func unlockCurrentExposure(promise: Promise) {
-    CameraQueues.cameraQueue.async {
-      withPromise(promise) {
-        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-          print("No capture device available")
-          return
-        }
-
-        do {
-          if captureDevice.isExposureModeSupported(.autoExpose) {
-            try captureDevice.lockForConfiguration()
-            captureDevice.exposureMode = .continuousAutoExposure
-            captureDevice.unlockForConfiguration()
-          }
-        } catch {
-          ReactLogger.log(level: .warning, message: "Error unlocking exposure: \(error)")
-        }
-
-        return nil
-      }
-    }
-  }
 }
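
Note for anyone reintroducing the exposure-lock feature later: the removed implementation resolved its device via AVCaptureDevice.default(for: .video), which is not necessarily the device the running session actually uses (for example after flipping to the front camera). A minimal sketch of resolving the active device from the session instead; the function name and session parameter are illustrative, not part of this repo:

import AVFoundation

// Sketch: find the video device actually attached to a running session,
// instead of whatever AVCaptureDevice.default(for:) happens to return.
func activeVideoDevice(in session: AVCaptureSession) -> AVCaptureDevice? {
  return session.inputs
    .compactMap { $0 as? AVCaptureDeviceInput }
    .first { $0.device.hasMediaType(.video) }?
    .device
}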

View File

@@ -195,7 +195,6 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
         self.delegate?.onSessionInitialized()
       }
-      self.delegate?.onCameraConfigurationChanged(config, difference)

       // After configuring, set this to the new configuration.
       self.configuration = config
     } catch {

View File

@@ -21,8 +21,6 @@ protocol CameraSessionDelegate: AnyObject {
   Called when the [CameraSession] successfully initializes
   */
  func onSessionInitialized()
-
- func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
  /**
   Called when the [CameraSession] starts streaming frames. (isActive=true)
   */

View File

@@ -11,7 +11,7 @@ import AVFoundation

 class ChunkedRecorder: NSObject {

   enum ChunkType {
     case initialization
     case data(index: UInt64, duration: CMTime?)

@@ -21,12 +21,12 @@ class ChunkedRecorder: NSObject {
     let url: URL
     let type: ChunkType
   }

   let outputURL: URL
   let onChunkReady: ((Chunk) -> Void)

   private var chunkIndex: UInt64 = 0

   init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
     self.outputURL = outputURL
     self.onChunkReady = onChunkReady

@@ -34,16 +34,16 @@ class ChunkedRecorder: NSObject {
       throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
     }
   }

 }

 extension ChunkedRecorder: AVAssetWriterDelegate {

   func assetWriter(_ writer: AVAssetWriter,
                    didOutputSegmentData segmentData: Data,
                    segmentType: AVAssetSegmentType,
                    segmentReport: AVAssetSegmentReport?) {

     switch segmentType {
     case .initialization:
       saveInitSegment(segmentData)

@@ -53,13 +53,13 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
       fatalError("Unknown AVAssetSegmentType!")
     }
   }

   private func saveInitSegment(_ data: Data) {
     let url = outputURL.appendingPathComponent("init.mp4")
     save(data: data, url: url)
     onChunkReady(url: url, type: .initialization)
   }

   private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
     let name = "\(chunkIndex).mp4"
     let url = outputURL.appendingPathComponent(name)

@@ -72,7 +72,7 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
     onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
     chunkIndex += 1
   }

   private func save(data: Data, url: URL) {
     do {
       try data.write(to: url)

@@ -80,9 +80,9 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
       ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
     }
   }

   private func onChunkReady(url: URL, type: ChunkType) {
     onChunkReady(Chunk(url: url, type: type))
   }

 }
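
For context on the delegate this file implements: since iOS 14, AVAssetWriter can hand finished fragmented-MP4 segments to a delegate instead of writing a single output file. A minimal sketch of the callback semantics ChunkedRecorder builds on; the class name is illustrative:

import AVFoundation

// Sketch: the writer emits one .initialization segment (the fMP4 header, saved
// as init.mp4 above), then a stream of .separable media segments whose report
// carries per-track timing.
final class SegmentLogger: NSObject, AVAssetWriterDelegate {
  func assetWriter(_ writer: AVAssetWriter,
                   didOutputSegmentData segmentData: Data,
                   segmentType: AVAssetSegmentType,
                   segmentReport: AVAssetSegmentReport?) {
    switch segmentType {
    case .initialization:
      print("init segment: \(segmentData.count) bytes")
    case .separable:
      let duration = segmentReport?.trackReports.first?.duration
      print("media segment: \(segmentData.count) bytes, duration \(String(describing: duration))")
    @unknown default:
      break
    }
  }
}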

View File

@@ -84,7 +84,7 @@ class RecordingSession {
       assetWriter.shouldOptimizeForNetworkUse = false
       assetWriter.outputFileTypeProfile = .mpeg4AppleHLS
       assetWriter.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)

       /*
        Apple HLS fMP4 does not have an Edit List Box ('elst') in an initialization segment to remove
        audio priming duration which advanced audio formats like AAC have, since the sample tables

@@ -95,7 +95,7 @@ class RecordingSession {
        */
       let startTimeOffset = CMTime(value: 10, timescale: 1)
       assetWriter.initialSegmentStartTime = startTimeOffset

       assetWriter.delegate = recorder
     } catch let error as NSError {
       throw CameraError.capture(.createRecorderError(message: error.description))
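
The comment block above carries the key reasoning in this file: AAC encoder priming shifts sample timestamps, and because HLS fMP4 init segments carry no 'elst' box to absorb that shift, the writer is given a fixed positive start time so no timestamp can go negative. A condensed sketch of the resulting configuration, assuming a recorder that conforms to AVAssetWriterDelegate (the 6s interval and 10s offset mirror the diff; chunk consumers must subtract the offset):

import AVFoundation
import UniformTypeIdentifiers

// Sketch: delegate-driven AVAssetWriter emitting ~6-second HLS fMP4 segments.
let writer = AVAssetWriter(contentType: .mpeg4Movie)
writer.shouldOptimizeForNetworkUse = false
writer.outputFileTypeProfile = .mpeg4AppleHLS
writer.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
// Start segment timestamps at 10s so audio priming cannot push them negative.
writer.initialSegmentStartTime = CMTime(value: 10, timescale: 1)
writer.delegate = recorder // assumed to conform to AVAssetWriterDelegate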

View File

@@ -32,36 +32,28 @@ extension AVCaptureOutput {
   func setOrientation(_ orientation: Orientation) {
     // Set orientation for each connection
     for connection in connections {
-      connection.setOrientation(orientation)
+      #if swift(>=5.9)
+        if #available(iOS 17.0, *) {
+          // Camera Sensors are always in landscape rotation (90deg).
+          // We are setting the target rotation here, so we need to rotate by landscape once.
+          let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
+          let degrees = cameraOrientation.toDegrees()
+
+          // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
+          //       Does that work when we flip the camera?
+          if connection.isVideoRotationAngleSupported(degrees) {
+            connection.videoRotationAngle = degrees
+          }
+        } else {
+          if connection.isVideoOrientationSupported {
+            connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+          }
+        }
+      #else
+        if connection.isVideoOrientationSupported {
+          connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
+        }
+      #endif
     }
   }
 }
-
-extension AVCaptureConnection {
-  func setOrientation(_ orientation: Orientation) {
-    #if swift(>=5.9)
-      if #available(iOS 17.0, *) {
-        // Camera Sensors are always in landscape rotation (90deg).
-        // We are setting the target rotation here, so we need to rotate by landscape once.
-        let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
-        let degrees = cameraOrientation.toDegrees()
-
-        // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
-        //       Does that work when we flip the camera?
-        if isVideoRotationAngleSupported(degrees) {
-          videoRotationAngle = degrees
-        }
-      } else {
-        if isVideoOrientationSupported {
-          videoOrientation = orientation.toAVCaptureVideoOrientation()
-        }
-      }
-    #else
-      if isVideoOrientationSupported {
-        videoOrientation = orientation.toAVCaptureVideoOrientation()
-      }
-    #endif
-  }
-}
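
On the TODO above about rotation overhead: iOS 17 also introduced AVCaptureDevice.RotationCoordinator, which publishes the rotation angle to apply per device, so the manual sensor-offset math (rotateBy(orientation: .landscapeLeft)) could potentially be dropped. A sketch under that assumption; device, previewLayer, and connection here are placeholders:

import AVFoundation

// Sketch: let the system compute rotation angles on iOS 17+.
if #available(iOS 17.0, *) {
  let coordinator = AVCaptureDevice.RotationCoordinator(device: device, previewLayer: previewLayer)
  // Gravity-relative angle for recorded output; use
  // videoRotationAngleForHorizonLevelPreview for the preview layer instead.
  let angle = coordinator.videoRotationAngleForHorizonLevelCapture
  if connection.isVideoRotationAngleSupported(angle) {
    connection.videoRotationAngle = angle
  }
}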

View File

@@ -113,19 +113,5 @@ class ViewController: UIViewController {
     }
   }

-  override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
-    switch UIDevice.current.orientation {
-    case .landscapeLeft:
-      cameraView.orientation = "landscape-right"
-    case .landscapeRight:
-      cameraView.orientation = "landscape-left"
-    default:
-      cameraView.orientation = "portrait"
-    }
-
-    cameraView.didSetProps([])
-    super.viewWillTransition(to: size, with: coordinator)
-  }
 }

View File

@@ -319,22 +319,6 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
       throw tryParseNativeCameraError(e)
     }
   }
-
-  public async lockCurrentExposure(): Promise<void> {
-    try {
-      return await CameraModule.lockCurrentExposure(this.handle)
-    } catch (e) {
-      throw tryParseNativeCameraError(e)
-    }
-  }
-
-  public async unlockCurrentExposure(): Promise<void> {
-    try {
-      return await CameraModule.unlockCurrentExposure(this.handle)
-    } catch (e) {
-      throw tryParseNativeCameraError(e)
-    }
-  }
   //#endregion

   //#region Static Functions (NativeModule)