3 Commits

Author SHA1 Message Date
Rui Rodrigues
3a20c44a31 fix preview and recording orientation
- add onCameraConfigurationChanged to CameraSessionDelegate to notify CameraView when the configuration changes
- when the orientation changes, update the orientation of CameraView.PreviewView.videoPreviewLayer.connection (see the summary below)
2024-08-02 14:39:23 +01:00
7c162fecb1 Remove trailing whitespace 2024-07-28 16:37:20 -06:00
b28a152471 Fix last segment issue 2024-07-24 21:00:27 -06:00
8 changed files with 75 additions and 43 deletions
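
In short, commit 3a20c44a31 wires orientation changes through three of the Swift files below: CameraSession calls delegate?.onCameraConfigurationChanged(config, difference) after applying a new configuration, CameraSessionDelegate declares that method as a new requirement, and CameraView implements it so that, when difference.orientationChanged is set, it calls setOrientation(configuration.orientation) on previewView.videoPreviewLayer.connection, reusing the AVCaptureConnection helper extracted near the end of the diff.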

View File

@@ -83,7 +83,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   }
 
   // Muxer specific
-  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat) {
+  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat, val callbacks: CameraSession.Callback,) {
     val videoTrack: Int = muxer.addTrack(encodedFormat)
     val startTimeUs: Long = startTimeUs
@@ -95,16 +95,14 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     fun finish() {
       muxer.stop()
       muxer.release()
+      callbacks.onVideoChunkReady(filepath, chunkIndex)
     }
   }
 
   private var muxerContext: MuxerContext? = null
 
   private fun createNextMuxer(bufferInfo: BufferInfo) {
-    muxerContext?.let {
-      it.finish()
-      this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex)
-    }
+    muxerContext?.finish()
     chunkIndex++
 
     val newFileName = "$chunkIndex.mp4"
@@ -116,7 +114,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     )
     muxer.setOrientationHint(orientationHint)
     muxerContext = MuxerContext(
-      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!
+      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
     )
   }
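
Moving the onVideoChunkReady callback into MuxerContext.finish() means it fires for every finished chunk, including the final one written when recording stops rather than only when rolling over to the next muxer; this appears to be the "last segment issue" referenced in b28a152471.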

View File

@@ -303,6 +303,15 @@ public final class CameraView: UIView, CameraSessionDelegate {
     }
     onInitialized([:])
   }
 
+  func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?) {
+    guard let configuration, let difference else { return }
+
+    if difference.orientationChanged, let connection = previewView.videoPreviewLayer.connection {
+      let videoPreviewLayer = previewView.videoPreviewLayer
+      connection.setOrientation(configuration.orientation)
+    }
+  }
+
   func onCameraStarted() {
     ReactLogger.log(level: .info, message: "Camera started!")

View File

@@ -195,6 +195,7 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
         self.delegate?.onSessionInitialized()
       }
 
+      self.delegate?.onCameraConfigurationChanged(config, difference)
       // After configuring, set this to the new configuration.
       self.configuration = config
     } catch {

View File

@@ -21,6 +21,8 @@ protocol CameraSessionDelegate: AnyObject {
   Called when the [CameraSession] successfully initializes
   */
  func onSessionInitialized()
+
+ func onCameraConfigurationChanged(_ configuration: CameraConfiguration?, _ difference: CameraConfiguration.Difference?)
  /**
   Called when the [CameraSession] starts streaming frames. (isActive=true)
   */

View File

@@ -11,7 +11,7 @@ import AVFoundation
 
 class ChunkedRecorder: NSObject {
 
   enum ChunkType {
     case initialization
     case data(index: UInt64, duration: CMTime?)
@@ -21,12 +21,12 @@ class ChunkedRecorder: NSObject {
     let url: URL
     let type: ChunkType
   }
 
   let outputURL: URL
   let onChunkReady: ((Chunk) -> Void)
 
   private var chunkIndex: UInt64 = 0
 
   init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
     self.outputURL = outputURL
     self.onChunkReady = onChunkReady
@@ -34,16 +34,16 @@ class ChunkedRecorder: NSObject {
       throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
     }
   }
 
 }
 
 extension ChunkedRecorder: AVAssetWriterDelegate {
 
   func assetWriter(_ writer: AVAssetWriter,
                    didOutputSegmentData segmentData: Data,
                    segmentType: AVAssetSegmentType,
                    segmentReport: AVAssetSegmentReport?) {
     switch segmentType {
     case .initialization:
       saveInitSegment(segmentData)
@@ -53,13 +53,13 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
       fatalError("Unknown AVAssetSegmentType!")
     }
   }
 
   private func saveInitSegment(_ data: Data) {
     let url = outputURL.appendingPathComponent("init.mp4")
     save(data: data, url: url)
     onChunkReady(url: url, type: .initialization)
   }
 
   private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
     let name = "\(chunkIndex).mp4"
     let url = outputURL.appendingPathComponent(name)
@@ -72,7 +72,7 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
     onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
     chunkIndex += 1
   }
 
   private func save(data: Data, url: URL) {
     do {
       try data.write(to: url)
@@ -80,9 +80,9 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
       ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
     }
   }
 
   private func onChunkReady(url: URL, type: ChunkType) {
     onChunkReady(Chunk(url: url, type: type))
   }
 
 }

View File

@@ -84,7 +84,7 @@ class RecordingSession {
       assetWriter.shouldOptimizeForNetworkUse = false
       assetWriter.outputFileTypeProfile = .mpeg4AppleHLS
       assetWriter.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
 
       /*
        Apple HLS fMP4 does not have an Edit List Box ('elst') in an initialization segment to remove
        audio priming duration which advanced audio formats like AAC have, since the sample tables
@@ -95,7 +95,7 @@ class RecordingSession {
        */
       let startTimeOffset = CMTime(value: 10, timescale: 1)
       assetWriter.initialSegmentStartTime = startTimeOffset
 
       assetWriter.delegate = recorder
     } catch let error as NSError {
       throw CameraError.capture(.createRecorderError(message: error.description))
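
The hunks above show the segmented-output configuration only in fragments. A minimal, self-contained sketch of the same AVAssetWriter setup (illustrative only; SegmentSink and makeSegmentedWriter are not names from this repository):

import AVFoundation
import CoreMedia
import UniformTypeIdentifiers

// Receives fMP4 segments as they are produced (ChunkedRecorder, above, writes them to numbered files).
final class SegmentSink: NSObject, AVAssetWriterDelegate {
  func assetWriter(_ writer: AVAssetWriter,
                   didOutputSegmentData segmentData: Data,
                   segmentType: AVAssetSegmentType,
                   segmentReport: AVAssetSegmentReport?) {
    // .initialization arrives once (the init segment); .separable arrives for each media chunk.
    print("segment type \(segmentType), \(segmentData.count) bytes")
  }
}

func makeSegmentedWriter(delegate: any AVAssetWriterDelegate) -> AVAssetWriter {
  let writer = AVAssetWriter(contentType: .mpeg4Movie)
  writer.shouldOptimizeForNetworkUse = false
  writer.outputFileTypeProfile = .mpeg4AppleHLS
  writer.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
  // Offset the timeline because fMP4 init segments carry no edit list to trim AAC priming.
  writer.initialSegmentStartTime = CMTime(value: 10, timescale: 1)
  writer.delegate = delegate // AVAssetWriter holds its delegate weakly; keep a strong reference elsewhere.
  return writer
}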

View File

@@ -32,28 +32,36 @@ extension AVCaptureOutput {
   func setOrientation(_ orientation: Orientation) {
     // Set orientation for each connection
     for connection in connections {
-      #if swift(>=5.9)
-        if #available(iOS 17.0, *) {
-          // Camera Sensors are always in landscape rotation (90deg).
-          // We are setting the target rotation here, so we need to rotate by landscape once.
-          let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
-          let degrees = cameraOrientation.toDegrees()
-          // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
-          // Does that work when we flip the camera?
-          if connection.isVideoRotationAngleSupported(degrees) {
-            connection.videoRotationAngle = degrees
-          }
-        } else {
-          if connection.isVideoOrientationSupported {
-            connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
-          }
-        }
-      #else
-        if connection.isVideoOrientationSupported {
-          connection.videoOrientation = orientation.toAVCaptureVideoOrientation()
-        }
-      #endif
+      connection.setOrientation(orientation)
     }
   }
 }
+
+extension AVCaptureConnection {
+  func setOrientation(_ orientation: Orientation) {
+    #if swift(>=5.9)
+      if #available(iOS 17.0, *) {
+        // Camera Sensors are always in landscape rotation (90deg).
+        // We are setting the target rotation here, so we need to rotate by landscape once.
+        let cameraOrientation = orientation.rotateBy(orientation: .landscapeLeft)
+        let degrees = cameraOrientation.toDegrees()
+        // TODO: Don't rotate the video output because it adds overhead. Instead just use EXIF flags for the .mp4 file if recording.
+        // Does that work when we flip the camera?
+        if isVideoRotationAngleSupported(degrees) {
+          videoRotationAngle = degrees
+        }
+      } else {
+        if isVideoOrientationSupported {
+          videoOrientation = orientation.toAVCaptureVideoOrientation()
+        }
+      }
+    #else
+      if isVideoOrientationSupported {
+        videoOrientation = orientation.toAVCaptureVideoOrientation()
+      }
+    #endif
+  }
+}
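
With setOrientation(_:) moved onto AVCaptureConnection, capture-output connections and the preview layer's connection share the same rotation logic. Illustrative call sites (videoOutput and newOrientation are placeholder names, not identifiers from this diff):

// Capture outputs keep the old entry point, which now loops over their connections:
videoOutput.setOrientation(newOrientation)
// The preview layer's connection, as used in CameraView.onCameraConfigurationChanged:
previewView.videoPreviewLayer.connection?.setOrientation(newOrientation)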

View File

@@ -113,5 +113,19 @@ class ViewController: UIViewController {
     }
   }
 
+  override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
+    switch UIDevice.current.orientation {
+    case .landscapeLeft:
+      cameraView.orientation = "landscape-right"
+    case .landscapeRight:
+      cameraView.orientation = "landscape-left"
+    default:
+      cameraView.orientation = "portrait"
+    }
+
+    cameraView.didSetProps([])
+
+    super.viewWillTransition(to: size, with: coordinator)
+  }
 }
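
Note the apparent swap in the mapping above: UIDeviceOrientation.landscapeLeft (device rotated with the home indicator on the right) corresponds to the landscape-right interface orientation, and vice versa, so the example app passes the interface-style value the camera view expects. The manual cameraView.didSetProps([]) call appears to be there to force CameraView to re-apply its props, and thus push the new orientation into the capture session immediately.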