Compare commits
4 Commits
dean/fix-a...imalison/f
| Author | SHA1 | Date |
|---|---|---|
| | a2d218580c | |
| | 61863149c0 | |
| | 09b50938d2 | |
| | a158ed8350 | |
```diff
@@ -178,6 +178,10 @@ dependencies {
     implementation "com.facebook.react:react-android:+"
     implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3"
 
+    // Media3 muxer for fragmented MP4 (HLS-compatible) recording
+    implementation "androidx.media3:media3-muxer:1.5.0"
+    implementation "androidx.media3:media3-common:1.5.0"
+
     if (enableCodeScanner) {
         // User enabled code-scanner, so we bundle the 2.4 MB model in the app.
         implementation 'com.google.mlkit:barcode-scanning:17.2.0'
```
```diff
@@ -40,15 +40,26 @@ fun CameraView.invokeOnStopped() {
   this.sendEvent(event)
 }
 
-fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
-  Log.e(CameraView.TAG, "invokeOnError(...):")
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
+  Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
   val event = Arguments.createMap()
   event.putInt("index", index)
   event.putString("filepath", filepath.toString())
+  if (durationUs != null) {
+    event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
+  }
   val reactContext = context as ReactContext
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
 }
 
+fun CameraView.invokeOnInitReady(filepath: File) {
+  Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
+  val event = Arguments.createMap()
+  event.putString("filepath", filepath.toString())
+  val reactContext = context as ReactContext
+  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
+}
+
 fun CameraView.invokeOnError(error: Throwable) {
   Log.e(CameraView.TAG, "invokeOnError(...):")
   error.printStackTrace()
```
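For reference, a minimal sketch (not part of this diff) of the payload shape that `invokeOnChunkReady` builds above. The keys `"index"`, `"filepath"`, `"duration"` and the microsecond-to-second conversion come from the diff; the `ChunkPayload` type and `toPayload` helper are hypothetical stand-ins used only to illustrate the conversion:

```kotlin
// Hypothetical illustration of the event payload produced by invokeOnChunkReady.
// Field names match the WritableMap keys in the diff; this type is not part of the change.
data class ChunkPayload(val index: Int, val filepath: String, val durationSeconds: Double?)

fun toPayload(filepath: String, index: Int, durationUs: Long?): ChunkPayload =
    ChunkPayload(
        index = index,
        filepath = filepath,
        // Same conversion as the diff: microseconds -> seconds
        durationSeconds = durationUs?.let { it / 1_000_000.0 }
    )

fun main() {
    // A segment reported as 6_000_000 µs is surfaced to JS as duration = 6.0 seconds
    println(toPayload("/some/output/dir/3.mp4", 3, 6_000_000L))
}
```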
```diff
@@ -271,8 +271,12 @@ class CameraView(context: Context) :
     invokeOnStopped()
   }
 
-  override fun onVideoChunkReady(filepath: File, index: Int) {
-    invokeOnChunkReady(filepath, index)
+  override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
+    invokeOnChunkReady(filepath, index, durationUs)
+  }
+
+  override fun onInitSegmentReady(filepath: File) {
+    invokeOnInitReady(filepath)
   }
 
   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {
```
```diff
@@ -32,6 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
       .put("cameraError", MapBuilder.of("registrationName", "onError"))
       .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
       .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
+      .put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
       .build()?.toMutableMap()
 
   override fun getName(): String = TAG
```
```diff
@@ -15,6 +15,7 @@ import android.util.Log
 import android.util.Size
 import android.view.Surface
 import android.view.SurfaceHolder
+import android.view.WindowManager
 import androidx.core.content.ContextCompat
 import com.google.mlkit.vision.barcode.common.Barcode
 import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
```
```diff
@@ -425,6 +426,21 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
 
     val fps = configuration?.fps ?: 30
 
+    // Get actual device rotation from WindowManager since the React Native orientation hook
+    // doesn't update when rotating between landscape-left and landscape-right on Android.
+    // Map device rotation to the correct orientationHint for video recording:
+    // - Counter-clockwise (ROTATION_90) → 270° hint
+    // - Clockwise (ROTATION_270) → 90° hint
+    val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+    val deviceRotation = windowManager.defaultDisplay.rotation
+    val recordingOrientation = when (deviceRotation) {
+      Surface.ROTATION_0 -> Orientation.PORTRAIT
+      Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
+      Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+      Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
+      else -> Orientation.PORTRAIT
+    }
+
     val recording = RecordingSession(
       context,
       cameraId,
```
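A self-contained sketch of the rotation-to-orientation mapping added above, useful for sanity-checking the two landscape cases called out in the comments. The enum and its degree values are illustrative stand-ins for the library's `Orientation` type (the 270°/90° hints are taken from the diff comments, not from the library source):

```kotlin
import android.view.Surface

// Illustrative stand-in for com.mrousavy.camera.types.Orientation; only for this example.
enum class OrientationExample(val orientationHintDegrees: Int) {
    PORTRAIT(0), LANDSCAPE_RIGHT(270), PORTRAIT_UPSIDE_DOWN(180), LANDSCAPE_LEFT(90)
}

// Same when-expression as the diff: device rotation -> recording orientation.
fun recordingOrientationFor(deviceRotation: Int): OrientationExample = when (deviceRotation) {
    Surface.ROTATION_0 -> OrientationExample.PORTRAIT
    Surface.ROTATION_90 -> OrientationExample.LANDSCAPE_RIGHT    // counter-clockwise -> 270° hint
    Surface.ROTATION_180 -> OrientationExample.PORTRAIT_UPSIDE_DOWN
    Surface.ROTATION_270 -> OrientationExample.LANDSCAPE_LEFT    // clockwise -> 90° hint
    else -> OrientationExample.PORTRAIT
}
```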
```diff
@@ -497,7 +513,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     fun onInitialized()
     fun onStarted()
     fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int)
+    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
+    fun onInitSegmentReady(filepath: File)
     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
   }
 }
```
```diff
@@ -14,7 +14,7 @@ import java.io.File
 import java.nio.ByteBuffer
 
 class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
-  MediaCodec.Callback() {
+  MediaCodec.Callback(), ChunkedRecorderInterface {
   companion object {
     private const val TAG = "ChunkedRecorder"
 
```
```diff
@@ -73,7 +73,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
 
   private val targetDurationUs = iFrameInterval * 1000000
 
-  val surface: Surface = encoder.createInputSurface()
+  override val surface: Surface = encoder.createInputSurface()
 
   init {
     if (!this.outputDirectory.exists()) {
```
```diff
@@ -95,7 +95,9 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     fun finish() {
       muxer.stop()
       muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex)
+      // Calculate duration from start time - this is approximate
+      // The new FragmentedRecordingManager provides accurate duration
+      callbacks.onVideoChunkReady(filepath, chunkIndex, null)
     }
   }
 
```
```diff
@@ -133,12 +135,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     return bufferInfo.presentationTimeUs - context.startTimeUs
   }
 
-  fun start() {
+  override fun start() {
     encoder.start()
     recording = true
   }
 
-  fun finish() {
+  override fun finish() {
     synchronized(this) {
       muxerContext?.finish()
       recording = false
```
```diff
@@ -0,0 +1,15 @@
+package com.mrousavy.camera.core
+
+import android.view.Surface
+
+/**
+ * Common interface for chunked video recorders.
+ * Implemented by both ChunkedRecordingManager (regular MP4) and
+ * FragmentedRecordingManager (HLS-compatible fMP4).
+ */
+interface ChunkedRecorderInterface {
+  val surface: Surface
+
+  fun start()
+  fun finish()
+}
```
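A minimal sketch (not part of the diff) of how a recorder implementing this interface is meant to be driven: the encoder's input surface is handed to the capture pipeline, the recorder is started, and it is finished when recording stops. In the actual change this role is played by `RecordingSession`; the `RecorderDriver` class and `createRecorder` factory below are hypothetical and stand in for the `fromParams(...)` builders shown elsewhere in this diff:

```kotlin
import android.view.Surface

// Hypothetical driver illustrating the ChunkedRecorderInterface lifecycle.
class RecorderDriver(private val createRecorder: () -> ChunkedRecorderInterface) {
    private var recorder: ChunkedRecorderInterface? = null

    // Returns the Surface the camera capture session should render into.
    fun begin(): Surface {
        val r = createRecorder()
        recorder = r
        r.start()            // starts the underlying MediaCodec encoder
        return r.surface     // encoder input surface, attached to the repeating capture request
    }

    // Flushes the last segment and releases the encoder.
    fun end() {
        recorder?.finish()
        recorder = null
    }
}
```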
```diff
@@ -0,0 +1,332 @@
+package com.mrousavy.camera.core
+
+import android.media.MediaCodec
+import android.media.MediaCodec.BufferInfo
+import android.media.MediaCodecInfo
+import android.media.MediaFormat
+import android.util.Log
+import android.util.Size
+import android.view.Surface
+import androidx.media3.common.Format
+import androidx.media3.common.MimeTypes
+import androidx.media3.common.util.UnstableApi
+import androidx.media3.muxer.FragmentedMp4Muxer
+import androidx.media3.muxer.Muxer
+import com.mrousavy.camera.types.Orientation
+import com.mrousavy.camera.types.RecordVideoOptions
+import java.io.File
+import java.io.FileOutputStream
+import java.nio.ByteBuffer
+
+/**
+ * A recording manager that produces HLS-compatible fragmented MP4 segments.
+ *
+ * This produces output similar to the iOS implementation:
+ * - An initialization segment (init.mp4) containing codec configuration
+ * - Numbered data segments (0.mp4, 1.mp4, ...) containing media data
+ *
+ * Uses AndroidX Media3's FragmentedMp4Muxer which produces proper fMP4 output.
+ */
+@UnstableApi
+class FragmentedRecordingManager(
+  private val encoder: MediaCodec,
+  private val outputDirectory: File,
+  private val orientationDegrees: Int,
+  private val targetSegmentDurationUs: Long,
+  private val callbacks: CameraSession.Callback
+) : MediaCodec.Callback(), ChunkedRecorderInterface {
+
+  companion object {
+    private const val TAG = "FragmentedRecorder"
+
+    fun fromParams(
+      callbacks: CameraSession.Callback,
+      size: Size,
+      enableAudio: Boolean,
+      fps: Int? = null,
+      cameraOrientation: Orientation,
+      bitRate: Int,
+      options: RecordVideoOptions,
+      outputDirectory: File,
+      segmentDurationSeconds: Int = 6
+    ): FragmentedRecordingManager {
+      val mimeType = options.videoCodec.toMimeType()
+      val cameraOrientationDegrees = cameraOrientation.toDegrees()
+      val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
+
+      val (width, height) = if (cameraOrientation.isLandscape()) {
+        size.height to size.width
+      } else {
+        size.width to size.height
+      }
+
+      val format = MediaFormat.createVideoFormat(mimeType, width, height)
+      val codec = MediaCodec.createEncoderByType(mimeType)
+
+      format.setInteger(
+        MediaFormat.KEY_COLOR_FORMAT,
+        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
+      )
+      fps?.apply {
+        format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
+      }
+      // I-frame interval affects segment boundaries
+      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
+      format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
+
+      Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
+
+      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
+
+      return FragmentedRecordingManager(
+        codec,
+        outputDirectory,
+        recordingOrientationDegrees,
+        segmentDurationSeconds * 1_000_000L,
+        callbacks
+      )
+    }
+  }
+
+  // State management
+  private var chunkIndex = 0
+  private var encodedFormat: MediaFormat? = null
+  private var recording = false
+
+  // Segment tracking
+  private var segmentContext: SegmentContext? = null
+  private var initSegmentEmitted = false
+
+  override val surface: Surface = encoder.createInputSurface()
+
+  init {
+    if (!outputDirectory.exists()) {
+      outputDirectory.mkdirs()
+    }
+    encoder.setCallback(this)
+  }
+
+  /**
+   * Context for a single data segment being written.
+   * Init segments are created separately via createInitSegment().
+   */
+  private inner class SegmentContext(
+    private val format: MediaFormat,
+    private val segmentIndex: Int
+  ) {
+    private val filename = "$segmentIndex.mp4"
+    private val file = File(outputDirectory, filename)
+    private val outputStream = FileOutputStream(file)
+    private val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
+    private lateinit var videoTrack: Muxer.TrackToken
+    private var startTimeUs: Long = -1L
+    private var lastTimeUs: Long = 0L
+    private var sampleCount = 0
+
+    init {
+      val media3Format = convertToMedia3Format(format)
+      videoTrack = muxer.addTrack(media3Format)
+      Log.d(TAG, "Created segment context: $filename")
+    }
+
+    fun writeSample(buffer: ByteBuffer, bufferInfo: BufferInfo): Boolean {
+      if (startTimeUs < 0) {
+        startTimeUs = bufferInfo.presentationTimeUs
+      }
+      lastTimeUs = bufferInfo.presentationTimeUs
+
+      val isKeyFrame = (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
+
+      muxer.writeSampleData(videoTrack, buffer, bufferInfo)
+      sampleCount++
+
+      // Check if we should start a new segment at the next keyframe
+      if (isKeyFrame && sampleCount > 1) {
+        val segmentDurationUs = bufferInfo.presentationTimeUs - startTimeUs
+        if (segmentDurationUs >= targetSegmentDurationUs) {
+          return true // Signal to create new segment
+        }
+      }
+
+      return false
+    }
+
+    fun finish(): Long {
+      try {
+        muxer.close()
+        outputStream.close()
+      } catch (e: Exception) {
+        Log.e(TAG, "Error closing segment", e)
+      }
+
+      val durationUs = if (lastTimeUs > startTimeUs) lastTimeUs - startTimeUs else 0L
+      callbacks.onVideoChunkReady(file, segmentIndex, durationUs)
+
+      Log.d(TAG, "Finished segment: $filename, samples=$sampleCount, duration=${durationUs / 1000}ms")
+      return durationUs
+    }
+  }
+
+  private fun createNewSegment() {
+    val format = encodedFormat
+    if (format == null) {
+      Log.e(TAG, "Cannot create segment: encodedFormat is null")
+      return
+    }
+
+    // Close previous segment
+    segmentContext?.finish()
+
+    // Create new data segment (init segments are created separately)
+    segmentContext = SegmentContext(format, chunkIndex)
+    chunkIndex++
+  }
+
+  override fun start() {
+    encoder.start()
+    recording = true
+  }
+
+  override fun finish() {
+    synchronized(this) {
+      recording = false
+      segmentContext?.finish()
+      segmentContext = null
+      try {
+        encoder.stop()
+        encoder.release()
+      } catch (e: Exception) {
+        Log.e(TAG, "Error stopping encoder", e)
+      }
+    }
+  }
+
+  // MediaCodec.Callback methods
+  override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
+    // Not used for Surface input
+  }
+
+  override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
+    synchronized(this) {
+      if (!recording) {
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      val encodedData = encoder.getOutputBuffer(index)
+      if (encodedData == null) {
+        Log.e(TAG, "getOutputBuffer returned null")
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      // Wait until init segment is emitted (happens in onOutputFormatChanged)
+      if (!initSegmentEmitted) {
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      // Create first data segment if needed
+      if (segmentContext == null) {
+        createNewSegment()
+      }
+
+      val context = segmentContext
+      if (context == null) {
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      try {
+        val shouldStartNewSegment = context.writeSample(encodedData, bufferInfo)
+        if (shouldStartNewSegment) {
+          createNewSegment()
+          // Write this keyframe to the new segment as well
+          segmentContext?.writeSample(encodedData, bufferInfo)
+        }
+      } catch (e: Exception) {
+        Log.e(TAG, "Error writing sample", e)
+      }
+
+      encoder.releaseOutputBuffer(index, false)
+    }
+  }
+
+  override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
+    Log.e(TAG, "Codec error: ${e.message}")
+  }
+
+  override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
+    Log.i(TAG, "Output format changed: $format")
+    encodedFormat = format
+
+    // Create the init segment immediately when we get the format
+    // This produces an fMP4 file with just ftyp + moov (no samples)
+    if (!initSegmentEmitted) {
+      createInitSegment(format)
+      initSegmentEmitted = true
+    }
+  }
+
+  /**
+   * Creates an initialization segment containing only codec configuration (ftyp + moov).
+   * This is done by creating a muxer, adding the track, and immediately closing it
+   * without writing any samples.
+   */
+  private fun createInitSegment(format: MediaFormat) {
+    val initFile = File(outputDirectory, "init.mp4")
+    try {
+      val outputStream = FileOutputStream(initFile)
+      val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
+
+      // Convert and add the track
+      val media3Format = convertToMedia3Format(format)
+      muxer.addTrack(media3Format)
+
+      // Close immediately - this writes just the header (ftyp + moov)
+      muxer.close()
+      outputStream.close()
+
+      Log.d(TAG, "Created init segment: ${initFile.absolutePath}")
+      callbacks.onInitSegmentReady(initFile)
+    } catch (e: Exception) {
+      Log.e(TAG, "Error creating init segment", e)
+    }
+  }
+
+  private fun convertToMedia3Format(mediaFormat: MediaFormat): Format {
+    val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME) ?: MimeTypes.VIDEO_H264
+    val width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH)
+    val height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)
+    val bitRate = try { mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE) } catch (e: Exception) { -1 }
+    val frameRate = try { mediaFormat.getInteger(MediaFormat.KEY_FRAME_RATE) } catch (e: Exception) { -1 }
+
+    // Get CSD (Codec Specific Data) if available - required for init segment
+    val csd0 = mediaFormat.getByteBuffer("csd-0")
+    val csd1 = mediaFormat.getByteBuffer("csd-1")
+
+    val initData = mutableListOf<ByteArray>()
+    csd0?.let {
+      val bytes = ByteArray(it.remaining())
+      it.duplicate().get(bytes)
+      initData.add(bytes)
+    }
+    csd1?.let {
+      val bytes = ByteArray(it.remaining())
+      it.duplicate().get(bytes)
+      initData.add(bytes)
+    }
+
+    return Format.Builder()
+      .setSampleMimeType(mimeType)
+      .setWidth(width)
+      .setHeight(height)
+      .setRotationDegrees(orientationDegrees)
+      .apply {
+        if (bitRate > 0) setAverageBitrate(bitRate)
+        if (frameRate > 0) setFrameRate(frameRate.toFloat())
+        if (initData.isNotEmpty()) setInitializationData(initData)
+      }
+      .build()
+  }
+}
```
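Since the manager emits init.mp4 plus numbered fMP4 segments and reports a duration for each one, a consumer can stitch them into an HLS media playlist. The helper below is illustrative only (no such helper exists in this diff); it assumes per-segment durations in seconds collected from the `onVideoChunkReady` events and uses standard HLS tags (`#EXT-X-MAP` for the init segment, `#EXTINF` per segment):

```kotlin
import java.io.File

// Hypothetical helper: builds an HLS media playlist for the files produced by
// FragmentedRecordingManager (init.mp4 + 0.mp4, 1.mp4, ...). Durations are the
// per-segment values reported through onVideoChunkReady, already converted to seconds.
fun writeHlsPlaylist(
    outputDirectory: File,
    segmentDurationsSeconds: List<Double>,
    targetDuration: Int = 6 // matches SEGMENT_DURATION_SECONDS in this diff
): File {
    val playlist = buildString {
        appendLine("#EXTM3U")
        appendLine("#EXT-X-VERSION:7")                 // fMP4 segments require protocol version 6+
        appendLine("#EXT-X-TARGETDURATION:$targetDuration")
        appendLine("#EXT-X-MEDIA-SEQUENCE:0")
        appendLine("#EXT-X-MAP:URI=\"init.mp4\"")      // initialization segment
        segmentDurationsSeconds.forEachIndexed { index, duration ->
            appendLine("#EXTINF:${"%.3f".format(duration)},")
            appendLine("$index.mp4")                   // numbered data segment
        }
        appendLine("#EXT-X-ENDLIST")
    }
    return File(outputDirectory, "playlist.m3u8").apply { writeText(playlist) }
}
```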
```diff
@@ -9,8 +9,10 @@ import android.os.Looper
 import android.util.Log
 import android.util.Size
 import android.view.PixelCopy
+import android.view.Surface
 import android.view.SurfaceHolder
 import android.view.SurfaceView
+import android.view.WindowManager
 import com.facebook.react.bridge.UiThreadUtil
 import com.mrousavy.camera.extensions.resize
 import com.mrousavy.camera.extensions.rotatedBy
```
```diff
@@ -150,6 +152,8 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
     val width = frame.width()
     val height = frame.height()
 
+    // Create bitmap matching surface frame dimensions for PixelCopy
+    // The original code swapped dimensions assuming landscape input - keep that for consistency
     val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
 
     // Use a coroutine to suspend until the PixelCopy request is complete
```
```diff
@@ -159,7 +163,23 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
       bitmap,
       { copyResult ->
         if (copyResult == PixelCopy.SUCCESS) {
-          continuation.resume(rotateBitmap90CounterClockwise(bitmap))
+          // Get actual device rotation from WindowManager instead of relying on
+          // the orientation prop, which may not update on Android when rotating
+          // between landscape-left and landscape-right.
+          val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+          val deviceRotation = windowManager.defaultDisplay.rotation
+
+          val actualOrientation = when (deviceRotation) {
+            Surface.ROTATION_0 -> Orientation.PORTRAIT
+            Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
+            Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+            Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
+            else -> Orientation.PORTRAIT
+          }
+
+          Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
+
+          continuation.resume(bitmap.transformBitmap(actualOrientation))
         } else {
           continuation.resumeWithException(
             RuntimeException("PixelCopy failed with error code $copyResult")
```
```diff
@@ -4,6 +4,7 @@ import android.content.Context
 import android.util.Log
 import android.util.Size
 import android.view.Surface
+import androidx.media3.common.util.UnstableApi
 import com.facebook.common.statfs.StatFsHelper
 import com.mrousavy.camera.extensions.getRecommendedBitRate
 import com.mrousavy.camera.types.Orientation
```
```diff
@@ -14,6 +15,8 @@ import android.os.Environment
 import java.text.SimpleDateFormat
 import java.util.Locale
 import java.util.Date
+
+@UnstableApi
 class RecordingSession(
   context: Context,
   val cameraId: String,
```
```diff
@@ -27,6 +30,8 @@ class RecordingSession(
   private val callback: (video: Video) -> Unit,
   private val onError: (error: CameraError) -> Unit,
   private val allCallbacks: CameraSession.Callback,
+  // Use the new FragmentedMp4Muxer-based recorder for HLS-compatible output
+  private val useFragmentedMp4: Boolean = true
 ) {
   companion object {
     private const val TAG = "RecordingSession"
```
```diff
@@ -34,6 +39,9 @@ class RecordingSession(
     private const val AUDIO_SAMPLING_RATE = 44_100
     private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
     private const val AUDIO_CHANNELS = 1
+
+    // Segment duration in seconds (matching iOS default of 6 seconds)
+    private const val SEGMENT_DURATION_SECONDS = 6
   }
 
   data class Video(val path: String, val durationMs: Long, val size: Size)
```
```diff
@@ -41,7 +49,23 @@ class RecordingSession(
   private val outputPath: File = File(filePath)
 
   private val bitRate = getBitRate()
-  private val recorder = ChunkedRecordingManager.fromParams(
+
+  // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
+  // or fall back to ChunkedRecordingManager for regular MP4 chunks
+  private val recorder: ChunkedRecorderInterface = if (useFragmentedMp4) {
+    FragmentedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath,
+      SEGMENT_DURATION_SECONDS
+    )
+  } else {
+    ChunkedRecordingManager.fromParams(
       allCallbacks,
       size,
       enableAudio,
```
```diff
@@ -51,6 +75,7 @@ class RecordingSession(
       options,
       outputPath
     )
+  }
   private var startTime: Long? = null
   val surface: Surface
     get() {
```