Compare commits

...

3 Commits

5 changed files with 1267 additions and 225 deletions

View File

@@ -178,10 +178,6 @@ dependencies {
implementation "com.facebook.react:react-android:+"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3"
// Media3 muxer for fragmented MP4 (HLS-compatible) recording
implementation "androidx.media3:media3-muxer:1.5.0"
implementation "androidx.media3:media3-common:1.5.0"
if (enableCodeScanner) {
// User enabled code-scanner, so we bundle the 2.4 MB model in the app.
implementation 'com.google.mlkit:barcode-scanning:17.2.0'

View File

@@ -429,15 +429,15 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
// Get actual device rotation from WindowManager since the React Native orientation hook
// doesn't update when rotating between landscape-left and landscape-right on Android.
// Map device rotation to the correct orientationHint for video recording:
// - Counter-clockwise (ROTATION_90) → 270° hint
// - Clockwise (ROTATION_270) → 90° hint
// - Counter-clockwise (ROTATION_90) → 90° hint
// - Clockwise (ROTATION_270) → 270° hint
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val recordingOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
else -> Orientation.PORTRAIT
}
@@ -448,7 +448,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
enableAudio,
fps,
videoOutput.enableHdr,
orientation,
recordingOrientation,
options,
filePath,
callback,

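For reference, a minimal standalone sketch of the corrected mapping above, going from the WindowManager display rotation straight to the orientationHint degrees used for recording. It assumes PORTRAIT corresponds to 0° and PORTRAIT_UPSIDE_DOWN to 180° (not shown in this hunk); the function name is illustrative and not part of the library.

import android.content.Context
import android.view.Surface
import android.view.WindowManager

// Sketch only: ROTATION_90 (counter-clockwise) -> 90°, ROTATION_270 (clockwise) -> 270°,
// matching the corrected comments in the hunk above.
fun currentRecordingOrientationDegrees(context: Context): Int {
    val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
    return when (windowManager.defaultDisplay.rotation) {
        Surface.ROTATION_0 -> 0      // PORTRAIT
        Surface.ROTATION_90 -> 90    // LANDSCAPE_LEFT
        Surface.ROTATION_180 -> 180  // PORTRAIT_UPSIDE_DOWN
        Surface.ROTATION_270 -> 270  // LANDSCAPE_RIGHT
        else -> 0
    }
}
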
View File

@@ -7,37 +7,26 @@ import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import androidx.media3.common.Format
import androidx.media3.common.MimeTypes
import androidx.media3.common.util.UnstableApi
import androidx.media3.muxer.FragmentedMp4Muxer
import androidx.media3.muxer.Muxer
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteBuffer
/**
* A recording manager that produces HLS-compatible fragmented MP4 segments.
*
* This produces output similar to the iOS implementation:
* - An initialization segment (init.mp4) containing codec configuration
* - Numbered data segments (0.mp4, 1.mp4, ...) containing media data
*
* Uses AndroidX Media3's FragmentedMp4Muxer which produces proper fMP4 output.
* Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
* - init.mp4: Initialization segment (ftyp + moov with mvex)
* - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
*/
@UnstableApi
class FragmentedRecordingManager(
private val encoder: MediaCodec,
private val outputDirectory: File,
private val orientationDegrees: Int,
private val targetSegmentDurationUs: Long,
private val callbacks: CameraSession.Callback
private val muxer: HlsMuxer,
private val configuredFps: Int
) : MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "FragmentedRecorder"
private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6
fun fromParams(
callbacks: CameraSession.Callback,
@@ -48,18 +37,25 @@ class FragmentedRecordingManager(
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
segmentDurationSeconds: Int = 6
segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees()
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
// Use cameraOrientation from Android (computed from device rotation)
// instead of options.orientation from JS which may be stale
val recordingOrientationDegrees = cameraOrientation.toDegrees()
// Swap dimensions based on orientation - same logic as ChunkedRecordingManager
// When camera is in landscape orientation, we need to swap width/height for the encoder
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
Log.d(TAG, "Input size: ${size.width}x${size.height}, " +
"encoder size: ${width}x${height}, " +
"orientation: $cameraOrientation ($recordingOrientationDegrees°)")
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
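
As a side note, here is a compact sketch of the encoder setup this hunk spreads across several lines: swap width/height when the camera orientation is landscape, and tie the keyframe interval to the segment length so every segment can begin on a keyframe. The helper name and default values are illustrative, not the library's API.

import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Size

// Sketch only: fMP4 segments must start on a keyframe, so KEY_I_FRAME_INTERVAL
// is set to the segment duration in seconds (6 s by default, as above).
fun createSegmentAlignedEncoder(size: Size, isLandscape: Boolean, bitRate: Int, fps: Int = 30, segmentSeconds: Int = 6): MediaCodec {
    val (width, height) = if (isLandscape) size.height to size.width else size.width to size.height
    val format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height).apply {
        setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
        setInteger(MediaFormat.KEY_FRAME_RATE, fps)
        setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentSeconds)
        setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
    }
    return MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC).apply {
        configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
    }
}
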
@@ -67,121 +63,48 @@ class FragmentedRecordingManager(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
fps?.apply {
format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
}
// I-frame interval affects segment boundaries
val effectiveFps = fps ?: 30
format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
Log.d(TAG, "Video Format: $format, orientation: $recordingOrientationDegrees")
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return FragmentedRecordingManager(
codec,
outputDirectory,
recordingOrientationDegrees,
segmentDurationSeconds * 1_000_000L,
callbacks
// Create muxer with callbacks and orientation
val muxer = HlsMuxer(
outputDirectory = outputDirectory,
callback = object : HlsMuxer.Callback {
override fun onInitSegmentReady(file: File) {
callbacks.onInitSegmentReady(file)
}
override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
callbacks.onVideoChunkReady(file, index, durationUs)
}
},
orientationDegrees = recordingOrientationDegrees
)
muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)
Log.d(TAG, "Created HlsMuxer with orientation: $recordingOrientationDegrees degrees, fps: $effectiveFps")
return FragmentedRecordingManager(codec, muxer, effectiveFps)
}
}
// State management
private var chunkIndex = 0
private var encodedFormat: MediaFormat? = null
private var recording = false
// Segment tracking
private var segmentContext: SegmentContext? = null
private var initSegmentEmitted = false
private var muxerStarted = false
private var trackIndex = -1
override val surface: Surface = encoder.createInputSurface()
init {
if (!outputDirectory.exists()) {
outputDirectory.mkdirs()
}
encoder.setCallback(this)
}
/**
* Context for a single data segment being written.
* Init segments are created separately via createInitSegment().
*/
private inner class SegmentContext(
private val format: MediaFormat,
private val segmentIndex: Int
) {
private val filename = "$segmentIndex.mp4"
private val file = File(outputDirectory, filename)
private val outputStream = FileOutputStream(file)
private val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
private lateinit var videoTrack: Muxer.TrackToken
private var startTimeUs: Long = -1L
private var lastTimeUs: Long = 0L
private var sampleCount = 0
init {
val media3Format = convertToMedia3Format(format)
videoTrack = muxer.addTrack(media3Format)
Log.d(TAG, "Created segment context: $filename")
}
fun writeSample(buffer: ByteBuffer, bufferInfo: BufferInfo): Boolean {
if (startTimeUs < 0) {
startTimeUs = bufferInfo.presentationTimeUs
}
lastTimeUs = bufferInfo.presentationTimeUs
val isKeyFrame = (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
muxer.writeSampleData(videoTrack, buffer, bufferInfo)
sampleCount++
// Check if we should start a new segment at the next keyframe
if (isKeyFrame && sampleCount > 1) {
val segmentDurationUs = bufferInfo.presentationTimeUs - startTimeUs
if (segmentDurationUs >= targetSegmentDurationUs) {
return true // Signal to create new segment
}
}
return false
}
fun finish(): Long {
try {
muxer.close()
outputStream.close()
} catch (e: Exception) {
Log.e(TAG, "Error closing segment", e)
}
val durationUs = if (lastTimeUs > startTimeUs) lastTimeUs - startTimeUs else 0L
callbacks.onVideoChunkReady(file, segmentIndex, durationUs)
Log.d(TAG, "Finished segment: $filename, samples=$sampleCount, duration=${durationUs/1000}ms")
return durationUs
}
}
private fun createNewSegment() {
val format = encodedFormat
if (format == null) {
Log.e(TAG, "Cannot create segment: encodedFormat is null")
return
}
// Close previous segment
segmentContext?.finish()
// Create new data segment (init segments are created separately)
segmentContext = SegmentContext(format, chunkIndex)
chunkIndex++
}
override fun start() {
encoder.start()
recording = true
@@ -190,8 +113,12 @@ class FragmentedRecordingManager(
override fun finish() {
synchronized(this) {
recording = false
segmentContext?.finish()
segmentContext = null
if (muxerStarted) {
muxer.stop()
muxer.release()
}
try {
encoder.stop()
encoder.release()
@@ -202,6 +129,7 @@ class FragmentedRecordingManager(
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Not used for Surface input
}
@@ -213,37 +141,20 @@ class FragmentedRecordingManager(
return
}
val encodedData = encoder.getOutputBuffer(index)
if (encodedData == null) {
if (!muxerStarted) {
encoder.releaseOutputBuffer(index, false)
return
}
val buffer = encoder.getOutputBuffer(index)
if (buffer == null) {
Log.e(TAG, "getOutputBuffer returned null")
encoder.releaseOutputBuffer(index, false)
return
}
// Wait until init segment is emitted (happens in onOutputFormatChanged)
if (!initSegmentEmitted) {
encoder.releaseOutputBuffer(index, false)
return
}
// Create first data segment if needed
if (segmentContext == null) {
createNewSegment()
}
val context = segmentContext
if (context == null) {
encoder.releaseOutputBuffer(index, false)
return
}
try {
val shouldStartNewSegment = context.writeSample(encodedData, bufferInfo)
if (shouldStartNewSegment) {
createNewSegment()
// Write this keyframe to the new segment as well
segmentContext?.writeSample(encodedData, bufferInfo)
}
muxer.writeSampleData(trackIndex, buffer, bufferInfo)
} catch (e: Exception) {
Log.e(TAG, "Error writing sample", e)
}
@@ -257,76 +168,13 @@ class FragmentedRecordingManager(
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
Log.i(TAG, "Output format changed: $format")
encodedFormat = format
synchronized(this) {
Log.i(TAG, "Output format changed: $format")
// Create the init segment immediately when we get the format
// This produces an fMP4 file with just ftyp + moov (no samples)
if (!initSegmentEmitted) {
createInitSegment(format)
initSegmentEmitted = true
// Pass configured fps to muxer (not the encoder's output format fps which may differ)
trackIndex = muxer.addTrack(format, configuredFps)
muxer.start()
muxerStarted = true
}
}
/**
* Creates an initialization segment containing only codec configuration (ftyp + moov).
* This is done by creating a muxer, adding the track, and immediately closing it
* without writing any samples.
*/
private fun createInitSegment(format: MediaFormat) {
val initFile = File(outputDirectory, "init.mp4")
try {
val outputStream = FileOutputStream(initFile)
val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
// Convert and add the track
val media3Format = convertToMedia3Format(format)
muxer.addTrack(media3Format)
// Close immediately - this writes just the header (ftyp + moov)
muxer.close()
outputStream.close()
Log.d(TAG, "Created init segment: ${initFile.absolutePath}")
callbacks.onInitSegmentReady(initFile)
} catch (e: Exception) {
Log.e(TAG, "Error creating init segment", e)
}
}
private fun convertToMedia3Format(mediaFormat: MediaFormat): Format {
val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME) ?: MimeTypes.VIDEO_H264
val width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH)
val height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)
val bitRate = try { mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE) } catch (e: Exception) { -1 }
val frameRate = try { mediaFormat.getInteger(MediaFormat.KEY_FRAME_RATE) } catch (e: Exception) { -1 }
// Get CSD (Codec Specific Data) if available - required for init segment
val csd0 = mediaFormat.getByteBuffer("csd-0")
val csd1 = mediaFormat.getByteBuffer("csd-1")
val initData = mutableListOf<ByteArray>()
csd0?.let {
val bytes = ByteArray(it.remaining())
it.duplicate().get(bytes)
initData.add(bytes)
}
csd1?.let {
val bytes = ByteArray(it.remaining())
it.duplicate().get(bytes)
initData.add(bytes)
}
return Format.Builder()
.setSampleMimeType(mimeType)
.setWidth(width)
.setHeight(height)
.setRotationDegrees(orientationDegrees)
.apply {
if (bitRate > 0) setAverageBitrate(bitRate)
if (frameRate > 0) setFrameRate(frameRate.toFloat())
if (initData.isNotEmpty()) setInitializationData(initData)
}
.build()
}
}
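
Taken together, the new manager is a thin MediaCodec.Callback around HlsMuxer. Below is a minimal sketch of that MediaMuxer-style lifecycle, with the HlsMuxer signatures inferred from their usage in this diff (its own source is in the suppressed file below); the sample data and logging are placeholders.

import android.media.MediaCodec
import android.media.MediaFormat
import android.util.Log
import java.io.File
import java.nio.ByteBuffer

// Sketch only: addTrack/start once the encoder reports its output format,
// writeSampleData per encoded buffer, then stop/release when recording ends.
fun driveHlsMuxer(outputDirectory: File, format: MediaFormat, fps: Int, samples: List<Pair<ByteBuffer, MediaCodec.BufferInfo>>) {
    val muxer = HlsMuxer(
        outputDirectory = outputDirectory,
        callback = object : HlsMuxer.Callback {
            override fun onInitSegmentReady(file: File) {
                Log.d("HlsMuxerDemo", "init segment ready: ${file.name}")
            }
            override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
                Log.d("HlsMuxerDemo", "segment $index ready: ${file.name} (${durationUs / 1000} ms)")
            }
        },
        orientationDegrees = 90
    )
    muxer.setSegmentDuration(6 * 1_000_000L)      // 6-second target segments
    val trackIndex = muxer.addTrack(format, fps)  // done in onOutputFormatChanged above
    muxer.start()
    for ((buffer, info) in samples) {
        muxer.writeSampleData(trackIndex, buffer, info)
    }
    muxer.stop()
    muxer.release()
}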

File diff suppressed because it is too large.

View File

@@ -4,7 +4,6 @@ import android.content.Context
import android.util.Log
import android.util.Size
import android.view.Surface
import androidx.media3.common.util.UnstableApi
import com.facebook.common.statfs.StatFsHelper
import com.mrousavy.camera.extensions.getRecommendedBitRate
import com.mrousavy.camera.types.Orientation
@@ -16,7 +15,6 @@ import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
@UnstableApi
class RecordingSession(
context: Context,
val cameraId: String,
@@ -30,7 +28,7 @@ class RecordingSession(
private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit,
private val allCallbacks: CameraSession.Callback,
// Use the new FragmentedMp4Muxer-based recorder for HLS-compatible output
// Use FragmentedRecordingManager for HLS-compatible fMP4 output
private val useFragmentedMp4: Boolean = true
) {
companion object {