Merge pull request 'Android Fmp4' (#17) from loewy/android-fmp4-normalize-timestamp-fix-fps into main

Reviewed-on: #17
2026-01-06 17:21:29 +00:00
12 changed files with 1309 additions and 33 deletions

View File

@@ -40,15 +40,26 @@ fun CameraView.invokeOnStopped() {
   this.sendEvent(event)
 }
 
-fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
-  Log.e(CameraView.TAG, "invokeOnError(...):")
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
+  Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
   val event = Arguments.createMap()
   event.putInt("index", index)
   event.putString("filepath", filepath.toString())
+  if (durationUs != null) {
+    event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
+  }
   val reactContext = context as ReactContext
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
 }
 
+fun CameraView.invokeOnInitReady(filepath: File) {
+  Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
+  val event = Arguments.createMap()
+  event.putString("filepath", filepath.toString())
+  val reactContext = context as ReactContext
+  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
+}
+
 fun CameraView.invokeOnError(error: Throwable) {
   Log.e(CameraView.TAG, "invokeOnError(...):")
   error.printStackTrace()

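For context on the JS side of these two events, here is a minimal TypeScript sketch of handling them from a consumer. The prop names follow the `registrationName` entries registered in `CameraViewManager` further down, and the payload fields mirror the `Arguments` maps built above (`duration` is in seconds and only present when the native side knows the segment duration). The handler names and wiring are assumptions, not part of this diff.

```ts
// Sketch only: assumes the native events surface as `onVideoChunkReady` /
// `onInitReady` props on the Camera view, as registered in CameraViewManager.
// Payload field names (`filepath`, `index`, `duration`) come from the
// Arguments maps built in the extensions above.
import type { NativeSyntheticEvent } from 'react-native'

interface ChunkReadyEvent {
  filepath: string
  index: number
  duration?: number // seconds, converted from durationUs on the native side
}

interface InitReadyEvent {
  filepath: string
}

function onInitReady(event: NativeSyntheticEvent<InitReadyEvent>) {
  console.log(`init segment ready: ${event.nativeEvent.filepath}`)
}

function onVideoChunkReady(event: NativeSyntheticEvent<ChunkReadyEvent>) {
  const { filepath, index, duration } = event.nativeEvent
  console.log(`segment ${index} ready (${duration ?? '?'}s): ${filepath}`)
}
```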
View File

@@ -271,8 +271,12 @@ class CameraView(context: Context) :
     invokeOnStopped()
   }
 
-  override fun onVideoChunkReady(filepath: File, index: Int) {
-    invokeOnChunkReady(filepath, index)
+  override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
+    invokeOnChunkReady(filepath, index, durationUs)
+  }
+
+  override fun onInitSegmentReady(filepath: File) {
+    invokeOnInitReady(filepath)
   }
 
   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {

View File

@@ -32,6 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraError", MapBuilder.of("registrationName", "onError")) .put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned")) .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady")) .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
.build()?.toMutableMap() .build()?.toMutableMap()
override fun getName(): String = TAG override fun getName(): String = TAG

View File

@@ -428,19 +428,21 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     // Get actual device rotation from WindowManager since the React Native orientation hook
     // doesn't update when rotating between landscape-left and landscape-right on Android.
-    // Map device rotation to the correct orientationHint for video recording:
-    // - Counter-clockwise (ROTATION_90) → 270° hint
-    // - Clockwise (ROTATION_270) → 90° hint
+    // Map device rotation to the correct orientation for video recording.
+    // Surface.ROTATION_90 = device rotated 90° CCW = phone top on LEFT = LANDSCAPE_LEFT
+    // Surface.ROTATION_270 = device rotated 90° CW = phone top on RIGHT = LANDSCAPE_RIGHT
     val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
     val deviceRotation = windowManager.defaultDisplay.rotation
     val recordingOrientation = when (deviceRotation) {
       Surface.ROTATION_0 -> Orientation.PORTRAIT
-      Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
+      Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
       Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
-      Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
+      Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
       else -> Orientation.PORTRAIT
     }
+    Log.i(TAG, "startRecording: orientation=${recordingOrientation.toDegrees()}° (deviceRotation=$deviceRotation)")
 
     val recording = RecordingSession(
       context,
       cameraId,
@@ -448,7 +450,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
       enableAudio,
       fps,
       videoOutput.enableHdr,
-      orientation,
+      recordingOrientation,
       options,
       filePath,
       callback,
@@ -513,7 +515,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     fun onInitialized()
     fun onStarted()
     fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int)
+    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
+    fun onInitSegmentReady(filepath: File)
     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
   }
 }

View File

@@ -14,7 +14,7 @@ import java.io.File
 import java.nio.ByteBuffer
 
 class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
-  MediaCodec.Callback() {
+  MediaCodec.Callback(), ChunkedRecorderInterface {
   companion object {
     private const val TAG = "ChunkedRecorder"
@@ -30,8 +30,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       iFrameInterval: Int = 5
     ): ChunkedRecordingManager {
       val mimeType = options.videoCodec.toMimeType()
-      val cameraOrientationDegrees = cameraOrientation.toDegrees()
-      val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees();
+      // Use cameraOrientation (from WindowManager) for rotation metadata
+      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
+      // Note: MediaMuxer.setOrientationHint() uses opposite convention from HlsMuxer's rotation matrix
+      // We need to invert the rotation: 90 <-> 270, while 0 and 180 stay the same
+      val orientationDegrees = when (cameraOrientation.toDegrees()) {
+        90 -> 270
+        270 -> 90
+        else -> cameraOrientation.toDegrees()
+      }
       val (width, height) = if (cameraOrientation.isLandscape()) {
         size.height to size.width
       } else {
@@ -55,12 +62,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
       format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
-      Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
+      Log.d(TAG, "Video Format: $format, orientation: $orientationDegrees")
 
       // Create a MediaCodec encoder, and configure it with our format. Get a Surface
       // we can use for input and wrap it with a class that handles the EGL work.
       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
       return ChunkedRecordingManager(
-        codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks
+        codec, outputDirectory, orientationDegrees, iFrameInterval, callbacks
       )
     }
   }
@@ -73,7 +80,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   private val targetDurationUs = iFrameInterval * 1000000
 
-  val surface: Surface = encoder.createInputSurface()
+  override val surface: Surface = encoder.createInputSurface()
 
   init {
     if (!this.outputDirectory.exists()) {
@@ -91,11 +98,14 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       muxer.start()
     }
 
+    fun writeSample(buffer: java.nio.ByteBuffer, bufferInfo: BufferInfo) {
+      muxer.writeSampleData(videoTrack, buffer, bufferInfo)
+    }
+
     fun finish() {
       muxer.stop()
       muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex)
+      callbacks.onVideoChunkReady(filepath, chunkIndex, null)
     }
   }
@@ -133,12 +143,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     return bufferInfo.presentationTimeUs - context.startTimeUs
   }
 
-  fun start() {
+  override fun start() {
     encoder.start()
     recording = true
   }
 
-  fun finish() {
+  override fun finish() {
     synchronized(this) {
       muxerContext?.finish()
       recording = false
@@ -168,7 +178,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
         encoder.releaseOutputBuffer(index, false)
         return
       }
-      context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
+      context.writeSample(encodedData, bufferInfo)
       encoder.releaseOutputBuffer(index, false)
     }
   }

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.core

import android.view.Surface

/**
 * Common interface for chunked video recorders.
 * Implemented by both ChunkedRecordingManager (regular MP4) and
 * FragmentedRecordingManager (HLS-compatible fMP4).
 */
interface ChunkedRecorderInterface {
  val surface: Surface

  fun start()
  fun finish()
}

View File

@@ -0,0 +1,176 @@
package com.mrousavy.camera.core

import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File

/**
 * A recording manager that produces HLS-compatible fragmented MP4 segments.
 *
 * Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
 * - init.mp4: Initialization segment (ftyp + moov with mvex)
 * - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
 */
class FragmentedRecordingManager(
  private val encoder: MediaCodec,
  private val muxer: HlsMuxer
) : MediaCodec.Callback(), ChunkedRecorderInterface {
  companion object {
    private const val TAG = "FragmentedRecorder"
    private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6

    fun fromParams(
      callbacks: CameraSession.Callback,
      size: Size,
      enableAudio: Boolean,
      fps: Int? = null,
      cameraOrientation: Orientation,
      bitRate: Int,
      options: RecordVideoOptions,
      outputDirectory: File,
      segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
    ): FragmentedRecordingManager {
      val mimeType = options.videoCodec.toMimeType()

      // Use cameraOrientation (from WindowManager) for rotation metadata
      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
      val orientationDegrees = cameraOrientation.toDegrees()

      // Swap dimensions based on camera orientation, same as ChunkedRecordingManager
      val (width, height) = if (cameraOrientation.isLandscape()) {
        size.height to size.width
      } else {
        size.width to size.height
      }

      Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees°")

      val format = MediaFormat.createVideoFormat(mimeType, width, height)
      val codec = MediaCodec.createEncoderByType(mimeType)
      format.setInteger(
        MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
      )

      // Use 30fps as conservative default since many Android devices can't sustain
      // higher frame rates at high resolutions. This affects:
      // - Encoder: bitrate allocation and I-frame interval calculation
      // - HlsMuxer: timescale for accurate sample durations
      // The actual frame timing comes from camera timestamps regardless of this setting.
      val effectiveFps = 30
      format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
      format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)

      val muxer = HlsMuxer(
        outputDirectory = outputDirectory,
        callback = object : HlsMuxer.Callback {
          override fun onInitSegmentReady(file: File) {
            callbacks.onInitSegmentReady(file)
          }

          override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
            callbacks.onVideoChunkReady(file, index, durationUs)
          }
        },
        orientationDegrees = orientationDegrees,
        fps = effectiveFps
      )
      muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)

      return FragmentedRecordingManager(codec, muxer)
    }
  }

  private var recording = false
  private var muxerStarted = false
  private var trackIndex = -1

  override val surface: Surface = encoder.createInputSurface()

  init {
    encoder.setCallback(this)
  }

  override fun start() {
    encoder.start()
    recording = true
  }

  override fun finish() {
    synchronized(this) {
      recording = false
      if (muxerStarted) {
        muxer.stop()
        muxer.release()
      }
      try {
        encoder.stop()
        encoder.release()
      } catch (e: Exception) {
        Log.e(TAG, "Error stopping encoder", e)
      }
    }
  }

  // MediaCodec.Callback methods

  override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
    // Not used for Surface input
  }

  override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
    synchronized(this) {
      if (!recording) {
        encoder.releaseOutputBuffer(index, false)
        return
      }
      if (!muxerStarted) {
        encoder.releaseOutputBuffer(index, false)
        return
      }
      val buffer = encoder.getOutputBuffer(index)
      if (buffer == null) {
        Log.e(TAG, "getOutputBuffer returned null")
        encoder.releaseOutputBuffer(index, false)
        return
      }
      try {
        muxer.writeSampleData(trackIndex, buffer, bufferInfo)
      } catch (e: Exception) {
        Log.e(TAG, "Error writing sample", e)
      }
      encoder.releaseOutputBuffer(index, false)
    }
  }

  override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
    Log.e(TAG, "Codec error: ${e.message}")
  }

  override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
    synchronized(this) {
      Log.i(TAG, "Output format changed: $format")
      trackIndex = muxer.addTrack(format)
      muxer.start()
      muxerStarted = true
    }
  }
}

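To illustrate what the init segment and per-segment durations are for, here is a hedged sketch (not part of this PR) of assembling the emitted files into an HLS media playlist on the JS side. The `init.mp4` / `0.mp4`, `1.mp4`, ... naming follows the class documentation above, and the 6-second target duration matches `SEGMENT_DURATION_SECONDS` in `RecordingSession` below; the helper and its names are illustrative only.

```ts
// Illustrative helper: builds a VOD-style HLS media playlist from the init
// segment and the chunk events emitted above. Assumes segments are named
// 0.mp4, 1.mp4, ... next to init.mp4, as produced by FragmentedRecordingManager.
interface Segment {
  filepath: string
  index: number
  duration: number // seconds, from the onVideoChunkReady payload
}

function buildMediaPlaylist(initPath: string, segments: Segment[], targetDuration = 6): string {
  const lines = [
    '#EXTM3U',
    '#EXT-X-VERSION:7',
    `#EXT-X-TARGETDURATION:${targetDuration}`,
    '#EXT-X-MEDIA-SEQUENCE:0',
    `#EXT-X-MAP:URI="${initPath}"`,
  ]
  for (const segment of [...segments].sort((a, b) => a.index - b.index)) {
    lines.push(`#EXTINF:${segment.duration.toFixed(3)},`)
    lines.push(segment.filepath)
  }
  lines.push('#EXT-X-ENDLIST')
  return lines.join('\n')
}
```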
File diff suppressed because it is too large

View File

@@ -8,12 +8,14 @@ import com.facebook.common.statfs.StatFsHelper
 import com.mrousavy.camera.extensions.getRecommendedBitRate
 import com.mrousavy.camera.types.Orientation
 import com.mrousavy.camera.types.RecordVideoOptions
+import com.mrousavy.camera.types.StreamSegmentType
 import com.mrousavy.camera.utils.FileUtils
 import java.io.File
 import android.os.Environment
 import java.text.SimpleDateFormat
 import java.util.Locale
 import java.util.Date
+
 class RecordingSession(
   context: Context,
   val cameraId: String,
@@ -26,7 +28,7 @@ class RecordingSession(
   private val filePath: String,
   private val callback: (video: Video) -> Unit,
   private val onError: (error: CameraError) -> Unit,
-  private val allCallbacks: CameraSession.Callback,
+  private val allCallbacks: CameraSession.Callback
 ) {
   companion object {
     private const val TAG = "RecordingSession"
@@ -34,6 +36,9 @@ class RecordingSession(
     private const val AUDIO_SAMPLING_RATE = 44_100
     private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
     private const val AUDIO_CHANNELS = 1
+
+    // Segment duration in seconds (matching iOS default of 6 seconds)
+    private const val SEGMENT_DURATION_SECONDS = 6
   }
 
   data class Video(val path: String, val durationMs: Long, val size: Size)
@@ -42,16 +47,33 @@ class RecordingSession(
   private val outputPath: File = File(filePath.removePrefix("file://"))
   private val bitRate = getBitRate()
 
-  private val recorder = ChunkedRecordingManager.fromParams(
-    allCallbacks,
-    size,
-    enableAudio,
-    fps,
-    cameraOrientation,
-    bitRate,
-    options,
-    outputPath
-  )
+  // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
+  // or fall back to ChunkedRecordingManager for regular MP4 chunks
+  private val recorder: ChunkedRecorderInterface = if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
+    FragmentedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath,
+      SEGMENT_DURATION_SECONDS
+    )
+  } else {
+    ChunkedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath
+    )
+  }
 
   private var startTime: Long? = null
   val surface: Surface
     get() {
@@ -60,7 +82,7 @@ class RecordingSession(
   fun start() {
     synchronized(this) {
-      Log.i(TAG, "Starting RecordingSession..")
+      Log.i(TAG, "Starting RecordingSession with ${options.streamSegmentType} recorder..")
       startTime = System.currentTimeMillis()
       recorder.start()
     }

View File

@@ -9,6 +9,7 @@ class RecordVideoOptions(map: ReadableMap) {
   var videoBitRateOverride: Double? = null
   var videoBitRateMultiplier: Double? = null
   var orientation: Orientation? = null
+  var streamSegmentType: StreamSegmentType = StreamSegmentType.FRAGMENTED_MP4
 
   init {
     if (map.hasKey("fileType")) {
@@ -29,5 +30,8 @@ class RecordVideoOptions(map: ReadableMap) {
if (map.hasKey("orientation")) { if (map.hasKey("orientation")) {
orientation = Orientation.fromUnionValue(map.getString("orientation")) orientation = Orientation.fromUnionValue(map.getString("orientation"))
} }
if (map.hasKey("streamSegmentType")) {
streamSegmentType = StreamSegmentType.fromUnionValue(map.getString("streamSegmentType"))
}
} }
} }

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.types

enum class StreamSegmentType(override val unionValue: String) : JSUnionValue {
  FRAGMENTED_MP4("FRAGMENTED_MP4"),
  RB_CHUNKED_MP4("RB_CHUNKED_MP4");

  companion object : JSUnionValue.Companion<StreamSegmentType> {
    override fun fromUnionValue(unionValue: String?): StreamSegmentType =
      when (unionValue) {
        "FRAGMENTED_MP4" -> FRAGMENTED_MP4
        "RB_CHUNKED_MP4" -> RB_CHUNKED_MP4
        else -> FRAGMENTED_MP4 // Default to fMP4
      }
  }
}

View File

@@ -41,6 +41,17 @@ export interface RecordVideoOptions {
    * @default 'normal'
    */
   videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number
+  /**
+   * The stream segment type for recording on Android.
+   * - `FRAGMENTED_MP4`: HLS-compatible segments (init.mp4 + numbered segments)
+   * - `RB_CHUNKED_MP4`: Legacy chunked MP4 format
+   *
+   * iOS always uses FRAGMENTED_MP4 regardless of this setting.
+   *
+   * @platform android
+   * @default 'FRAGMENTED_MP4'
+   */
+  streamSegmentType?: 'FRAGMENTED_MP4' | 'RB_CHUNKED_MP4'
 }
 
 /**
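A usage sketch for the new option, assuming the library's usual `Camera.startRecording(options)` entry point and package name; only `streamSegmentType` is introduced by this PR, and omitting it keeps the `FRAGMENTED_MP4` default.

```ts
// Sketch only: the ref API and package name are assumptions, not part of this diff.
import type { Camera } from 'react-native-vision-camera'

function startLegacyChunkedRecording(camera: Camera) {
  camera.startRecording({
    fileType: 'mp4',
    streamSegmentType: 'RB_CHUNKED_MP4', // legacy chunked MP4 instead of fMP4 segments
    onRecordingFinished: (video) => console.log(`finished: ${video.path}`),
    onRecordingError: (error) => console.error(error),
  })
}
```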