Compare commits

...

22 Commits

Author SHA1 Message Date
Dean
dd26812a9c fix: Add pasp box to declare square pixels (1:1) for web playback
The codec string fix caused videos to appear squished on web players
like Shaka. Adding an explicit pixel aspect ratio (pasp) box with
1:1 ratio tells the player not to apply any SAR scaling.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 12:22:43 -08:00
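
The pasp box referenced above is a small ISO BMFF box carried inside the avc1 sample entry. A minimal sketch of what a 1:1 declaration looks like, assuming a ByteBuffer-based box writer; `paspBox` is an illustrative name, not a function from this repo's HlsMuxer:

```kotlin
import java.nio.ByteBuffer

// Sketch of a PixelAspectRatioBox ('pasp') declaring square pixels.
// Layout per ISO/IEC 14496-12: size (4) | type 'pasp' (4) | hSpacing (4) | vSpacing (4).
fun paspBox(hSpacing: Int = 1, vSpacing: Int = 1): ByteBuffer {
  val box = ByteBuffer.allocate(16)
  box.putInt(16)                                   // total box size in bytes
  box.put("pasp".toByteArray(Charsets.US_ASCII))   // box type
  box.putInt(hSpacing)                             // horizontal spacing
  box.putInt(vSpacing)                             // vertical spacing
  box.flip()
  return box
}
```
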
Dean
b716608379 fix: Skip NAL header byte when reading SPS profile data in HlsMuxer
The SPS NAL unit format is: [NAL header, profile_idc, constraint_flags, level_idc, ...]
The code was incorrectly reading from byte 0 (NAL header, typically 0x67)
instead of byte 1 (profile_idc).

This produced invalid codec strings like `avc1.676400` instead of valid
ones like `avc1.64001f`, causing Shaka Player on web to fail with error
4032 (unable to parse codec).

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 11:52:08 -08:00
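
For reference, the codec string is built from the three bytes that follow the NAL header, as the message above describes. A minimal sketch of that derivation (illustrative only; the repo's HlsMuxer code may differ):

```kotlin
// Sketch: derive the RFC 6381 codec string from an H.264 SPS NAL unit.
// Byte 0 is the NAL header (e.g. 0x67); profile_idc, constraint flags and
// level_idc start at byte 1.
fun avc1CodecString(sps: ByteArray): String {
  require(sps.size >= 4) { "SPS too short to contain profile/level bytes" }
  val profileIdc = sps[1].toInt() and 0xFF
  val constraintFlags = sps[2].toInt() and 0xFF
  val levelIdc = sps[3].toInt() and 0xFF
  return "avc1.%02x%02x%02x".format(profileIdc, constraintFlags, levelIdc)
}
// e.g. High profile (0x64), no constraint flags (0x00), level 3.1 (0x1f) -> "avc1.64001f"
```
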
0ecc3d8210 Merge pull request 'fix: Handle both file:// and file:/ URI prefixes' (#19) from dean/fix-file-prefix-fmp4 into main
Reviewed-on: #19
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2026-01-06 19:41:32 +00:00
309e1e9457 Merge branch 'main' into dean/fix-file-prefix-fmp4 2026-01-06 17:38:24 +00:00
71b08e6898 Merge pull request 'Android Fmp4' (#17) from loewy/android-fmp4-normalize-timestamp-fix-fps into main
Reviewed-on: #17
2026-01-06 17:21:29 +00:00
Dean
699481f6f8 fix: Handle both file:// and file:/ URI prefixes
The previous code only stripped file:// (double slash) but some paths
come with file:/ (single slash), causing FileNotFoundException.

Fixes RAILBIRD-FRONTEND-1JH

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-06 08:45:39 -08:00
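
A standalone sketch of the normalization this commit applies, mirroring the `Regex("^file:/+")` replacement visible in the RecordingSession diff further down; `normalizeFileUri` is an illustrative helper name:

```kotlin
import java.io.File

// Sketch: both "file:///path" and "file:/path" resolve to "/path".
fun normalizeFileUri(path: String): File =
  File(path.replace(Regex("^file:/+"), "/"))
```
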
11ce9ba8f6 ensure compatibility with rb chunked and fmp4, move orientation detection for rb chunked to chunked manager 2026-01-03 13:40:09 -08:00
dd9de38a7d use window manager to determine device rotation in android 2026-01-02 10:04:49 -08:00
3f5d0a2109 fix fps (alter tfhd and trun size, add logs) 2026-01-02 10:04:49 -08:00
6c2319608d normalize timestamps and fix framerate metadata in init file 2026-01-02 10:04:47 -08:00
27f127fe94 Fix orientation issues 2026-01-02 10:02:51 -08:00
92b29cbd78 Write our own muxer to make HLS upload actually work 2026-01-02 10:02:51 -08:00
fb23c57a6c feat: Add fragmented MP4 (fMP4) support for Android
Implements HLS-compatible fragmented MP4 recording on Android using
AndroidX Media3 FragmentedMp4Muxer, matching the iOS implementation.

Changes:
- Add FragmentedRecordingManager for fMP4 segment output
- Add ChunkedRecorderInterface to abstract recorder implementations
- Add onInitSegmentReady callback for init segment (init.mp4)
- Update onVideoChunkReady to include segment duration
- RecordingSession now uses FragmentedRecordingManager by default

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-02 10:02:51 -08:00
8d06ab9e66 Merge pull request 'Strip file prefix (for expo-file-system)' (#15) from loewy/stripe-file-prefix into main
Reviewed-on: #15
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2025-12-26 17:26:42 +00:00
f6b6cfb3d5 strip file prefix 2025-12-19 12:55:41 -08:00
3ac555a2b3 Merge pull request 'Stop recording on init write failure' (#14) from loewy/stop-recording-on-init-write-failure into main
Reviewed-on: #14
2025-12-19 20:53:06 +00:00
7e1e074e0f force recording to stop on init write failure and fix silent failure 2025-12-18 13:29:31 -08:00
b269e9c493 fix silent init file write failure 2025-12-18 13:09:34 -08:00
5fe7f35127 Merge pull request 'Fix orientation Android - get orientation directly from WindowManager' (#11) from loewy/fix-android-orientation-bugs into main
Reviewed-on: #11
2025-12-17 23:20:52 +00:00
61863149c0 flip orientation in camera session 2025-12-11 14:17:34 -08:00
09b50938d2 get orientation change from WindowManager for android 2025-12-11 13:02:00 -08:00
a158ed8350 Merge pull request 'Bump react native w/ API 35 compatibility' (#10) from bump-react-native-with-api35 into main
Reviewed-on: #10
2025-12-11 18:04:51 +00:00
16 changed files with 1407 additions and 47 deletions

View File

@@ -40,15 +40,26 @@ fun CameraView.invokeOnStopped() {
  this.sendEvent(event)
}

-fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
-  Log.e(CameraView.TAG, "invokeOnError(...):")
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
+  Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
  val event = Arguments.createMap()
  event.putInt("index", index)
  event.putString("filepath", filepath.toString())
+  if (durationUs != null) {
+    event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
+  }
  val reactContext = context as ReactContext
  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
}

+fun CameraView.invokeOnInitReady(filepath: File) {
+  Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
+  val event = Arguments.createMap()
+  event.putString("filepath", filepath.toString())
+  val reactContext = context as ReactContext
+  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
+}
+
fun CameraView.invokeOnError(error: Throwable) {
  Log.e(CameraView.TAG, "invokeOnError(...):")
  error.printStackTrace()

View File

@@ -271,8 +271,12 @@ class CameraView(context: Context) :
    invokeOnStopped()
  }

-  override fun onVideoChunkReady(filepath: File, index: Int) {
-    invokeOnChunkReady(filepath, index)
+  override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
+    invokeOnChunkReady(filepath, index, durationUs)
+  }
+
+  override fun onInitSegmentReady(filepath: File) {
+    invokeOnInitReady(filepath)
  }

  override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {

View File

@@ -32,6 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraError", MapBuilder.of("registrationName", "onError")) .put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned")) .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady")) .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
.build()?.toMutableMap() .build()?.toMutableMap()
override fun getName(): String = TAG override fun getName(): String = TAG

View File

@@ -15,6 +15,7 @@ import android.util.Log
import android.util.Size
import android.view.Surface
import android.view.SurfaceHolder
+import android.view.WindowManager
import androidx.core.content.ContextCompat
import com.google.mlkit.vision.barcode.common.Barcode
import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
@@ -425,6 +426,23 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
    val fps = configuration?.fps ?: 30

+    // Get actual device rotation from WindowManager since the React Native orientation hook
+    // doesn't update when rotating between landscape-left and landscape-right on Android.
+    // Map device rotation to the correct orientation for video recording.
+    // Surface.ROTATION_90 = device rotated 90° CCW = phone top on LEFT = LANDSCAPE_LEFT
+    // Surface.ROTATION_270 = device rotated 90° CW = phone top on RIGHT = LANDSCAPE_RIGHT
+    val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+    val deviceRotation = windowManager.defaultDisplay.rotation
+    val recordingOrientation = when (deviceRotation) {
+      Surface.ROTATION_0 -> Orientation.PORTRAIT
+      Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
+      Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+      Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
+      else -> Orientation.PORTRAIT
+    }
+    Log.i(TAG, "startRecording: orientation=${recordingOrientation.toDegrees()}° (deviceRotation=$deviceRotation)")
+
    val recording = RecordingSession(
      context,
      cameraId,
@@ -432,7 +450,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
      enableAudio,
      fps,
      videoOutput.enableHdr,
-      orientation,
+      recordingOrientation,
      options,
      filePath,
      callback,
@@ -497,7 +515,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
    fun onInitialized()
    fun onStarted()
    fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int)
+    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
+    fun onInitSegmentReady(filepath: File)
    fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
  }
}

View File

@@ -14,7 +14,7 @@ import java.io.File
import java.nio.ByteBuffer

class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
-  MediaCodec.Callback() {
+  MediaCodec.Callback(), ChunkedRecorderInterface {
  companion object {
    private const val TAG = "ChunkedRecorder"
@@ -30,8 +30,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
      iFrameInterval: Int = 5
    ): ChunkedRecordingManager {
      val mimeType = options.videoCodec.toMimeType()
-      val cameraOrientationDegrees = cameraOrientation.toDegrees()
-      val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees();
+      // Use cameraOrientation (from WindowManager) for rotation metadata
+      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
+      // Note: MediaMuxer.setOrientationHint() uses opposite convention from HlsMuxer's rotation matrix
+      // We need to invert the rotation: 90 <-> 270, while 0 and 180 stay the same
+      val orientationDegrees = when (cameraOrientation.toDegrees()) {
+        90 -> 270
+        270 -> 90
+        else -> cameraOrientation.toDegrees()
+      }
      val (width, height) = if (cameraOrientation.isLandscape()) {
        size.height to size.width
      } else {
@@ -55,12 +62,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
      format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
-      Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
+      Log.d(TAG, "Video Format: $format, orientation: $orientationDegrees")
      // Create a MediaCodec encoder, and configure it with our format. Get a Surface
      // we can use for input and wrap it with a class that handles the EGL work.
      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
      return ChunkedRecordingManager(
-        codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks
+        codec, outputDirectory, orientationDegrees, iFrameInterval, callbacks
      )
    }
  }
@@ -73,7 +80,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
  private val targetDurationUs = iFrameInterval * 1000000

-  val surface: Surface = encoder.createInputSurface()
+  override val surface: Surface = encoder.createInputSurface()

  init {
    if (!this.outputDirectory.exists()) {
@@ -91,11 +98,14 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
      muxer.start()
    }

+    fun writeSample(buffer: java.nio.ByteBuffer, bufferInfo: BufferInfo) {
+      muxer.writeSampleData(videoTrack, buffer, bufferInfo)
+    }
+
    fun finish() {
      muxer.stop()
      muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex)
+      callbacks.onVideoChunkReady(filepath, chunkIndex, null)
    }
  }
@@ -133,12 +143,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
    return bufferInfo.presentationTimeUs - context.startTimeUs
  }

-  fun start() {
+  override fun start() {
    encoder.start()
    recording = true
  }

-  fun finish() {
+  override fun finish() {
    synchronized(this) {
      muxerContext?.finish()
      recording = false
@@ -168,7 +178,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
        encoder.releaseOutputBuffer(index, false)
        return
      }
-      context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
+      context.writeSample(encodedData, bufferInfo)
      encoder.releaseOutputBuffer(index, false)
    }
  }

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.core

import android.view.Surface

/**
 * Common interface for chunked video recorders.
 * Implemented by both ChunkedRecordingManager (regular MP4) and
 * FragmentedRecordingManager (HLS-compatible fMP4).
 */
interface ChunkedRecorderInterface {
  val surface: Surface
  fun start()
  fun finish()
}

View File

@@ -0,0 +1,176 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
/**
* A recording manager that produces HLS-compatible fragmented MP4 segments.
*
* Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
* - init.mp4: Initialization segment (ftyp + moov with mvex)
* - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
*/
class FragmentedRecordingManager(
private val encoder: MediaCodec,
private val muxer: HlsMuxer
) : MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "FragmentedRecorder"
private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6
fun fromParams(
callbacks: CameraSession.Callback,
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
// Use cameraOrientation (from WindowManager) for rotation metadata
// The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
val orientationDegrees = cameraOrientation.toDegrees()
// Swap dimensions based on camera orientation, same as ChunkedRecordingManager
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees°")
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
// Use 30fps as conservative default since many Android devices can't sustain
// higher frame rates at high resolutions. This affects:
// - Encoder: bitrate allocation and I-frame interval calculation
// - HlsMuxer: timescale for accurate sample durations
// The actual frame timing comes from camera timestamps regardless of this setting.
val effectiveFps = 30
format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val muxer = HlsMuxer(
outputDirectory = outputDirectory,
callback = object : HlsMuxer.Callback {
override fun onInitSegmentReady(file: File) {
callbacks.onInitSegmentReady(file)
}
override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
callbacks.onVideoChunkReady(file, index, durationUs)
}
},
orientationDegrees = orientationDegrees,
fps = effectiveFps
)
muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)
return FragmentedRecordingManager(codec, muxer)
}
}
private var recording = false
private var muxerStarted = false
private var trackIndex = -1
override val surface: Surface = encoder.createInputSurface()
init {
encoder.setCallback(this)
}
override fun start() {
encoder.start()
recording = true
}
override fun finish() {
synchronized(this) {
recording = false
if (muxerStarted) {
muxer.stop()
muxer.release()
}
try {
encoder.stop()
encoder.release()
} catch (e: Exception) {
Log.e(TAG, "Error stopping encoder", e)
}
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Not used for Surface input
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
synchronized(this) {
if (!recording) {
encoder.releaseOutputBuffer(index, false)
return
}
if (!muxerStarted) {
encoder.releaseOutputBuffer(index, false)
return
}
val buffer = encoder.getOutputBuffer(index)
if (buffer == null) {
Log.e(TAG, "getOutputBuffer returned null")
encoder.releaseOutputBuffer(index, false)
return
}
try {
muxer.writeSampleData(trackIndex, buffer, bufferInfo)
} catch (e: Exception) {
Log.e(TAG, "Error writing sample", e)
}
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
synchronized(this) {
Log.i(TAG, "Output format changed: $format")
trackIndex = muxer.addTrack(format)
muxer.start()
muxerStarted = true
}
}
}

File diff suppressed because it is too large

View File

@@ -9,8 +9,10 @@ import android.os.Looper
import android.util.Log
import android.util.Size
import android.view.PixelCopy
+import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
+import android.view.WindowManager
import com.facebook.react.bridge.UiThreadUtil
import com.mrousavy.camera.extensions.resize
import com.mrousavy.camera.extensions.rotatedBy
@@ -150,6 +152,8 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
    val width = frame.width()
    val height = frame.height()

+    // Create bitmap matching surface frame dimensions for PixelCopy
+    // The original code swapped dimensions assuming landscape input - keep that for consistency
    val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)

    // Use a coroutine to suspend until the PixelCopy request is complete
@@ -159,7 +163,23 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
        bitmap,
        { copyResult ->
          if (copyResult == PixelCopy.SUCCESS) {
-            continuation.resume(rotateBitmap90CounterClockwise(bitmap))
+            // Get actual device rotation from WindowManager instead of relying on
+            // the orientation prop, which may not update on Android when rotating
+            // between landscape-left and landscape-right.
+            val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+            val deviceRotation = windowManager.defaultDisplay.rotation
+            val actualOrientation = when (deviceRotation) {
+              Surface.ROTATION_0 -> Orientation.PORTRAIT
+              Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
+              Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+              Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
+              else -> Orientation.PORTRAIT
+            }
+            Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
+            continuation.resume(bitmap.transformBitmap(actualOrientation))
          } else {
            continuation.resumeWithException(
              RuntimeException("PixelCopy failed with error code $copyResult")

View File

@@ -8,12 +8,14 @@ import com.facebook.common.statfs.StatFsHelper
import com.mrousavy.camera.extensions.getRecommendedBitRate
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
+import com.mrousavy.camera.types.StreamSegmentType
import com.mrousavy.camera.utils.FileUtils
import java.io.File
import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date

class RecordingSession(
  context: Context,
  val cameraId: String,
@@ -26,7 +28,7 @@ class RecordingSession(
  private val filePath: String,
  private val callback: (video: Video) -> Unit,
  private val onError: (error: CameraError) -> Unit,
-  private val allCallbacks: CameraSession.Callback,
+  private val allCallbacks: CameraSession.Callback
) {
  companion object {
    private const val TAG = "RecordingSession"
@@ -34,23 +36,45 @@ class RecordingSession(
    private const val AUDIO_SAMPLING_RATE = 44_100
    private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
    private const val AUDIO_CHANNELS = 1
+    // Segment duration in seconds (matching iOS default of 6 seconds)
+    private const val SEGMENT_DURATION_SECONDS = 6
  }

  data class Video(val path: String, val durationMs: Long, val size: Size)

-  private val outputPath: File = File(filePath)
+  // Normalize path - expo-file-system passes file:// URIs but File expects raw paths
+  // Handle both file:// and file:/ variants
+  private val outputPath: File = File(filePath.replace(Regex("^file:/+"), "/"))
  private val bitRate = getBitRate()

-  private val recorder = ChunkedRecordingManager.fromParams(
-    allCallbacks,
-    size,
-    enableAudio,
-    fps,
-    cameraOrientation,
-    bitRate,
-    options,
-    outputPath
-  )
+  // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
+  // or fall back to ChunkedRecordingManager for regular MP4 chunks
+  private val recorder: ChunkedRecorderInterface = if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
+    FragmentedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath,
+      SEGMENT_DURATION_SECONDS
+    )
+  } else {
+    ChunkedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath
+    )
+  }

  private var startTime: Long? = null
  val surface: Surface
    get() {
@@ -59,7 +83,7 @@ class RecordingSession(
  fun start() {
    synchronized(this) {
-      Log.i(TAG, "Starting RecordingSession..")
+      Log.i(TAG, "Starting RecordingSession with ${options.streamSegmentType} recorder..")
      startTime = System.currentTimeMillis()
      recorder.start()
    }

View File

@@ -9,6 +9,7 @@ class RecordVideoOptions(map: ReadableMap) {
  var videoBitRateOverride: Double? = null
  var videoBitRateMultiplier: Double? = null
  var orientation: Orientation? = null
+  var streamSegmentType: StreamSegmentType = StreamSegmentType.FRAGMENTED_MP4

  init {
    if (map.hasKey("fileType")) {
@@ -29,5 +30,8 @@ class RecordVideoOptions(map: ReadableMap) {
if (map.hasKey("orientation")) { if (map.hasKey("orientation")) {
orientation = Orientation.fromUnionValue(map.getString("orientation")) orientation = Orientation.fromUnionValue(map.getString("orientation"))
} }
if (map.hasKey("streamSegmentType")) {
streamSegmentType = StreamSegmentType.fromUnionValue(map.getString("streamSegmentType"))
}
} }
} }

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.types

enum class StreamSegmentType(override val unionValue: String) : JSUnionValue {
  FRAGMENTED_MP4("FRAGMENTED_MP4"),
  RB_CHUNKED_MP4("RB_CHUNKED_MP4");

  companion object : JSUnionValue.Companion<StreamSegmentType> {
    override fun fromUnionValue(unionValue: String?): StreamSegmentType =
      when (unionValue) {
        "FRAGMENTED_MP4" -> FRAGMENTED_MP4
        "RB_CHUNKED_MP4" -> RB_CHUNKED_MP4
        else -> FRAGMENTED_MP4 // Default to fMP4
      }
  }
}

View File

@@ -20,6 +20,9 @@ extension CameraSession {
                    onError: @escaping (_ error: CameraError) -> Void) {
    // Run on Camera Queue
    CameraQueues.cameraQueue.async {
+      // Normalize path - expo-file-system passes file:// URIs but FileManager expects raw paths
+      let normalizedPath = filePath.hasPrefix("file://") ? String(filePath.dropFirst(7)) : filePath
      let start = DispatchTime.now()
      ReactLogger.log(level: .info, message: "Starting Video recording...")
@@ -38,11 +41,27 @@ extension CameraSession {
      // Callback for when new chunks are ready
      let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
        guard let delegate = self.delegate else {
+          ReactLogger.log(level: .warning, message: "Chunk ready but delegate is nil, dropping chunk: \(chunk)")
          return
        }
        delegate.onVideoChunkReady(chunk: chunk)
      }

+      // Callback for when a chunk write fails (e.g. init file write failure)
+      let onChunkError: (Error) -> Void = { error in
+        ReactLogger.log(level: .error, message: "Chunk write error, stopping recording: \(error.localizedDescription)")
+        // Stop recording immediately
+        if let session = self.recordingSession {
+          session.stop(clock: self.captureSession.clock)
+        }
+        // Surface error to RN
+        if let cameraError = error as? CameraError {
+          onError(cameraError)
+        } else {
+          onError(.capture(.fileError))
+        }
+      }
+
      // Callback for when the recording ends
      let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
        defer {
@@ -82,22 +101,23 @@ extension CameraSession {
        }
      }

-      if !FileManager.default.fileExists(atPath: filePath) {
+      if !FileManager.default.fileExists(atPath: normalizedPath) {
        do {
-          try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true)
+          try FileManager.default.createDirectory(atPath: normalizedPath, withIntermediateDirectories: true)
        } catch {
          onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription)))
          return
        }
      }

-      ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)")
+      ReactLogger.log(level: .info, message: "Will record to temporary file: \(normalizedPath)")

      do {
        // Create RecordingSession for the temp file
-        let recordingSession = try RecordingSession(outputDiretory: filePath,
+        let recordingSession = try RecordingSession(outputDiretory: normalizedPath,
                                                     fileType: options.fileType,
                                                     onChunkReady: onChunkReady,
+                                                    onChunkError: onChunkError,
                                                     completion: onFinish)

        // Init Audio + Activate Audio Session (optional)

View File

@@ -24,12 +24,14 @@ class ChunkedRecorder: NSObject {
  let outputURL: URL
  let onChunkReady: ((Chunk) -> Void)
+  let onError: ((Error) -> Void)?

  private var chunkIndex: UInt64 = 0

-  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
+  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void), onError: ((Error) -> Void)? = nil) throws {
    self.outputURL = outputURL
    self.onChunkReady = onChunkReady
+    self.onError = onError

    guard FileManager.default.fileExists(atPath: outputURL.path) else {
      throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
    }
@@ -56,28 +58,36 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
  private func saveInitSegment(_ data: Data) {
    let url = outputURL.appendingPathComponent("init.mp4")
-    save(data: data, url: url)
-    onChunkReady(url: url, type: .initialization)
+    do {
+      try data.write(to: url)
+      onChunkReady(url: url, type: .initialization)
+    } catch {
+      ReactLogger.log(level: .error, message: "Failed to write init file \(url): \(error.localizedDescription)")
+      onError?(CameraError.capture(.fileError))
+    }
  }

  private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
    let name = "\(chunkIndex).mp4"
    let url = outputURL.appendingPathComponent(name)
-    save(data: data, url: url)
+    if save(data: data, url: url) {
      let duration = report?
        .trackReports
        .filter { $0.mediaType == .video }
        .first?
        .duration
      onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
      chunkIndex += 1
+    }
  }

-  private func save(data: Data, url: URL) {
+  private func save(data: Data, url: URL) -> Bool {
    do {
      try data.write(to: url)
+      return true
    } catch {
      ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
+      return false
    }
  }

View File

@@ -74,12 +74,13 @@ class RecordingSession {
  init(outputDiretory: String,
       fileType: AVFileType,
       onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void),
+       onChunkError: ((Error) -> Void)? = nil,
       completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
    completionHandler = completion

    do {
      let outputURL = URL(fileURLWithPath: outputDiretory)
-      recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady)
+      recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady, onError: onChunkError)
      assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!)
      assetWriter.shouldOptimizeForNetworkUse = false
      assetWriter.outputFileTypeProfile = .mpeg4AppleHLS

View File

@@ -41,6 +41,17 @@ export interface RecordVideoOptions {
   * @default 'normal'
   */
  videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number
+  /**
+   * The stream segment type for recording on Android.
+   * - `FRAGMENTED_MP4`: HLS-compatible segments (init.mp4 + numbered segments)
+   * - `RB_CHUNKED_MP4`: Legacy chunked MP4 format
+   *
+   * iOS always uses FRAGMENTED_MP4 regardless of this setting.
+   *
+   * @platform android
+   * @default 'FRAGMENTED_MP4'
+   */
+  streamSegmentType?: 'FRAGMENTED_MP4' | 'RB_CHUNKED_MP4'
}

/**