Compare commits

..

17 Commits

Author SHA1 Message Date
Dean
dd26812a9c fix: Add pasp box to declare square pixels (1:1) for web playback
The codec string fix caused videos to appear squished on web players
like Shaka. Adding an explicit pixel aspect ratio (pasp) box with a
1:1 ratio tells the player not to apply any SAR scaling.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 12:22:43 -08:00
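Note: the pasp box referenced above is a 16-byte box from ISO/IEC 14496-12 that sits inside the video sample entry. A minimal sketch of how such a box serializes (the helper name is illustrative, not the actual HlsMuxer code):

```kotlin
import java.nio.ByteBuffer

// Illustrative sketch of a PixelAspectRatioBox ("pasp") declaring square pixels.
// Layout per ISO/IEC 14496-12: uint32 size, 4-char type, then hSpacing and
// vSpacing as uint32. A 1:1 ratio tells players to skip SAR scaling.
fun buildPaspBox(hSpacing: Int = 1, vSpacing: Int = 1): ByteArray {
    val box = ByteBuffer.allocate(16)                  // 4 (size) + 4 (type) + 4 + 4
    box.putInt(16)                                     // total box size in bytes
    box.put("pasp".toByteArray(Charsets.US_ASCII))     // box type
    box.putInt(hSpacing)                               // horizontal pixel spacing
    box.putInt(vSpacing)                               // vertical pixel spacing
    return box.array()                                 // appended inside the avc1 sample entry
}
```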
Dean
b716608379 fix: Skip NAL header byte when reading SPS profile data in HlsMuxer
The SPS NAL unit format is: [NAL header, profile_idc, constraint_flags, level_idc, ...]
The code was incorrectly reading from byte 0 (NAL header, typically 0x67)
instead of byte 1 (profile_idc).

This produced invalid codec strings like `avc1.676400` instead of valid
ones like `avc1.64001f`, causing Shaka Player on web to fail with error
4032 (unable to parse codec).

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 11:52:08 -08:00
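Note: the RFC 6381 `avc1.PPCCLL` string is just the three bytes following the SPS NAL header rendered as hex. A minimal sketch of the corrected read (the function name is illustrative; the real parsing lives in HlsMuxer, whose full diff appears to be the suppressed one further down):

```kotlin
// Illustrative sketch: derive an RFC 6381 codec string (e.g. "avc1.64001f")
// from a raw SPS NAL unit. Byte 0 is the NAL header (typically 0x67 for SPS);
// profile_idc, constraint flags and level_idc start at byte 1.
fun codecStringFromSps(sps: ByteArray): String {
    require(sps.size >= 4) { "SPS too short" }
    val profileIdc = sps[1].toInt() and 0xFF        // the bug read sps[0] (the NAL header)
    val constraintFlags = sps[2].toInt() and 0xFF
    val levelIdc = sps[3].toInt() and 0xFF
    return "avc1.%02x%02x%02x".format(profileIdc, constraintFlags, levelIdc)
}
```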
0ecc3d8210 Merge pull request 'fix: Handle both file:// and file:/ URI prefixes' (#19) from dean/fix-file-prefix-fmp4 into main
Reviewed-on: #19
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2026-01-06 19:41:32 +00:00
309e1e9457 Merge branch 'main' into dean/fix-file-prefix-fmp4 2026-01-06 17:38:24 +00:00
71b08e6898 Merge pull request 'Android Fmp4' (#17) from loewy/android-fmp4-normalize-timestamp-fix-fps into main
Reviewed-on: #17
2026-01-06 17:21:29 +00:00
Dean
699481f6f8 fix: Handle both file:// and file:/ URI prefixes
The previous code only stripped file:// (double slash) but some paths
come with file:/ (single slash), causing FileNotFoundException.

Fixes RAILBIRD-FRONTEND-1JH

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-06 08:45:39 -08:00
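Note: the fix reduces to a single regex, visible in the RecordingSession diff further down; shown standalone here with illustrative paths:

```kotlin
// Collapse a "file:" scheme with one or more slashes down to a plain absolute
// path so java.io.File can open it (handles both file:// and file:/).
fun normalizeFileUri(path: String): String =
    path.replace(Regex("^file:/+"), "/")

// "file:///data/user/0/app/chunks" -> "/data/user/0/app/chunks"
// "file:/data/user/0/app/chunks"   -> "/data/user/0/app/chunks"
```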
11ce9ba8f6 ensure compatibility with rb chunked and fmp4, move orientation detection for rb chunked to chunked manager 2026-01-03 13:40:09 -08:00
dd9de38a7d use window manager to determine device rotation in android 2026-01-02 10:04:49 -08:00
3f5d0a2109 fix fps (alter tfhd and trun size, add logs) 2026-01-02 10:04:49 -08:00
6c2319608d normalize timestamps and fix framerate metadata in init file 2026-01-02 10:04:47 -08:00
27f127fe94 Fix orientation issues 2026-01-02 10:02:51 -08:00
92b29cbd78 Write our own muxer to make hls upload actually work 2026-01-02 10:02:51 -08:00
fb23c57a6c feat: Add fragmented MP4 (fMP4) support for Android
Implements HLS-compatible fragmented MP4 recording on Android using
AndroidX Media3 FragmentedMp4Muxer, matching the iOS implementation.

Changes:
- Add FragmentedRecordingManager for fMP4 segment output
- Add ChunkedRecorderInterface to abstract recorder implementations
- Add onInitSegmentReady callback for init segment (init.mp4)
- Update onVideoChunkReady to include segment duration
- RecordingSession now uses FragmentedRecordingManager by default

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-02 10:02:51 -08:00
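Note: the init segment plus numbered, duration-tagged segments map directly onto an HLS media playlist. A hedged sketch of how a consumer of the new callbacks could assemble one (the playlist builder is illustrative and not part of this change set):

```kotlin
import java.io.File
import java.util.Locale

// Illustrative only: collect init.mp4 plus numbered segments (with durations
// reported in microseconds) into an HLS media playlist.
class HlsPlaylistBuilder(private val targetDurationSec: Int = 6) {
    private val segments = mutableListOf<Pair<File, Double>>()
    private var initSegment: File? = null

    fun onInitSegmentReady(filepath: File) { initSegment = filepath }

    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
        segments.add(filepath to (durationUs ?: 0L) / 1_000_000.0)
    }

    fun build(): String = buildString {
        appendLine("#EXTM3U")
        appendLine("#EXT-X-VERSION:7")
        appendLine("#EXT-X-TARGETDURATION:$targetDurationSec")
        initSegment?.let { appendLine("#EXT-X-MAP:URI=\"${it.name}\"") }
        for ((file, durationSec) in segments) {
            appendLine("#EXTINF:${"%.3f".format(Locale.US, durationSec)},")
            appendLine(file.name)
        }
        appendLine("#EXT-X-ENDLIST")
    }
}
```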
8d06ab9e66 Merge pull request 'Strip file prefix ( for expo-file-system)' (#15) from loewy/stripe-file-prefix into main
Reviewed-on: #15
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2025-12-26 17:26:42 +00:00
f6b6cfb3d5 strip file prefix 2025-12-19 12:55:41 -08:00
3ac555a2b3 Merge pull request 'Stop recording on init write failure' (#14) from loewy/stop-recording-on-init-write-failure into main
Reviewed-on: #14
2025-12-19 20:53:06 +00:00
7e1e074e0f force recording to stop on init write failure and fix silent failure 2025-12-18 13:29:31 -08:00
15 changed files with 1361 additions and 41 deletions

View File

@@ -40,15 +40,26 @@ fun CameraView.invokeOnStopped() {
this.sendEvent(event)
}
fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
Log.e(CameraView.TAG, "invokeOnError(...):")
fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
val event = Arguments.createMap()
event.putInt("index", index)
event.putString("filepath", filepath.toString())
if (durationUs != null) {
event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
}
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
}
fun CameraView.invokeOnInitReady(filepath: File) {
Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
val event = Arguments.createMap()
event.putString("filepath", filepath.toString())
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
}
fun CameraView.invokeOnError(error: Throwable) {
Log.e(CameraView.TAG, "invokeOnError(...):")
error.printStackTrace()

View File

@@ -271,8 +271,12 @@ class CameraView(context: Context) :
invokeOnStopped()
}
override fun onVideoChunkReady(filepath: File, index: Int) {
invokeOnChunkReady(filepath, index)
override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
invokeOnChunkReady(filepath, index, durationUs)
}
override fun onInitSegmentReady(filepath: File) {
invokeOnInitReady(filepath)
}
override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {

View File

@@ -32,6 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
.build()?.toMutableMap()
override fun getName(): String = TAG

View File

@@ -428,19 +428,21 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
// Get actual device rotation from WindowManager since the React Native orientation hook
// doesn't update when rotating between landscape-left and landscape-right on Android.
// Map device rotation to the correct orientationHint for video recording:
// - Counter-clockwise (ROTATION_90) → 270° hint
// - Clockwise (ROTATION_270) → 90° hint
// Map device rotation to the correct orientation for video recording.
// Surface.ROTATION_90 = device rotated 90° CCW = phone top on LEFT = LANDSCAPE_LEFT
// Surface.ROTATION_270 = device rotated 90° CW = phone top on RIGHT = LANDSCAPE_RIGHT
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val recordingOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
else -> Orientation.PORTRAIT
}
Log.i(TAG, "startRecording: orientation=${recordingOrientation.toDegrees()}° (deviceRotation=$deviceRotation)")
val recording = RecordingSession(
context,
cameraId,
@@ -448,7 +450,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
enableAudio,
fps,
videoOutput.enableHdr,
orientation,
recordingOrientation,
options,
filePath,
callback,
@@ -513,7 +515,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
fun onInitialized()
fun onStarted()
fun onStopped()
fun onVideoChunkReady(filepath: File, index: Int)
fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
fun onInitSegmentReady(filepath: File)
fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
}
}

View File

@@ -14,7 +14,7 @@ import java.io.File
import java.nio.ByteBuffer
class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
MediaCodec.Callback() {
MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "ChunkedRecorder"
@@ -30,8 +30,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
iFrameInterval: Int = 5
): ChunkedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees()
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees();
// Use cameraOrientation (from WindowManager) for rotation metadata
// The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
// Note: MediaMuxer.setOrientationHint() uses opposite convention from HlsMuxer's rotation matrix
// We need to invert the rotation: 90 <-> 270, while 0 and 180 stay the same
val orientationDegrees = when (cameraOrientation.toDegrees()) {
90 -> 270
270 -> 90
else -> cameraOrientation.toDegrees()
}
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
@@ -55,12 +62,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
Log.d(TAG, "Video Format: $format, orientation: $orientationDegrees")
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return ChunkedRecordingManager(
codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks
codec, outputDirectory, orientationDegrees, iFrameInterval, callbacks
)
}
}
@@ -73,7 +80,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
private val targetDurationUs = iFrameInterval * 1000000
val surface: Surface = encoder.createInputSurface()
override val surface: Surface = encoder.createInputSurface()
init {
if (!this.outputDirectory.exists()) {
@@ -91,11 +98,14 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
muxer.start()
}
fun writeSample(buffer: java.nio.ByteBuffer, bufferInfo: BufferInfo) {
muxer.writeSampleData(videoTrack, buffer, bufferInfo)
}
fun finish() {
muxer.stop()
muxer.release()
callbacks.onVideoChunkReady(filepath, chunkIndex)
callbacks.onVideoChunkReady(filepath, chunkIndex, null)
}
}
@@ -133,12 +143,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
return bufferInfo.presentationTimeUs - context.startTimeUs
}
fun start() {
override fun start() {
encoder.start()
recording = true
}
fun finish() {
override fun finish() {
synchronized(this) {
muxerContext?.finish()
recording = false
@@ -168,7 +178,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
encoder.releaseOutputBuffer(index, false)
return
}
context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
context.writeSample(encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false)
}
}

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.core
import android.view.Surface
/**
* Common interface for chunked video recorders.
* Implemented by both ChunkedRecordingManager (regular MP4) and
* FragmentedRecordingManager (HLS-compatible fMP4).
*/
interface ChunkedRecorderInterface {
val surface: Surface
fun start()
fun finish()
}

View File

@@ -0,0 +1,176 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
/**
* A recording manager that produces HLS-compatible fragmented MP4 segments.
*
* Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
* - init.mp4: Initialization segment (ftyp + moov with mvex)
* - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
*/
class FragmentedRecordingManager(
private val encoder: MediaCodec,
private val muxer: HlsMuxer
) : MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "FragmentedRecorder"
private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6
fun fromParams(
callbacks: CameraSession.Callback,
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
// Use cameraOrientation (from WindowManager) for rotation metadata
// The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
val orientationDegrees = cameraOrientation.toDegrees()
// Swap dimensions based on camera orientation, same as ChunkedRecordingManager
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees°")
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
// Use 30fps as conservative default since many Android devices can't sustain
// higher frame rates at high resolutions. This affects:
// - Encoder: bitrate allocation and I-frame interval calculation
// - HlsMuxer: timescale for accurate sample durations
// The actual frame timing comes from camera timestamps regardless of this setting.
val effectiveFps = 30
format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val muxer = HlsMuxer(
outputDirectory = outputDirectory,
callback = object : HlsMuxer.Callback {
override fun onInitSegmentReady(file: File) {
callbacks.onInitSegmentReady(file)
}
override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
callbacks.onVideoChunkReady(file, index, durationUs)
}
},
orientationDegrees = orientationDegrees,
fps = effectiveFps
)
muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)
return FragmentedRecordingManager(codec, muxer)
}
}
private var recording = false
private var muxerStarted = false
private var trackIndex = -1
override val surface: Surface = encoder.createInputSurface()
init {
encoder.setCallback(this)
}
override fun start() {
encoder.start()
recording = true
}
override fun finish() {
synchronized(this) {
recording = false
if (muxerStarted) {
muxer.stop()
muxer.release()
}
try {
encoder.stop()
encoder.release()
} catch (e: Exception) {
Log.e(TAG, "Error stopping encoder", e)
}
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Not used for Surface input
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
synchronized(this) {
if (!recording) {
encoder.releaseOutputBuffer(index, false)
return
}
if (!muxerStarted) {
encoder.releaseOutputBuffer(index, false)
return
}
val buffer = encoder.getOutputBuffer(index)
if (buffer == null) {
Log.e(TAG, "getOutputBuffer returned null")
encoder.releaseOutputBuffer(index, false)
return
}
try {
muxer.writeSampleData(trackIndex, buffer, bufferInfo)
} catch (e: Exception) {
Log.e(TAG, "Error writing sample", e)
}
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
synchronized(this) {
Log.i(TAG, "Output format changed: $format")
trackIndex = muxer.addTrack(format)
muxer.start()
muxerStarted = true
}
}
}

File diff suppressed because it is too large
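Note: the suppressed file is most likely the new HlsMuxer itself. Its surface can be inferred from the call sites in FragmentedRecordingManager above; a sketch of that inferred API (signatures reconstructed from usage, not copied from the suppressed diff):

```kotlin
import android.media.MediaCodec
import android.media.MediaFormat
import java.io.File
import java.nio.ByteBuffer

// Inferred from how FragmentedRecordingManager drives the muxer; the real
// HlsMuxer class in the suppressed diff may differ in detail.
interface HlsMuxerApi {
    interface Callback {
        fun onInitSegmentReady(file: File)
        fun onMediaSegmentReady(file: File, index: Int, durationUs: Long)
    }

    fun setSegmentDuration(durationUs: Long)    // e.g. 6_000_000 for 6-second segments
    fun addTrack(format: MediaFormat): Int      // returns the track index
    fun start()                                 // presumably emits init.mp4 (ftyp + moov/mvex)
    fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo)
    fun stop()                                  // presumably flushes the final moof/mdat segment
    fun release()
}
```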

View File

@@ -8,12 +8,14 @@ import com.facebook.common.statfs.StatFsHelper
import com.mrousavy.camera.extensions.getRecommendedBitRate
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import com.mrousavy.camera.types.StreamSegmentType
import com.mrousavy.camera.utils.FileUtils
import java.io.File
import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
class RecordingSession(
context: Context,
val cameraId: String,
@@ -26,7 +28,7 @@ class RecordingSession(
private val filePath: String,
private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit,
private val allCallbacks: CameraSession.Callback,
private val allCallbacks: CameraSession.Callback
) {
companion object {
private const val TAG = "RecordingSession"
@@ -34,23 +36,45 @@ class RecordingSession(
private const val AUDIO_SAMPLING_RATE = 44_100
private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
private const val AUDIO_CHANNELS = 1
// Segment duration in seconds (matching iOS default of 6 seconds)
private const val SEGMENT_DURATION_SECONDS = 6
}
data class Video(val path: String, val durationMs: Long, val size: Size)
private val outputPath: File = File(filePath)
// Normalize path - expo-file-system passes file:// URIs but File expects raw paths
// Handle both file:// and file:/ variants
private val outputPath: File = File(filePath.replace(Regex("^file:/+"), "/"))
private val bitRate = getBitRate()
private val recorder = ChunkedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath
)
// Use FragmentedRecordingManager for HLS-compatible fMP4 output,
// or fall back to ChunkedRecordingManager for regular MP4 chunks
private val recorder: ChunkedRecorderInterface = if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
FragmentedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath,
SEGMENT_DURATION_SECONDS
)
} else {
ChunkedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath
)
}
private var startTime: Long? = null
val surface: Surface
get() {
@@ -59,7 +83,7 @@ class RecordingSession(
fun start() {
synchronized(this) {
Log.i(TAG, "Starting RecordingSession..")
Log.i(TAG, "Starting RecordingSession with ${options.streamSegmentType} recorder..")
startTime = System.currentTimeMillis()
recorder.start()
}

View File

@@ -9,6 +9,7 @@ class RecordVideoOptions(map: ReadableMap) {
var videoBitRateOverride: Double? = null
var videoBitRateMultiplier: Double? = null
var orientation: Orientation? = null
var streamSegmentType: StreamSegmentType = StreamSegmentType.FRAGMENTED_MP4
init {
if (map.hasKey("fileType")) {
@@ -29,5 +30,8 @@ class RecordVideoOptions(map: ReadableMap) {
if (map.hasKey("orientation")) {
orientation = Orientation.fromUnionValue(map.getString("orientation"))
}
if (map.hasKey("streamSegmentType")) {
streamSegmentType = StreamSegmentType.fromUnionValue(map.getString("streamSegmentType"))
}
}
}

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.types
enum class StreamSegmentType(override val unionValue: String) : JSUnionValue {
FRAGMENTED_MP4("FRAGMENTED_MP4"),
RB_CHUNKED_MP4("RB_CHUNKED_MP4");
companion object : JSUnionValue.Companion<StreamSegmentType> {
override fun fromUnionValue(unionValue: String?): StreamSegmentType =
when (unionValue) {
"FRAGMENTED_MP4" -> FRAGMENTED_MP4
"RB_CHUNKED_MP4" -> RB_CHUNKED_MP4
else -> FRAGMENTED_MP4 // Default to fMP4
}
}
}

View File

@@ -20,6 +20,9 @@ extension CameraSession {
onError: @escaping (_ error: CameraError) -> Void) {
// Run on Camera Queue
CameraQueues.cameraQueue.async {
// Normalize path - expo-file-system passes file:// URIs but FileManager expects raw paths
let normalizedPath = filePath.hasPrefix("file://") ? String(filePath.dropFirst(7)) : filePath
let start = DispatchTime.now()
ReactLogger.log(level: .info, message: "Starting Video recording...")
@@ -38,11 +41,27 @@ extension CameraSession {
// Callback for when new chunks are ready
let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
guard let delegate = self.delegate else {
ReactLogger.log(level: .warning, message: "Chunk ready but delegate is nil, dropping chunk: \(chunk)")
return
}
delegate.onVideoChunkReady(chunk: chunk)
}
// Callback for when a chunk write fails (e.g. init file write failure)
let onChunkError: (Error) -> Void = { error in
ReactLogger.log(level: .error, message: "Chunk write error, stopping recording: \(error.localizedDescription)")
// Stop recording immediately
if let session = self.recordingSession {
session.stop(clock: self.captureSession.clock)
}
// Surface error to RN
if let cameraError = error as? CameraError {
onError(cameraError)
} else {
onError(.capture(.fileError))
}
}
// Callback for when the recording ends
let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
defer {
@@ -82,22 +101,23 @@ extension CameraSession {
}
}
if !FileManager.default.fileExists(atPath: filePath) {
if !FileManager.default.fileExists(atPath: normalizedPath) {
do {
try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true)
try FileManager.default.createDirectory(atPath: normalizedPath, withIntermediateDirectories: true)
} catch {
onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription)))
return
}
}
ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)")
ReactLogger.log(level: .info, message: "Will record to temporary file: \(normalizedPath)")
do {
// Create RecordingSession for the temp file
let recordingSession = try RecordingSession(outputDiretory: filePath,
let recordingSession = try RecordingSession(outputDiretory: normalizedPath,
fileType: options.fileType,
onChunkReady: onChunkReady,
onChunkError: onChunkError,
completion: onFinish)
// Init Audio + Activate Audio Session (optional)

View File

@@ -24,12 +24,14 @@ class ChunkedRecorder: NSObject {
let outputURL: URL
let onChunkReady: ((Chunk) -> Void)
let onError: ((Error) -> Void)?
private var chunkIndex: UInt64 = 0
init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void), onError: ((Error) -> Void)? = nil) throws {
self.outputURL = outputURL
self.onChunkReady = onChunkReady
self.onError = onError
guard FileManager.default.fileExists(atPath: outputURL.path) else {
throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
}
@@ -56,8 +58,12 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
private func saveInitSegment(_ data: Data) {
let url = outputURL.appendingPathComponent("init.mp4")
if save(data: data, url: url) {
do {
try data.write(to: url)
onChunkReady(url: url, type: .initialization)
} catch {
ReactLogger.log(level: .error, message: "Failed to write init file \(url): \(error.localizedDescription)")
onError?(CameraError.capture(.fileError))
}
}

View File

@@ -74,12 +74,13 @@ class RecordingSession {
init(outputDiretory: String,
fileType: AVFileType,
onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void),
onChunkError: ((Error) -> Void)? = nil,
completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
completionHandler = completion
do {
let outputURL = URL(fileURLWithPath: outputDiretory)
recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady)
recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady, onError: onChunkError)
assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!)
assetWriter.shouldOptimizeForNetworkUse = false
assetWriter.outputFileTypeProfile = .mpeg4AppleHLS

View File

@@ -41,6 +41,17 @@ export interface RecordVideoOptions {
* @default 'normal'
*/
videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number
/**
* The stream segment type for recording on Android.
* - `FRAGMENTED_MP4`: HLS-compatible segments (init.mp4 + numbered segments)
* - `RB_CHUNKED_MP4`: Legacy chunked MP4 format
*
* iOS always uses FRAGMENTED_MP4 regardless of this setting.
*
* @platform android
* @default 'FRAGMENTED_MP4'
*/
streamSegmentType?: 'FRAGMENTED_MP4' | 'RB_CHUNKED_MP4'
}
/**