Compare commits
22 Commits
dean/fix-a ... fix/hlsmux

| SHA1 |
|---|
| dd26812a9c |
| b716608379 |
| 0ecc3d8210 |
| 309e1e9457 |
| 71b08e6898 |
| 699481f6f8 |
| 11ce9ba8f6 |
| dd9de38a7d |
| 3f5d0a2109 |
| 6c2319608d |
| 27f127fe94 |
| 92b29cbd78 |
| fb23c57a6c |
| 8d06ab9e66 |
| f6b6cfb3d5 |
| 3ac555a2b3 |
| 7e1e074e0f |
| b269e9c493 |
| 5fe7f35127 |
| 61863149c0 |
| 09b50938d2 |
| a158ed8350 |
@@ -40,15 +40,26 @@ fun CameraView.invokeOnStopped() {
   this.sendEvent(event)
 }
 
-fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
-  Log.e(CameraView.TAG, "invokeOnError(...):")
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
+  Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
   val event = Arguments.createMap()
   event.putInt("index", index)
   event.putString("filepath", filepath.toString())
+  if (durationUs != null) {
+    event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
+  }
   val reactContext = context as ReactContext
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
 }
+
+fun CameraView.invokeOnInitReady(filepath: File) {
+  Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
+  val event = Arguments.createMap()
+  event.putString("filepath", filepath.toString())
+  val reactContext = context as ReactContext
+  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
+}
 
 fun CameraView.invokeOnError(error: Throwable) {
   Log.e(CameraView.TAG, "invokeOnError(...):")
   error.printStackTrace()
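Editor's note: the hunk above threads an optional chunk duration (reported in microseconds by the recorder) through to JavaScript as seconds. A minimal sketch of the payload shape and conversion, using a plain data class instead of the React Native WritableMap; ChunkEventPayload and buildChunkPayload are illustrative names, not part of the PR:

```kotlin
// Hypothetical illustration only: mirrors the fields invokeOnChunkReady() puts on the event.
data class ChunkEventPayload(
  val index: Int,
  val filepath: String,
  val durationSeconds: Double? // null when the recorder cannot report a duration
)

fun buildChunkPayload(filepath: String, index: Int, durationUs: Long?) =
  ChunkEventPayload(
    index = index,
    filepath = filepath,
    // Same conversion as the diff: microseconds -> seconds, e.g. 6_000_000 µs -> 6.0 s
    durationSeconds = durationUs?.let { it / 1_000_000.0 }
  )
```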
@@ -271,8 +271,12 @@ class CameraView(context: Context) :
     invokeOnStopped()
   }
 
-  override fun onVideoChunkReady(filepath: File, index: Int) {
-    invokeOnChunkReady(filepath, index)
+  override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
+    invokeOnChunkReady(filepath, index, durationUs)
   }
 
+  override fun onInitSegmentReady(filepath: File) {
+    invokeOnInitReady(filepath)
+  }
+
   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {
@@ -32,6 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
       .put("cameraError", MapBuilder.of("registrationName", "onError"))
       .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
       .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
+      .put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
       .build()?.toMutableMap()
 
   override fun getName(): String = TAG
@@ -15,6 +15,7 @@ import android.util.Log
 import android.util.Size
 import android.view.Surface
 import android.view.SurfaceHolder
+import android.view.WindowManager
 import androidx.core.content.ContextCompat
 import com.google.mlkit.vision.barcode.common.Barcode
 import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
@@ -425,6 +426,23 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
 
     val fps = configuration?.fps ?: 30
 
+    // Get actual device rotation from WindowManager since the React Native orientation hook
+    // doesn't update when rotating between landscape-left and landscape-right on Android.
+    // Map device rotation to the correct orientation for video recording.
+    // Surface.ROTATION_90 = device rotated 90° CCW = phone top on LEFT = LANDSCAPE_LEFT
+    // Surface.ROTATION_270 = device rotated 90° CW = phone top on RIGHT = LANDSCAPE_RIGHT
+    val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+    val deviceRotation = windowManager.defaultDisplay.rotation
+    val recordingOrientation = when (deviceRotation) {
+      Surface.ROTATION_0 -> Orientation.PORTRAIT
+      Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
+      Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+      Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
+      else -> Orientation.PORTRAIT
+    }
+
+    Log.i(TAG, "startRecording: orientation=${recordingOrientation.toDegrees()}° (deviceRotation=$deviceRotation)")
+
     val recording = RecordingSession(
       context,
       cameraId,
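Editor's note: the same ROTATION_* to Orientation mapping appears again in PreviewView later in this compare, so it can be factored as a pure function and unit-tested without a WindowManager. A minimal sketch, assuming the existing Orientation enum from com.mrousavy.camera.types; the helper name is illustrative only:

```kotlin
import android.view.Surface
import com.mrousavy.camera.types.Orientation

// Sketch only: mirrors the when-expression used in startRecording above.
fun orientationForDeviceRotation(deviceRotation: Int): Orientation = when (deviceRotation) {
  Surface.ROTATION_0 -> Orientation.PORTRAIT
  Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT    // rotated 90° CCW, top of phone on the left
  Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
  Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT  // rotated 90° CW, top of phone on the right
  else -> Orientation.PORTRAIT
}
```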
@@ -432,7 +450,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
       enableAudio,
       fps,
       videoOutput.enableHdr,
-      orientation,
+      recordingOrientation,
       options,
       filePath,
       callback,
@@ -497,7 +515,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     fun onInitialized()
     fun onStarted()
     fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int)
+    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
+    fun onInitSegmentReady(filepath: File)
     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
   }
 }
@@ -14,7 +14,7 @@ import java.io.File
 import java.nio.ByteBuffer
 
 class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
-  MediaCodec.Callback() {
+  MediaCodec.Callback(), ChunkedRecorderInterface {
   companion object {
     private const val TAG = "ChunkedRecorder"
 
@@ -30,8 +30,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       iFrameInterval: Int = 5
     ): ChunkedRecordingManager {
       val mimeType = options.videoCodec.toMimeType()
-      val cameraOrientationDegrees = cameraOrientation.toDegrees()
-      val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees();
+      // Use cameraOrientation (from WindowManager) for rotation metadata
+      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
+      // Note: MediaMuxer.setOrientationHint() uses opposite convention from HlsMuxer's rotation matrix
+      // We need to invert the rotation: 90 <-> 270, while 0 and 180 stay the same
+      val orientationDegrees = when (cameraOrientation.toDegrees()) {
+        90 -> 270
+        270 -> 90
+        else -> cameraOrientation.toDegrees()
+      }
       val (width, height) = if (cameraOrientation.isLandscape()) {
         size.height to size.width
       } else {
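Editor's note: the 90 <-> 270 swap above is easy to get backwards. Written as a standalone helper it reads as below; this is a sketch that assumes the comment's premise that MediaMuxer.setOrientationHint() and the HlsMuxer rotation matrix interpret rotation in opposite directions, and the function name is illustrative only:

```kotlin
// Sketch: invert the rotation hint passed to MediaMuxer. 0 and 180 are symmetric under the swap.
fun invertedMuxerOrientationHint(cameraOrientationDegrees: Int): Int = when (cameraOrientationDegrees) {
  90 -> 270
  270 -> 90
  else -> cameraOrientationDegrees
}

// e.g. invertedMuxerOrientationHint(90) == 270, invertedMuxerOrientationHint(180) == 180
```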
@@ -55,12 +62,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
       format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
 
-      Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
+      Log.d(TAG, "Video Format: $format, orientation: $orientationDegrees")
       // Create a MediaCodec encoder, and configure it with our format. Get a Surface
       // we can use for input and wrap it with a class that handles the EGL work.
       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
       return ChunkedRecordingManager(
-        codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks
+        codec, outputDirectory, orientationDegrees, iFrameInterval, callbacks
       )
     }
   }
@@ -73,7 +80,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
 
   private val targetDurationUs = iFrameInterval * 1000000
 
-  val surface: Surface = encoder.createInputSurface()
+  override val surface: Surface = encoder.createInputSurface()
 
   init {
     if (!this.outputDirectory.exists()) {
@@ -91,11 +98,14 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       muxer.start()
     }
 
+    fun writeSample(buffer: java.nio.ByteBuffer, bufferInfo: BufferInfo) {
+      muxer.writeSampleData(videoTrack, buffer, bufferInfo)
+    }
+
     fun finish() {
       muxer.stop()
       muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex)
+      callbacks.onVideoChunkReady(filepath, chunkIndex, null)
     }
   }
@@ -133,12 +143,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     return bufferInfo.presentationTimeUs - context.startTimeUs
   }
 
-  fun start() {
+  override fun start() {
     encoder.start()
     recording = true
   }
 
-  fun finish() {
+  override fun finish() {
     synchronized(this) {
       muxerContext?.finish()
       recording = false
@@ -168,7 +178,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
         encoder.releaseOutputBuffer(index, false)
         return
       }
-      context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
+      context.writeSample(encodedData, bufferInfo)
       encoder.releaseOutputBuffer(index, false)
     }
   }
@@ -0,0 +1,15 @@
+package com.mrousavy.camera.core
+
+import android.view.Surface
+
+/**
+ * Common interface for chunked video recorders.
+ * Implemented by both ChunkedRecordingManager (regular MP4) and
+ * FragmentedRecordingManager (HLS-compatible fMP4).
+ */
+interface ChunkedRecorderInterface {
+  val surface: Surface
+
+  fun start()
+  fun finish()
+}
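Editor's note: with recording behind this small interface, the caller only needs a Surface to feed plus start/finish calls, which makes the wiring easy to exercise with a fake. A hypothetical sketch of a test double, not part of the PR:

```kotlin
import android.view.Surface

// Hypothetical test double: satisfies ChunkedRecorderInterface without encoding anything.
class NoOpRecorder(override val surface: Surface) : ChunkedRecorderInterface {
  override fun start() { /* no encoder to start */ }
  override fun finish() { /* nothing to flush */ }
}
```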
@@ -0,0 +1,176 @@
+package com.mrousavy.camera.core
+
+import android.media.MediaCodec
+import android.media.MediaCodec.BufferInfo
+import android.media.MediaCodecInfo
+import android.media.MediaFormat
+import android.util.Log
+import android.util.Size
+import android.view.Surface
+import com.mrousavy.camera.types.Orientation
+import com.mrousavy.camera.types.RecordVideoOptions
+import java.io.File
+
+/**
+ * A recording manager that produces HLS-compatible fragmented MP4 segments.
+ *
+ * Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
+ * - init.mp4: Initialization segment (ftyp + moov with mvex)
+ * - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
+ */
+class FragmentedRecordingManager(
+  private val encoder: MediaCodec,
+  private val muxer: HlsMuxer
+) : MediaCodec.Callback(), ChunkedRecorderInterface {
+
+  companion object {
+    private const val TAG = "FragmentedRecorder"
+    private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6
+
+    fun fromParams(
+      callbacks: CameraSession.Callback,
+      size: Size,
+      enableAudio: Boolean,
+      fps: Int? = null,
+      cameraOrientation: Orientation,
+      bitRate: Int,
+      options: RecordVideoOptions,
+      outputDirectory: File,
+      segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
+    ): FragmentedRecordingManager {
+      val mimeType = options.videoCodec.toMimeType()
+      // Use cameraOrientation (from WindowManager) for rotation metadata
+      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
+      val orientationDegrees = cameraOrientation.toDegrees()
+
+      // Swap dimensions based on camera orientation, same as ChunkedRecordingManager
+      val (width, height) = if (cameraOrientation.isLandscape()) {
+        size.height to size.width
+      } else {
+        size.width to size.height
+      }
+
+      Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees°")
+
+      val format = MediaFormat.createVideoFormat(mimeType, width, height)
+      val codec = MediaCodec.createEncoderByType(mimeType)
+
+      format.setInteger(
+        MediaFormat.KEY_COLOR_FORMAT,
+        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
+      )
+
+      // Use 30fps as conservative default since many Android devices can't sustain
+      // higher frame rates at high resolutions. This affects:
+      // - Encoder: bitrate allocation and I-frame interval calculation
+      // - HlsMuxer: timescale for accurate sample durations
+      // The actual frame timing comes from camera timestamps regardless of this setting.
+      val effectiveFps = 30
+      format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
+      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
+      format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
+
+      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
+
+      val muxer = HlsMuxer(
+        outputDirectory = outputDirectory,
+        callback = object : HlsMuxer.Callback {
+          override fun onInitSegmentReady(file: File) {
+            callbacks.onInitSegmentReady(file)
+          }
+
+          override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
+            callbacks.onVideoChunkReady(file, index, durationUs)
+          }
+        },
+        orientationDegrees = orientationDegrees,
+        fps = effectiveFps
+      )
+      muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)
+
+      return FragmentedRecordingManager(codec, muxer)
+    }
+  }
+
+  private var recording = false
+  private var muxerStarted = false
+  private var trackIndex = -1
+
+  override val surface: Surface = encoder.createInputSurface()
+
+  init {
+    encoder.setCallback(this)
+  }
+
+  override fun start() {
+    encoder.start()
+    recording = true
+  }
+
+  override fun finish() {
+    synchronized(this) {
+      recording = false
+
+      if (muxerStarted) {
+        muxer.stop()
+        muxer.release()
+      }
+
+      try {
+        encoder.stop()
+        encoder.release()
+      } catch (e: Exception) {
+        Log.e(TAG, "Error stopping encoder", e)
+      }
+    }
+  }
+
+  // MediaCodec.Callback methods
+
+  override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
+    // Not used for Surface input
+  }
+
+  override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
+    synchronized(this) {
+      if (!recording) {
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      if (!muxerStarted) {
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      val buffer = encoder.getOutputBuffer(index)
+      if (buffer == null) {
+        Log.e(TAG, "getOutputBuffer returned null")
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+
+      try {
+        muxer.writeSampleData(trackIndex, buffer, bufferInfo)
+      } catch (e: Exception) {
+        Log.e(TAG, "Error writing sample", e)
+      }
+
+      encoder.releaseOutputBuffer(index, false)
+    }
+  }
+
+  override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
+    Log.e(TAG, "Codec error: ${e.message}")
+  }
+
+  override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
+    synchronized(this) {
+      Log.i(TAG, "Output format changed: $format")
+
+      trackIndex = muxer.addTrack(format)
+      muxer.start()
+      muxerStarted = true
+    }
+  }
+}
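Editor's note: the init.mp4 plus numbered-segment naming and the per-segment durations reported through onMediaSegmentReady are exactly the inputs an HLS media playlist needs. A hedged sketch of a consumer that turns those callbacks into an index.m3u8 referencing the init segment via EXT-X-MAP; the HlsPlaylistWriter class below is hypothetical and not part of this PR:

```kotlin
import java.io.File
import java.util.Locale

// Hypothetical helper: collects segment callbacks and renders an HLS media playlist.
class HlsPlaylistWriter(private val directory: File, private val targetDurationSeconds: Int = 6) {
  private data class Segment(val name: String, val durationSeconds: Double)
  private val segments = mutableListOf<Segment>()

  // Call this from onMediaSegmentReady(file, index, durationUs)
  fun addSegment(file: File, durationUs: Long) {
    segments += Segment(file.name, durationUs / 1_000_000.0)
    File(directory, "index.m3u8").writeText(render(ended = false))
  }

  fun finish() {
    File(directory, "index.m3u8").writeText(render(ended = true))
  }

  private fun render(ended: Boolean): String = buildString {
    appendLine("#EXTM3U")
    appendLine("#EXT-X-VERSION:7")
    appendLine("#EXT-X-TARGETDURATION:$targetDurationSeconds")
    appendLine("#EXT-X-MEDIA-SEQUENCE:0")
    appendLine("#EXT-X-MAP:URI=\"init.mp4\"")
    for (segment in segments) {
      appendLine("#EXTINF:${"%.3f".format(Locale.US, segment.durationSeconds)},")
      appendLine(segment.name)
    }
    if (ended) appendLine("#EXT-X-ENDLIST")
  }
}
```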
package/android/src/main/java/com/mrousavy/camera/core/HlsMuxer.kt (new file, 1019 lines)
File diff suppressed because it is too large.
@@ -9,8 +9,10 @@ import android.os.Looper
 import android.util.Log
 import android.util.Size
 import android.view.PixelCopy
+import android.view.Surface
 import android.view.SurfaceHolder
 import android.view.SurfaceView
+import android.view.WindowManager
 import com.facebook.react.bridge.UiThreadUtil
 import com.mrousavy.camera.extensions.resize
 import com.mrousavy.camera.extensions.rotatedBy
@@ -150,6 +152,8 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
     val width = frame.width()
     val height = frame.height()
 
+    // Create bitmap matching surface frame dimensions for PixelCopy
+    // The original code swapped dimensions assuming landscape input - keep that for consistency
     val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
 
     // Use a coroutine to suspend until the PixelCopy request is complete
@@ -159,7 +163,23 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
       bitmap,
       { copyResult ->
         if (copyResult == PixelCopy.SUCCESS) {
-          continuation.resume(rotateBitmap90CounterClockwise(bitmap))
+          // Get actual device rotation from WindowManager instead of relying on
+          // the orientation prop, which may not update on Android when rotating
+          // between landscape-left and landscape-right.
+          val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+          val deviceRotation = windowManager.defaultDisplay.rotation
+
+          val actualOrientation = when (deviceRotation) {
+            Surface.ROTATION_0 -> Orientation.PORTRAIT
+            Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
+            Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+            Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
+            else -> Orientation.PORTRAIT
+          }
+
+          Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
+
+          continuation.resume(bitmap.transformBitmap(actualOrientation))
         } else {
           continuation.resumeWithException(
             RuntimeException("PixelCopy failed with error code $copyResult")
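Editor's note: bitmap.transformBitmap(actualOrientation) refers to an extension that lives elsewhere in the repo and is not shown in this compare. A hedged sketch of what an orientation-based rotation can look like with android.graphics.Matrix; the real extension may differ (for example it may also mirror for the front camera), and the function name here is illustrative:

```kotlin
import android.graphics.Bitmap
import android.graphics.Matrix
import com.mrousavy.camera.types.Orientation

// Sketch only: rotate a bitmap by the orientation's angle in degrees.
fun Bitmap.rotatedTo(orientation: Orientation): Bitmap {
  val degrees = orientation.toDegrees().toFloat()
  if (degrees == 0f) return this
  val matrix = Matrix().apply { postRotate(degrees) }
  return Bitmap.createBitmap(this, 0, 0, width, height, matrix, true)
}
```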
@@ -8,12 +8,14 @@ import com.facebook.common.statfs.StatFsHelper
 import com.mrousavy.camera.extensions.getRecommendedBitRate
 import com.mrousavy.camera.types.Orientation
 import com.mrousavy.camera.types.RecordVideoOptions
+import com.mrousavy.camera.types.StreamSegmentType
 import com.mrousavy.camera.utils.FileUtils
 import java.io.File
 import android.os.Environment
 import java.text.SimpleDateFormat
 import java.util.Locale
 import java.util.Date
 
 class RecordingSession(
   context: Context,
   val cameraId: String,
@@ -26,7 +28,7 @@ class RecordingSession(
   private val filePath: String,
   private val callback: (video: Video) -> Unit,
   private val onError: (error: CameraError) -> Unit,
-  private val allCallbacks: CameraSession.Callback,
+  private val allCallbacks: CameraSession.Callback
 ) {
   companion object {
     private const val TAG = "RecordingSession"
@@ -34,14 +36,35 @@ class RecordingSession(
     private const val AUDIO_SAMPLING_RATE = 44_100
     private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
     private const val AUDIO_CHANNELS = 1
+
+    // Segment duration in seconds (matching iOS default of 6 seconds)
+    private const val SEGMENT_DURATION_SECONDS = 6
   }
 
   data class Video(val path: String, val durationMs: Long, val size: Size)
 
-  private val outputPath: File = File(filePath)
+  // Normalize path - expo-file-system passes file:// URIs but File expects raw paths
+  // Handle both file:// and file:/ variants
+  private val outputPath: File = File(filePath.replace(Regex("^file:/+"), "/"))
 
   private val bitRate = getBitRate()
-  private val recorder = ChunkedRecordingManager.fromParams(
+
+  // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
+  // or fall back to ChunkedRecordingManager for regular MP4 chunks
+  private val recorder: ChunkedRecorderInterface = if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
+    FragmentedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath,
+      SEGMENT_DURATION_SECONDS
+    )
+  } else {
+    ChunkedRecordingManager.fromParams(
       allCallbacks,
       size,
       enableAudio,
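Editor's note: the Regex("^file:/+") normalization above accepts both URI spellings that expo-file-system may hand over as well as plain paths. A small sketch showing the behaviour on sample inputs; the helper name and example paths are illustrative only:

```kotlin
// Sketch: the same replacement RecordingSession applies to filePath.
fun normalizeFilePath(filePath: String): String = filePath.replace(Regex("^file:/+"), "/")

fun main() {
  println(normalizeFilePath("file:///data/user/0/app/cache/rec"))  // /data/user/0/app/cache/rec
  println(normalizeFilePath("file:/data/user/0/app/cache/rec"))    // /data/user/0/app/cache/rec
  println(normalizeFilePath("/data/user/0/app/cache/rec"))         // unchanged
}
```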
@@ -51,6 +74,7 @@ class RecordingSession(
       options,
       outputPath
     )
+  }
   private var startTime: Long? = null
   val surface: Surface
     get() {
@@ -59,7 +83,7 @@ class RecordingSession(
 
   fun start() {
     synchronized(this) {
-      Log.i(TAG, "Starting RecordingSession..")
+      Log.i(TAG, "Starting RecordingSession with ${options.streamSegmentType} recorder..")
       startTime = System.currentTimeMillis()
       recorder.start()
     }
@@ -9,6 +9,7 @@ class RecordVideoOptions(map: ReadableMap) {
   var videoBitRateOverride: Double? = null
   var videoBitRateMultiplier: Double? = null
   var orientation: Orientation? = null
+  var streamSegmentType: StreamSegmentType = StreamSegmentType.FRAGMENTED_MP4
 
   init {
     if (map.hasKey("fileType")) {
@@ -29,5 +30,8 @@ class RecordVideoOptions(map: ReadableMap) {
     if (map.hasKey("orientation")) {
       orientation = Orientation.fromUnionValue(map.getString("orientation"))
     }
+    if (map.hasKey("streamSegmentType")) {
+      streamSegmentType = StreamSegmentType.fromUnionValue(map.getString("streamSegmentType"))
+    }
   }
 }
@@ -0,0 +1,15 @@
+package com.mrousavy.camera.types
+
+enum class StreamSegmentType(override val unionValue: String) : JSUnionValue {
+  FRAGMENTED_MP4("FRAGMENTED_MP4"),
+  RB_CHUNKED_MP4("RB_CHUNKED_MP4");
+
+  companion object : JSUnionValue.Companion<StreamSegmentType> {
+    override fun fromUnionValue(unionValue: String?): StreamSegmentType =
+      when (unionValue) {
+        "FRAGMENTED_MP4" -> FRAGMENTED_MP4
+        "RB_CHUNKED_MP4" -> RB_CHUNKED_MP4
+        else -> FRAGMENTED_MP4 // Default to fMP4
+      }
+  }
+}
@@ -20,6 +20,9 @@ extension CameraSession {
                            onError: @escaping (_ error: CameraError) -> Void) {
     // Run on Camera Queue
     CameraQueues.cameraQueue.async {
+      // Normalize path - expo-file-system passes file:// URIs but FileManager expects raw paths
+      let normalizedPath = filePath.hasPrefix("file://") ? String(filePath.dropFirst(7)) : filePath
+
       let start = DispatchTime.now()
       ReactLogger.log(level: .info, message: "Starting Video recording...")
 
@@ -38,11 +41,27 @@ extension CameraSession {
       // Callback for when new chunks are ready
       let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
         guard let delegate = self.delegate else {
           ReactLogger.log(level: .warning, message: "Chunk ready but delegate is nil, dropping chunk: \(chunk)")
           return
         }
         delegate.onVideoChunkReady(chunk: chunk)
       }
 
+      // Callback for when a chunk write fails (e.g. init file write failure)
+      let onChunkError: (Error) -> Void = { error in
+        ReactLogger.log(level: .error, message: "Chunk write error, stopping recording: \(error.localizedDescription)")
+        // Stop recording immediately
+        if let session = self.recordingSession {
+          session.stop(clock: self.captureSession.clock)
+        }
+        // Surface error to RN
+        if let cameraError = error as? CameraError {
+          onError(cameraError)
+        } else {
+          onError(.capture(.fileError))
+        }
+      }
+
       // Callback for when the recording ends
       let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
         defer {
@@ -82,22 +101,23 @@ extension CameraSession {
         }
       }
 
-      if !FileManager.default.fileExists(atPath: filePath) {
+      if !FileManager.default.fileExists(atPath: normalizedPath) {
         do {
-          try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true)
+          try FileManager.default.createDirectory(atPath: normalizedPath, withIntermediateDirectories: true)
         } catch {
           onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription)))
           return
         }
       }
 
-      ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)")
+      ReactLogger.log(level: .info, message: "Will record to temporary file: \(normalizedPath)")
 
       do {
         // Create RecordingSession for the temp file
-        let recordingSession = try RecordingSession(outputDiretory: filePath,
+        let recordingSession = try RecordingSession(outputDiretory: normalizedPath,
                                                     fileType: options.fileType,
                                                     onChunkReady: onChunkReady,
+                                                    onChunkError: onChunkError,
                                                     completion: onFinish)
 
         // Init Audio + Activate Audio Session (optional)
@@ -24,12 +24,14 @@ class ChunkedRecorder: NSObject {
 
   let outputURL: URL
   let onChunkReady: ((Chunk) -> Void)
+  let onError: ((Error) -> Void)?
 
   private var chunkIndex: UInt64 = 0
 
-  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
+  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void), onError: ((Error) -> Void)? = nil) throws {
     self.outputURL = outputURL
     self.onChunkReady = onChunkReady
+    self.onError = onError
     guard FileManager.default.fileExists(atPath: outputURL.path) else {
       throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
     }
@@ -56,14 +58,19 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
 
   private func saveInitSegment(_ data: Data) {
     let url = outputURL.appendingPathComponent("init.mp4")
-    save(data: data, url: url)
+    do {
+      try data.write(to: url)
       onChunkReady(url: url, type: .initialization)
+    } catch {
+      ReactLogger.log(level: .error, message: "Failed to write init file \(url): \(error.localizedDescription)")
+      onError?(CameraError.capture(.fileError))
+    }
   }
 
   private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
     let name = "\(chunkIndex).mp4"
     let url = outputURL.appendingPathComponent(name)
-    save(data: data, url: url)
+    if save(data: data, url: url) {
       let duration = report?
         .trackReports
         .filter { $0.mediaType == .video }
@@ -72,12 +79,15 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
       onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
       chunkIndex += 1
+    }
   }
 
-  private func save(data: Data, url: URL) {
+  private func save(data: Data, url: URL) -> Bool {
     do {
       try data.write(to: url)
+      return true
     } catch {
       ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
+      return false
     }
   }
 
@@ -74,12 +74,13 @@ class RecordingSession {
   init(outputDiretory: String,
        fileType: AVFileType,
        onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void),
+       onChunkError: ((Error) -> Void)? = nil,
        completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
     completionHandler = completion
 
     do {
       let outputURL = URL(fileURLWithPath: outputDiretory)
-      recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady)
+      recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady, onError: onChunkError)
       assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!)
       assetWriter.shouldOptimizeForNetworkUse = false
       assetWriter.outputFileTypeProfile = .mpeg4AppleHLS
@@ -41,6 +41,17 @@ export interface RecordVideoOptions {
    * @default 'normal'
    */
   videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number
+  /**
+   * The stream segment type for recording on Android.
+   * - `FRAGMENTED_MP4`: HLS-compatible segments (init.mp4 + numbered segments)
+   * - `RB_CHUNKED_MP4`: Legacy chunked MP4 format
+   *
+   * iOS always uses FRAGMENTED_MP4 regardless of this setting.
+   *
+   * @platform android
+   * @default 'FRAGMENTED_MP4'
+   */
+  streamSegmentType?: 'FRAGMENTED_MP4' | 'RB_CHUNKED_MP4'
 }
 
 /**