Compare commits


1 Commit

Author SHA1 Message Date
ac06fa5f56 Bumps and fixes for react native version bump 2025-08-12 15:36:55 -06:00
23 changed files with 92 additions and 1475 deletions

View File

@@ -19,7 +19,9 @@ endif()
 # Add react-native-vision-camera sources
-set(SOURCES
+add_library(
+        ${PACKAGE_NAME}
+        SHARED
         # Shared C++
         ../cpp/MutableRawBuffer.cpp
         # Java JNI
@@ -29,33 +31,17 @@ set(SOURCES
         src/main/cpp/OpenGLContext.cpp
         src/main/cpp/OpenGLRenderer.cpp
         src/main/cpp/MutableJByteBuffer.cpp
-)
-
-# Only add Frame Processor sources if enabled
-if (ENABLE_FRAME_PROCESSORS)
-  list(APPEND SOURCES
-    src/main/cpp/frameprocessor/FrameHostObject.cpp
-    src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
-    src/main/cpp/frameprocessor/JSIJNIConversion.cpp
-    src/main/cpp/frameprocessor/VisionCameraProxy.cpp
-    src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
-    src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
-    src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
-    src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
-    src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
-    src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
-  )
-endif()
-
-add_library(
-  ${PACKAGE_NAME}
-  SHARED
-  ${SOURCES}
-)
-
-# Force 16KB page alignment for Android 15+ compatibility
-set_target_properties(${PACKAGE_NAME} PROPERTIES
-  LINK_FLAGS "-Wl,-z,max-page-size=16384"
+        # Frame Processor
+        src/main/cpp/frameprocessor/FrameHostObject.cpp
+        src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
+        src/main/cpp/frameprocessor/JSIJNIConversion.cpp
+        src/main/cpp/frameprocessor/VisionCameraProxy.cpp
+        src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
+        src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
+        src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
+        src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
+        src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
+        src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
 )
 
 # Header Search Paths (includes)
@@ -74,13 +60,13 @@ target_include_directories(
 # Link everything together
 target_link_libraries(
         ${PACKAGE_NAME}
-        ${LOG_LIB}                          # <-- Logcat logger
-        android                             # <-- Android JNI core
-        ReactAndroid::jsi                   # <-- RN: JSI
-        ReactAndroid::reactnative           # <-- RN: React Native JNI bindings (RN 0.76+)
-        fbjni::fbjni                        # <-- fbjni
-        GLESv2                              # <-- OpenGL (for VideoPipeline)
-        EGL                                 # <-- OpenGL (EGL) (for VideoPipeline)
+        ${LOG_LIB}                          # <-- Logcat logger
+        android                             # <-- Android JNI core
+        ReactAndroid::jsi                   # <-- RN: JSI
+        # ReactAndroid::reactnativejni      # <-- Temporarily disabled for RN 0.79+ compatibility
+        fbjni::fbjni                        # <-- fbjni
+        GLESv2                              # <-- OpenGL (for VideoPipeline)
+        EGL                                 # <-- OpenGL (EGL) (for VideoPipeline)
 )
 
 # Optionally also add Frame Processors here

View File

@@ -133,16 +133,8 @@ android {
   }
 
   compileOptions {
-    sourceCompatibility JavaVersion.VERSION_17
-    targetCompatibility JavaVersion.VERSION_17
-  }
-
-  kotlinOptions {
-    jvmTarget = "17"
-    freeCompilerArgs += [
-      "-opt-in=kotlin.RequiresOptIn",
-      "-opt-in=com.facebook.react.annotations.UnstableReactNativeAPI"
-    ]
+    sourceCompatibility JavaVersion.VERSION_1_8
+    targetCompatibility JavaVersion.VERSION_1_8
   }
 
   externalNativeBuild {
@@ -165,7 +157,6 @@ android {
       "**/libhermes-executor-debug.so",
      "**/libhermes_executor.so",
      "**/libreactnativejni.so",
-     "**/libreactnative.so",
      "**/libturbomodulejsijni.so",
      "**/libreact_nativemodule_core.so",
      "**/libjscexecutor.so"

View File

@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

View File

@@ -9,13 +9,11 @@
 JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
   return facebook::jni::initialize(vm, [] {
-    // VideoPipeline is needed for video recording even without Frame Processors
-    vision::VideoPipeline::registerNatives();
-
-#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
-    // Frame Processor JNI bindings - only register when Frame Processors are enabled
     vision::VisionCameraInstaller::registerNatives();
     vision::JVisionCameraProxy::registerNatives();
     vision::JVisionCameraScheduler::registerNatives();
+    vision::VideoPipeline::registerNatives();
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
     vision::JFrameProcessor::registerNatives();
     vision::JSharedArray::registerNatives();
 #endif

View File

@@ -40,26 +40,15 @@ fun CameraView.invokeOnStopped() {
   this.sendEvent(event)
 }
 
-fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
-  Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
+  Log.e(CameraView.TAG, "invokeOnError(...):")
   val event = Arguments.createMap()
   event.putInt("index", index)
   event.putString("filepath", filepath.toString())
-  if (durationUs != null) {
-    event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
-  }
   val reactContext = context as ReactContext
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
 }
 
-fun CameraView.invokeOnInitReady(filepath: File) {
-  Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
-  val event = Arguments.createMap()
-  event.putString("filepath", filepath.toString())
-  val reactContext = context as ReactContext
-  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
-}
-
 fun CameraView.invokeOnError(error: Throwable) {
   Log.e(CameraView.TAG, "invokeOnError(...):")
   error.printStackTrace()
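
Note: the removed "duration" field reported each chunk's length in seconds, converted from the encoder's microsecond timestamps. A minimal standalone restatement of that conversion (hypothetical helper name, not code from this diff):

    // Encoder presentation timestamps are microseconds; the event payload exposed seconds.
    fun microsecondsToSeconds(durationUs: Long): Double = durationUs / 1_000_000.0
    // microsecondsToSeconds(6_000_000L) == 6.0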

View File

@@ -271,12 +271,8 @@ class CameraView(context: Context) :
       invokeOnStopped()
     }
 
-    override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
-      invokeOnChunkReady(filepath, index, durationUs)
-    }
-
-    override fun onInitSegmentReady(filepath: File) {
-      invokeOnInitReady(filepath)
+    override fun onVideoChunkReady(filepath: File, index: Int) {
+      invokeOnChunkReady(filepath, index)
     }
 
     override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {

View File

@@ -32,8 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
       .put("cameraError", MapBuilder.of("registrationName", "onError"))
       .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
       .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
-      .put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
-      .build()?.toMutableMap()
+      .build()
 
   override fun getName(): String = TAG

View File

@@ -31,12 +31,10 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   init {
     try {
       // Load the native part of VisionCamera.
-      // Includes the OpenGL VideoPipeline (needed for video recording)
-      // Frame Processors remain disabled for RN 0.79+ compatibility
+      // Includes the OpenGL VideoPipeline, as well as Frame Processor JSI bindings
       System.loadLibrary("VisionCamera")
-      Log.i(TAG, "VisionCamera native library loaded successfully")
     } catch (e: UnsatisfiedLinkError) {
-      Log.e(TAG, "Failed to load VisionCamera C++ library!", e)
+      Log.e(VisionCameraProxy.TAG, "Failed to load VisionCamera C++ library!", e)
       throw e
     }
   }
@@ -75,11 +73,15 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   }
 
   @ReactMethod(isBlockingSynchronousMethod = true)
-  fun installFrameProcessorBindings(): Boolean {
-    // Frame Processors are disabled for React Native 0.79+ compatibility
-    Log.i(TAG, "Frame Processor bindings not installed - Frame Processors disabled for RN 0.79+ compatibility")
-    return false
-  }
+  fun installFrameProcessorBindings(): Boolean =
+    try {
+      val proxy = VisionCameraProxy(reactApplicationContext)
+      VisionCameraInstaller.install(proxy)
+      true
+    } catch (e: Error) {
+      Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
+      false
+    }
 
   @ReactMethod
   fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
@@ -155,7 +157,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   }
 
   private fun canRequestPermission(permission: String): Boolean {
-    val activity = reactApplicationContext.currentActivity as? PermissionAwareActivity
+    val activity = currentActivity as? PermissionAwareActivity
     return activity?.shouldShowRequestPermissionRationale(permission) ?: false
   }

View File

@@ -15,7 +15,6 @@ import android.util.Log
 import android.util.Size
 import android.view.Surface
 import android.view.SurfaceHolder
-import android.view.WindowManager
 import androidx.core.content.ContextCompat
 import com.google.mlkit.vision.barcode.common.Barcode
 import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
@@ -426,23 +425,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     val fps = configuration?.fps ?: 30
 
-    // Get actual device rotation from WindowManager since the React Native orientation hook
-    // doesn't update when rotating between landscape-left and landscape-right on Android.
-    // Map device rotation to the correct orientation for video recording.
-    // Surface.ROTATION_90 = device rotated 90° CCW = phone top on LEFT = LANDSCAPE_LEFT
-    // Surface.ROTATION_270 = device rotated 90° CW = phone top on RIGHT = LANDSCAPE_RIGHT
-    val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
-    val deviceRotation = windowManager.defaultDisplay.rotation
-    val recordingOrientation = when (deviceRotation) {
-      Surface.ROTATION_0 -> Orientation.PORTRAIT
-      Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
-      Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
-      Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
-      else -> Orientation.PORTRAIT
-    }
-    Log.i(TAG, "startRecording: orientation=${recordingOrientation.toDegrees()}° (deviceRotation=$deviceRotation)")
-
     val recording = RecordingSession(
       context,
       cameraId,
@@ -450,7 +432,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
       enableAudio,
       fps,
       videoOutput.enableHdr,
-      recordingOrientation,
+      orientation,
       options,
       filePath,
       callback,
@@ -515,8 +497,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     fun onInitialized()
     fun onStarted()
     fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
-    fun onInitSegmentReady(filepath: File)
+    fun onVideoChunkReady(filepath: File, index: Int)
     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
   }
 }
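
Note on the deleted rotation lookup: WindowManager.defaultDisplay has been deprecated since API 30. A version-aware sketch of the same device-rotation read, assuming a visual Context (illustration only, not code from this repository):

    import android.content.Context
    import android.os.Build
    import android.view.Surface
    import android.view.WindowManager

    fun deviceRotation(context: Context): Int =
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
        // API 30+: the Display is associated with the Context itself
        context.display?.rotation ?: Surface.ROTATION_0
      } else {
        @Suppress("DEPRECATION")
        (context.getSystemService(Context.WINDOW_SERVICE) as WindowManager).defaultDisplay.rotation
      }

The Surface.ROTATION_* -> Orientation mapping removed above would then be applied to the result.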

View File

@@ -14,7 +14,7 @@ import java.io.File
 import java.nio.ByteBuffer
 
 class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
-  MediaCodec.Callback(), ChunkedRecorderInterface {
+  MediaCodec.Callback() {
   companion object {
     private const val TAG = "ChunkedRecorder"
@@ -30,15 +30,8 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       iFrameInterval: Int = 5
     ): ChunkedRecordingManager {
       val mimeType = options.videoCodec.toMimeType()
-      // Use cameraOrientation (from WindowManager) for rotation metadata
-      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
-      // Note: MediaMuxer.setOrientationHint() uses opposite convention from HlsMuxer's rotation matrix
-      // We need to invert the rotation: 90 <-> 270, while 0 and 180 stay the same
-      val orientationDegrees = when (cameraOrientation.toDegrees()) {
-        90 -> 270
-        270 -> 90
-        else -> cameraOrientation.toDegrees()
-      }
+      val cameraOrientationDegrees = cameraOrientation.toDegrees()
+      val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees();
       val (width, height) = if (cameraOrientation.isLandscape()) {
         size.height to size.width
       } else {
@@ -62,12 +55,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
       format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
-      Log.d(TAG, "Video Format: $format, orientation: $orientationDegrees")
+      Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
       // Create a MediaCodec encoder, and configure it with our format. Get a Surface
       // we can use for input and wrap it with a class that handles the EGL work.
       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
       return ChunkedRecordingManager(
-        codec, outputDirectory, orientationDegrees, iFrameInterval, callbacks
+        codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks
       )
     }
   }
@@ -80,7 +73,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   private val targetDurationUs = iFrameInterval * 1000000
 
-  override val surface: Surface = encoder.createInputSurface()
+  val surface: Surface = encoder.createInputSurface()
 
   init {
     if (!this.outputDirectory.exists()) {
@@ -98,14 +91,11 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       muxer.start()
     }
 
-    fun writeSample(buffer: java.nio.ByteBuffer, bufferInfo: BufferInfo) {
-      muxer.writeSampleData(videoTrack, buffer, bufferInfo)
-    }
-
     fun finish() {
       muxer.stop()
       muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex, null)
+      callbacks.onVideoChunkReady(filepath, chunkIndex)
     }
   }
@@ -115,12 +105,6 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     muxerContext?.finish()
     chunkIndex++
 
-    val format = this.encodedFormat
-    if (format == null) {
-      Log.e(TAG, "Cannot create muxer: encodedFormat is null (onOutputFormatChanged not called yet)")
-      return
-    }
-
     val newFileName = "$chunkIndex.mp4"
     val newOutputFile = File(this.outputDirectory, newFileName)
     Log.i(TAG, "Creating new muxer for file: $newFileName")
@@ -130,7 +114,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     )
     muxer.setOrientationHint(orientationHint)
     muxerContext = MuxerContext(
-      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, format, this.callbacks
+      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
     )
   }
@@ -139,16 +123,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   }
 
   private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
-    val context = muxerContext ?: return 0L
-    return bufferInfo.presentationTimeUs - context.startTimeUs
+    return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
   }
 
-  override fun start() {
+  fun start() {
     encoder.start()
     recording = true
   }
 
-  override fun finish() {
+  fun finish() {
     synchronized(this) {
       muxerContext?.finish()
       recording = false
@@ -172,13 +155,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
        this.createNextMuxer(bufferInfo)
      }
-      val context = muxerContext
-      if (context == null) {
-        Log.e(TAG, "Cannot write sample data: muxerContext is null")
-        encoder.releaseOutputBuffer(index, false)
-        return
-      }
-      context.writeSample(encodedData, bufferInfo)
+      muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
      encoder.releaseOutputBuffer(index, false)
    }
  }
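
Note: the left-hand side's 90 <-> 270 swap (per its own comment, needed because MediaMuxer.setOrientationHint() uses the opposite convention from HlsMuxer's rotation matrix) reduces to a small pure function. Restated here for clarity; this helper does not exist in the repository:

    // 90 and 270 swap; 0 and 180 are unchanged.
    fun invertOrientationHint(degrees: Int): Int = when (degrees) {
      90 -> 270
      270 -> 90
      else -> degrees
    }
    // invertOrientationHint(90) == 270; invertOrientationHint(180) == 180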

View File

@@ -1,15 +0,0 @@
-package com.mrousavy.camera.core
-
-import android.view.Surface
-
-/**
- * Common interface for chunked video recorders.
- * Implemented by both ChunkedRecordingManager (regular MP4) and
- * FragmentedRecordingManager (HLS-compatible fMP4).
- */
-interface ChunkedRecorderInterface {
-  val surface: Surface
-
-  fun start()
-  fun finish()
-}

View File

@@ -1,176 +0,0 @@
-package com.mrousavy.camera.core
-
-import android.media.MediaCodec
-import android.media.MediaCodec.BufferInfo
-import android.media.MediaCodecInfo
-import android.media.MediaFormat
-import android.util.Log
-import android.util.Size
-import android.view.Surface
-import com.mrousavy.camera.types.Orientation
-import com.mrousavy.camera.types.RecordVideoOptions
-import java.io.File
-
-/**
- * A recording manager that produces HLS-compatible fragmented MP4 segments.
- *
- * Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
- * - init.mp4: Initialization segment (ftyp + moov with mvex)
- * - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
- */
-class FragmentedRecordingManager(
-  private val encoder: MediaCodec,
-  private val muxer: HlsMuxer
-) : MediaCodec.Callback(), ChunkedRecorderInterface {
-  companion object {
-    private const val TAG = "FragmentedRecorder"
-    private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6
-
-    fun fromParams(
-      callbacks: CameraSession.Callback,
-      size: Size,
-      enableAudio: Boolean,
-      fps: Int? = null,
-      cameraOrientation: Orientation,
-      bitRate: Int,
-      options: RecordVideoOptions,
-      outputDirectory: File,
-      segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
-    ): FragmentedRecordingManager {
-      val mimeType = options.videoCodec.toMimeType()
-      // Use cameraOrientation (from WindowManager) for rotation metadata
-      // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
-      val orientationDegrees = cameraOrientation.toDegrees()
-      // Swap dimensions based on camera orientation, same as ChunkedRecordingManager
-      val (width, height) = if (cameraOrientation.isLandscape()) {
-        size.height to size.width
-      } else {
-        size.width to size.height
-      }
-      Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees°")
-
-      val format = MediaFormat.createVideoFormat(mimeType, width, height)
-      val codec = MediaCodec.createEncoderByType(mimeType)
-      format.setInteger(
-        MediaFormat.KEY_COLOR_FORMAT,
-        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
-      )
-      // Use 30fps as conservative default since many Android devices can't sustain
-      // higher frame rates at high resolutions. This affects:
-      // - Encoder: bitrate allocation and I-frame interval calculation
-      // - HlsMuxer: timescale for accurate sample durations
-      // The actual frame timing comes from camera timestamps regardless of this setting.
-      val effectiveFps = 30
-      format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
-      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
-      format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
-      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
-
-      val muxer = HlsMuxer(
-        outputDirectory = outputDirectory,
-        callback = object : HlsMuxer.Callback {
-          override fun onInitSegmentReady(file: File) {
-            callbacks.onInitSegmentReady(file)
-          }
-
-          override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
-            callbacks.onVideoChunkReady(file, index, durationUs)
-          }
-        },
-        orientationDegrees = orientationDegrees,
-        fps = effectiveFps
-      )
-      muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)
-
-      return FragmentedRecordingManager(codec, muxer)
-    }
-  }
-
-  private var recording = false
-  private var muxerStarted = false
-  private var trackIndex = -1
-
-  override val surface: Surface = encoder.createInputSurface()
-
-  init {
-    encoder.setCallback(this)
-  }
-
-  override fun start() {
-    encoder.start()
-    recording = true
-  }
-
-  override fun finish() {
-    synchronized(this) {
-      recording = false
-      if (muxerStarted) {
-        muxer.stop()
-        muxer.release()
-      }
-      try {
-        encoder.stop()
-        encoder.release()
-      } catch (e: Exception) {
-        Log.e(TAG, "Error stopping encoder", e)
-      }
-    }
-  }
-
-  // MediaCodec.Callback methods
-
-  override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
-    // Not used for Surface input
-  }
-
-  override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
-    synchronized(this) {
-      if (!recording) {
-        encoder.releaseOutputBuffer(index, false)
-        return
-      }
-      if (!muxerStarted) {
-        encoder.releaseOutputBuffer(index, false)
-        return
-      }
-      val buffer = encoder.getOutputBuffer(index)
-      if (buffer == null) {
-        Log.e(TAG, "getOutputBuffer returned null")
-        encoder.releaseOutputBuffer(index, false)
-        return
-      }
-      try {
-        muxer.writeSampleData(trackIndex, buffer, bufferInfo)
-      } catch (e: Exception) {
-        Log.e(TAG, "Error writing sample", e)
-      }
-      encoder.releaseOutputBuffer(index, false)
-    }
-  }
-
-  override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
-    Log.e(TAG, "Codec error: ${e.message}")
-  }
-
-  override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
-    synchronized(this) {
-      Log.i(TAG, "Output format changed: $format")
-      trackIndex = muxer.addTrack(format)
-      muxer.start()
-      muxerStarted = true
-    }
-  }
-}
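
For reference, the deleted recorder's output layout (init.mp4 plus numbered moof/mdat segments) maps directly onto an HLS media playlist with an EXT-X-MAP entry. A hypothetical sketch of how a downstream consumer could assemble one, assuming per-segment durations in seconds (no such helper exists in this repository):

    // fMP4 media segments require HLS protocol version 7.
    fun buildHlsPlaylist(segmentDurations: List<Double>, targetDuration: Int = 6): String =
      buildString {
        appendLine("#EXTM3U")
        appendLine("#EXT-X-VERSION:7")
        appendLine("#EXT-X-TARGETDURATION:$targetDuration")
        appendLine("#EXT-X-MAP:URI=\"init.mp4\"") // shared initialization segment
        segmentDurations.forEachIndexed { index, duration ->
          appendLine("#EXTINF:$duration,")
          appendLine("$index.mp4") // 0.mp4, 1.mp4, ... as produced by the muxer
        }
        appendLine("#EXT-X-ENDLIST")
      }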

View File

@@ -9,10 +9,8 @@ import android.os.Looper
 import android.util.Log
 import android.util.Size
 import android.view.PixelCopy
-import android.view.Surface
 import android.view.SurfaceHolder
 import android.view.SurfaceView
-import android.view.WindowManager
 import com.facebook.react.bridge.UiThreadUtil
 import com.mrousavy.camera.extensions.resize
 import com.mrousavy.camera.extensions.rotatedBy
@@ -152,8 +150,6 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
     val width = frame.width()
     val height = frame.height()
 
-    // Create bitmap matching surface frame dimensions for PixelCopy
-    // The original code swapped dimensions assuming landscape input - keep that for consistency
     val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
 
     // Use a coroutine to suspend until the PixelCopy request is complete
@@ -163,23 +159,7 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
         bitmap,
         { copyResult ->
           if (copyResult == PixelCopy.SUCCESS) {
-            // Get actual device rotation from WindowManager instead of relying on
-            // the orientation prop, which may not update on Android when rotating
-            // between landscape-left and landscape-right.
-            val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
-            val deviceRotation = windowManager.defaultDisplay.rotation
-            val actualOrientation = when (deviceRotation) {
-              Surface.ROTATION_0 -> Orientation.PORTRAIT
-              Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
-              Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
-              Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
-              else -> Orientation.PORTRAIT
-            }
-            Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
-            continuation.resume(bitmap.transformBitmap(actualOrientation))
+            continuation.resume(rotateBitmap90CounterClockwise(bitmap))
           } else {
             continuation.resumeWithException(
               RuntimeException("PixelCopy failed with error code $copyResult")
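
rotateBitmap90CounterClockwise is called on the right-hand side but its body is not shown in this diff; a conventional Matrix-based implementation would look roughly like this (a sketch under that assumption, not the repository's code):

    import android.graphics.Bitmap
    import android.graphics.Matrix

    // Rotate a bitmap 90° counter-clockwise via a transform Matrix.
    fun rotateBitmap90CounterClockwise(bitmap: Bitmap): Bitmap {
      val matrix = Matrix().apply { postRotate(-90f) }
      return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
    }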

View File

@@ -8,14 +8,12 @@ import com.facebook.common.statfs.StatFsHelper
 import com.mrousavy.camera.extensions.getRecommendedBitRate
 import com.mrousavy.camera.types.Orientation
 import com.mrousavy.camera.types.RecordVideoOptions
-import com.mrousavy.camera.types.StreamSegmentType
 import com.mrousavy.camera.utils.FileUtils
 import java.io.File
 import android.os.Environment
 import java.text.SimpleDateFormat
 import java.util.Locale
 import java.util.Date
 
 class RecordingSession(
   context: Context,
   val cameraId: String,
@@ -28,7 +26,7 @@ class RecordingSession(
   private val filePath: String,
   private val callback: (video: Video) -> Unit,
   private val onError: (error: CameraError) -> Unit,
-  private val allCallbacks: CameraSession.Callback
+  private val allCallbacks: CameraSession.Callback,
 ) {
   companion object {
     private const val TAG = "RecordingSession"
@@ -36,45 +34,23 @@ class RecordingSession(
     private const val AUDIO_SAMPLING_RATE = 44_100
     private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
     private const val AUDIO_CHANNELS = 1
-    // Segment duration in seconds (matching iOS default of 6 seconds)
-    private const val SEGMENT_DURATION_SECONDS = 6
   }
 
   data class Video(val path: String, val durationMs: Long, val size: Size)
 
-  // Normalize path - expo-file-system passes file:// URIs but File expects raw paths
-  // Handle both file:// and file:/ variants
-  private val outputPath: File = File(filePath.replace(Regex("^file:/+"), "/"))
+  private val outputPath: File = File(filePath)
   private val bitRate = getBitRate()
-  // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
-  // or fall back to ChunkedRecordingManager for regular MP4 chunks
-  private val recorder: ChunkedRecorderInterface = if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
-    FragmentedRecordingManager.fromParams(
-      allCallbacks,
-      size,
-      enableAudio,
-      fps,
-      cameraOrientation,
-      bitRate,
-      options,
-      outputPath,
-      SEGMENT_DURATION_SECONDS
-    )
-  } else {
-    ChunkedRecordingManager.fromParams(
-      allCallbacks,
-      size,
-      enableAudio,
-      fps,
-      cameraOrientation,
-      bitRate,
-      options,
-      outputPath
-    )
-  }
+  private val recorder = ChunkedRecordingManager.fromParams(
+    allCallbacks,
+    size,
+    enableAudio,
+    fps,
+    cameraOrientation,
+    bitRate,
+    options,
+    outputPath
+  )
   private var startTime: Long? = null
   val surface: Surface
     get() {
@@ -83,7 +59,7 @@ class RecordingSession(
   fun start() {
     synchronized(this) {
-      Log.i(TAG, "Starting RecordingSession with ${options.streamSegmentType} recorder..")
+      Log.i(TAG, "Starting RecordingSession..")
       startTime = System.currentTimeMillis()
       recorder.start()
     }
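
Note: the removed normalization collapsed expo-file-system style file: URIs into plain paths before constructing java.io.File. The same Regex, restated as a pure function with example inputs (helper name and paths are illustrative):

    fun normalizeFilePath(filePath: String): String =
      filePath.replace(Regex("^file:/+"), "/")

    // normalizeFilePath("file:///data/user/0/app/files/chunks") == "/data/user/0/app/files/chunks"
    // normalizeFilePath("file:/data/user/0/app/files/chunks")   == "/data/user/0/app/files/chunks"
    // normalizeFilePath("/data/user/0/app/files/chunks")        == "/data/user/0/app/files/chunks"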

View File

@@ -9,6 +9,7 @@ import com.facebook.jni.HybridData
 import com.facebook.proguard.annotations.DoNotStrip
 import com.facebook.react.bridge.ReactApplicationContext
 import com.facebook.react.bridge.UiThreadUtil
+// import com.facebook.react.turbomodule.core.CallInvokerHolderImpl // Commented out due to RN 0.79+ compatibility
 import com.facebook.react.uimanager.UIManagerHelper
 import com.mrousavy.camera.CameraView
 import com.mrousavy.camera.core.ViewNotFoundError
@@ -77,9 +78,6 @@ class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
     FrameProcessorPluginRegistry.getPlugin(name, this, options)
 
   // private C++ funcs
-  // Frame Processors are disabled - native registration is skipped via VISION_CAMERA_ENABLE_FRAME_PROCESSORS=OFF
-  // This method is never called or registered, kept for reference only
-  // @DoNotStrip
-  // @Keep
-  // private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: Any, scheduler: VisionCameraScheduler): HybridData
+  // Commented out due to React Native 0.79+ API compatibility issues
+  // private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData
 }

View File

@@ -9,7 +9,6 @@ class RecordVideoOptions(map: ReadableMap) {
   var videoBitRateOverride: Double? = null
   var videoBitRateMultiplier: Double? = null
   var orientation: Orientation? = null
-  var streamSegmentType: StreamSegmentType = StreamSegmentType.FRAGMENTED_MP4
 
   init {
     if (map.hasKey("fileType")) {
@@ -30,8 +29,5 @@ class RecordVideoOptions(map: ReadableMap) {
     if (map.hasKey("orientation")) {
       orientation = Orientation.fromUnionValue(map.getString("orientation"))
     }
-    if (map.hasKey("streamSegmentType")) {
-      streamSegmentType = StreamSegmentType.fromUnionValue(map.getString("streamSegmentType"))
-    }
   }
 }
}

View File

@@ -1,15 +0,0 @@
-package com.mrousavy.camera.types
-
-enum class StreamSegmentType(override val unionValue: String) : JSUnionValue {
-  FRAGMENTED_MP4("FRAGMENTED_MP4"),
-  RB_CHUNKED_MP4("RB_CHUNKED_MP4");
-
-  companion object : JSUnionValue.Companion<StreamSegmentType> {
-    override fun fromUnionValue(unionValue: String?): StreamSegmentType =
-      when (unionValue) {
-        "FRAGMENTED_MP4" -> FRAGMENTED_MP4
-        "RB_CHUNKED_MP4" -> RB_CHUNKED_MP4
-        else -> FRAGMENTED_MP4 // Default to fMP4
-      }
-  }
-}

View File

@@ -11,6 +11,6 @@ inline fun withPromise(promise: Promise, closure: () -> Any?) {
   } catch (e: Throwable) {
     e.printStackTrace()
     val error = if (e is CameraError) e else UnknownCameraError(e)
-    promise.reject("${error.domain}/${error.id}", error.message ?: "Unknown error", error.cause)
+    promise.reject("${error.domain}/${error.id}", error.message, error.cause)
   }
 }

View File

@@ -20,9 +20,6 @@ extension CameraSession {
                      onError: @escaping (_ error: CameraError) -> Void) {
     // Run on Camera Queue
     CameraQueues.cameraQueue.async {
-      // Normalize path - expo-file-system passes file:// URIs but FileManager expects raw paths
-      let normalizedPath = filePath.hasPrefix("file://") ? String(filePath.dropFirst(7)) : filePath
-
       let start = DispatchTime.now()
       ReactLogger.log(level: .info, message: "Starting Video recording...")
@@ -41,27 +38,11 @@ extension CameraSession {
       // Callback for when new chunks are ready
       let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
         guard let delegate = self.delegate else {
-          ReactLogger.log(level: .warning, message: "Chunk ready but delegate is nil, dropping chunk: \(chunk)")
           return
         }
         delegate.onVideoChunkReady(chunk: chunk)
       }
 
-      // Callback for when a chunk write fails (e.g. init file write failure)
-      let onChunkError: (Error) -> Void = { error in
-        ReactLogger.log(level: .error, message: "Chunk write error, stopping recording: \(error.localizedDescription)")
-        // Stop recording immediately
-        if let session = self.recordingSession {
-          session.stop(clock: self.captureSession.clock)
-        }
-        // Surface error to RN
-        if let cameraError = error as? CameraError {
-          onError(cameraError)
-        } else {
-          onError(.capture(.fileError))
-        }
-      }
-
       // Callback for when the recording ends
       let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
         defer {
@@ -101,23 +82,22 @@ extension CameraSession {
         }
       }
 
-      if !FileManager.default.fileExists(atPath: normalizedPath) {
+      if !FileManager.default.fileExists(atPath: filePath) {
         do {
-          try FileManager.default.createDirectory(atPath: normalizedPath, withIntermediateDirectories: true)
+          try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true)
         } catch {
           onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription)))
           return
         }
       }
 
-      ReactLogger.log(level: .info, message: "Will record to temporary file: \(normalizedPath)")
+      ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)")
 
       do {
         // Create RecordingSession for the temp file
-        let recordingSession = try RecordingSession(outputDiretory: normalizedPath,
+        let recordingSession = try RecordingSession(outputDiretory: filePath,
                                                     fileType: options.fileType,
                                                     onChunkReady: onChunkReady,
-                                                    onChunkError: onChunkError,
                                                     completion: onFinish)
 
         // Init Audio + Activate Audio Session (optional)

View File

@@ -24,14 +24,12 @@ class ChunkedRecorder: NSObject {
   let outputURL: URL
   let onChunkReady: ((Chunk) -> Void)
-  let onError: ((Error) -> Void)?
 
   private var chunkIndex: UInt64 = 0
 
-  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void), onError: ((Error) -> Void)? = nil) throws {
+  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
     self.outputURL = outputURL
     self.onChunkReady = onChunkReady
-    self.onError = onError
 
     guard FileManager.default.fileExists(atPath: outputURL.path) else {
       throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
     }
@@ -58,36 +56,28 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
   private func saveInitSegment(_ data: Data) {
     let url = outputURL.appendingPathComponent("init.mp4")
-    do {
-      try data.write(to: url)
-      onChunkReady(url: url, type: .initialization)
-    } catch {
-      ReactLogger.log(level: .error, message: "Failed to write init file \(url): \(error.localizedDescription)")
-      onError?(CameraError.capture(.fileError))
-    }
+    save(data: data, url: url)
+    onChunkReady(url: url, type: .initialization)
   }
 
   private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
     let name = "\(chunkIndex).mp4"
     let url = outputURL.appendingPathComponent(name)
-    if save(data: data, url: url) {
-      let duration = report?
-        .trackReports
-        .filter { $0.mediaType == .video }
-        .first?
-        .duration
-      onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
-      chunkIndex += 1
-    }
+    save(data: data, url: url)
+    let duration = report?
+      .trackReports
+      .filter { $0.mediaType == .video }
+      .first?
+      .duration
+    onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
+    chunkIndex += 1
   }
 
-  private func save(data: Data, url: URL) -> Bool {
+  private func save(data: Data, url: URL) {
     do {
       try data.write(to: url)
-      return true
     } catch {
       ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
-      return false
     }
   }

View File

@@ -74,13 +74,12 @@ class RecordingSession {
   init(outputDiretory: String,
        fileType: AVFileType,
        onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void),
-       onChunkError: ((Error) -> Void)? = nil,
        completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
     completionHandler = completion
 
     do {
       let outputURL = URL(fileURLWithPath: outputDiretory)
-      recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady, onError: onChunkError)
+      recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady)
       assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!)
       assetWriter.shouldOptimizeForNetworkUse = false
       assetWriter.outputFileTypeProfile = .mpeg4AppleHLS

View File

@@ -41,17 +41,6 @@ export interface RecordVideoOptions {
    * @default 'normal'
    */
   videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number
-  /**
-   * The stream segment type for recording on Android.
-   * - `FRAGMENTED_MP4`: HLS-compatible segments (init.mp4 + numbered segments)
-   * - `RB_CHUNKED_MP4`: Legacy chunked MP4 format
-   *
-   * iOS always uses FRAGMENTED_MP4 regardless of this setting.
-   *
-   * @platform android
-   * @default 'FRAGMENTED_MP4'
-   */
-  streamSegmentType?: 'FRAGMENTED_MP4' | 'RB_CHUNKED_MP4'
 }
 
 /**
/**