Compare commits


1 Commit

SHA1        Message                                         Date
ac06fa5f56  Bumps and fixes for react native version bump   2025-08-12 15:36:55 -06:00
16 changed files with 67 additions and 536 deletions

View File

@@ -19,7 +19,9 @@ endif()
# Add react-native-vision-camera sources
set(SOURCES
add_library(
${PACKAGE_NAME}
SHARED
# Shared C++
../cpp/MutableRawBuffer.cpp
# Java JNI
@@ -29,33 +31,17 @@ set(SOURCES
src/main/cpp/OpenGLContext.cpp
src/main/cpp/OpenGLRenderer.cpp
src/main/cpp/MutableJByteBuffer.cpp
)
# Only add Frame Processor sources if enabled
if (ENABLE_FRAME_PROCESSORS)
list(APPEND SOURCES
src/main/cpp/frameprocessor/FrameHostObject.cpp
src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
src/main/cpp/frameprocessor/JSIJNIConversion.cpp
src/main/cpp/frameprocessor/VisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
)
endif()
add_library(
${PACKAGE_NAME}
SHARED
${SOURCES}
)
# Force 16KB page alignment for Android 15+ compatibility
set_target_properties(${PACKAGE_NAME} PROPERTIES
LINK_FLAGS "-Wl,-z,max-page-size=16384"
# Frame Processor
src/main/cpp/frameprocessor/FrameHostObject.cpp
src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
src/main/cpp/frameprocessor/JSIJNIConversion.cpp
src/main/cpp/frameprocessor/VisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
)
# Header Search Paths (includes)
@@ -74,13 +60,13 @@ target_include_directories(
# Link everything together
target_link_libraries(
${PACKAGE_NAME}
${LOG_LIB} # <-- Logcat logger
android # <-- Android JNI core
ReactAndroid::jsi # <-- RN: JSI
ReactAndroid::reactnative # <-- RN: React Native JNI bindings (RN 0.76+)
fbjni::fbjni # <-- fbjni
GLESv2 # <-- OpenGL (for VideoPipeline)
EGL # <-- OpenGL (EGL) (for VideoPipeline)
${LOG_LIB} # <-- Logcat logger
android # <-- Android JNI core
ReactAndroid::jsi # <-- RN: JSI
# ReactAndroid::reactnativejni # <-- Temporarily disabled for RN 0.79+ compatibility
fbjni::fbjni # <-- fbjni
GLESv2 # <-- OpenGL (for VideoPipeline)
EGL # <-- OpenGL (EGL) (for VideoPipeline)
)
# Optionally also add Frame Processors here
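
Note: the removed -Wl,-z,max-page-size=16384 flag targets devices whose kernels use 16 KB memory pages (Android 15+); .so files built without it can fail to load there. A quick runtime check of the device page size, using standard Android APIs (this helper is not part of the repo):

    import android.system.Os
    import android.system.OsConstants
    import android.util.Log

    // Sketch: log the device page size. On 16 KB-page devices, native
    // libraries linked without max-page-size=16384 are rejected at load time.
    fun logPageSize() {
        val pageSize = Os.sysconf(OsConstants._SC_PAGESIZE)
        Log.i("VisionCamera", "Device page size: $pageSize bytes")
    }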

View File

@@ -133,16 +133,8 @@ android {
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_17
targetCompatibility JavaVersion.VERSION_17
}
kotlinOptions {
jvmTarget = "17"
freeCompilerArgs += [
"-opt-in=kotlin.RequiresOptIn",
"-opt-in=com.facebook.react.annotations.UnstableReactNativeAPI"
]
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
externalNativeBuild {
@@ -165,7 +157,6 @@ android {
"**/libhermes-executor-debug.so",
"**/libhermes_executor.so",
"**/libreactnativejni.so",
"**/libreactnative.so",
"**/libturbomodulejsijni.so",
"**/libreact_nativemodule_core.so",
"**/libjscexecutor.so"
@@ -178,10 +169,6 @@ dependencies {
implementation "com.facebook.react:react-android:+"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3"
// Media3 muxer for fragmented MP4 (HLS-compatible) recording
implementation "androidx.media3:media3-muxer:1.5.0"
implementation "androidx.media3:media3-common:1.5.0"
if (enableCodeScanner) {
// User enabled code-scanner, so we bundle the 2.4 MB model in the app.
implementation 'com.google.mlkit:barcode-scanning:17.2.0'

View File

@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@@ -9,13 +9,11 @@
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
return facebook::jni::initialize(vm, [] {
// VideoPipeline is needed for video recording even without Frame Processors
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
// Frame Processor JNI bindings - only register when Frame Processors are enabled
vision::VisionCameraInstaller::registerNatives();
vision::JVisionCameraProxy::registerNatives();
vision::JVisionCameraScheduler::registerNatives();
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
vision::JFrameProcessor::registerNatives();
vision::JSharedArray::registerNatives();
#endif
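
Note: JNI_OnLoad runs once, when the Kotlin side first loads the shared library, so every registerNatives() call must complete there before any native method is invoked. A minimal sketch of the Kotlin trigger (the helper object is illustrative, not from this repo):

    // Loading the .so runs the C++ JNI_OnLoad above, which registers
    // the native methods before any of them can be called from Kotlin.
    object VisionCameraNativeLoader { // hypothetical helper, for illustration
        @Volatile private var loaded = false

        fun ensureLoaded() {
            if (loaded) return
            synchronized(this) {
                if (loaded) return
                System.loadLibrary("VisionCamera") // triggers JNI_OnLoad -> registerNatives()
                loaded = true
            }
        }
    }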

View File

@@ -40,26 +40,15 @@ fun CameraView.invokeOnStopped() {
this.sendEvent(event)
}
fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
Log.e(CameraView.TAG, "invokeOnError(...):")
val event = Arguments.createMap()
event.putInt("index", index)
event.putString("filepath", filepath.toString())
if (durationUs != null) {
event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
}
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
}
fun CameraView.invokeOnInitReady(filepath: File) {
Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
val event = Arguments.createMap()
event.putString("filepath", filepath.toString())
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
}
fun CameraView.invokeOnError(error: Throwable) {
Log.e(CameraView.TAG, "invokeOnError(...):")
error.printStackTrace()
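
Note: the removed durationUs plumbing converts microseconds to seconds before the value crosses the bridge, and omits the key entirely when the duration is unknown. That emission pattern in isolation (the free function is illustrative; event and field names mirror the diff):

    import com.facebook.react.bridge.Arguments
    import com.facebook.react.bridge.ReactContext
    import com.facebook.react.uimanager.events.RCTEventEmitter
    import java.io.File

    // Sketch: emit a chunk-ready event with an optional duration in seconds.
    fun emitChunkReady(reactContext: ReactContext, viewId: Int, filepath: File, index: Int, durationUs: Long?) {
        val event = Arguments.createMap().apply {
            putInt("index", index)
            putString("filepath", filepath.toString())
            // JS receives seconds as a double; skip the key if duration is unknown.
            durationUs?.let { putDouble("duration", it / 1_000_000.0) }
        }
        reactContext.getJSModule(RCTEventEmitter::class.java)
            .receiveEvent(viewId, "onVideoChunkReady", event)
    }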

View File

@@ -271,12 +271,8 @@ class CameraView(context: Context) :
invokeOnStopped()
}
override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
invokeOnChunkReady(filepath, index, durationUs)
}
override fun onInitSegmentReady(filepath: File) {
invokeOnInitReady(filepath)
override fun onVideoChunkReady(filepath: File, index: Int) {
invokeOnChunkReady(filepath, index)
}
override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {

View File

@@ -32,8 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
.build()?.toMutableMap()
.build()
override fun getName(): String = TAG
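
Note: the removed `.toMutableMap()` is presumably defensive: MapBuilder.build() returns an immutable map, and some React Native versions merge additional event constants into the returned map, which throws UnsupportedOperationException on an immutable one. A sketch under that assumption:

    import com.facebook.react.common.MapBuilder

    // Sketch: make the exported event-constant map mutable so the framework
    // can merge extra entries into it without throwing.
    fun exportedEventConstants(): MutableMap<String, Any>? =
        MapBuilder.builder<String, Any>()
            .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
            .put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
            .build()
            ?.toMutableMap()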

View File

@@ -31,12 +31,10 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
init {
try {
// Load the native part of VisionCamera.
// Includes the OpenGL VideoPipeline (needed for video recording)
// Frame Processors remain disabled for RN 0.79+ compatibility
// Includes the OpenGL VideoPipeline, as well as Frame Processor JSI bindings
System.loadLibrary("VisionCamera")
Log.i(TAG, "VisionCamera native library loaded successfully")
} catch (e: UnsatisfiedLinkError) {
Log.e(TAG, "Failed to load VisionCamera C++ library!", e)
Log.e(VisionCameraProxy.TAG, "Failed to load VisionCamera C++ library!", e)
throw e
}
}
@@ -75,11 +73,15 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
}
@ReactMethod(isBlockingSynchronousMethod = true)
fun installFrameProcessorBindings(): Boolean {
// Frame Processors are disabled for React Native 0.79+ compatibility
Log.i(TAG, "Frame Processor bindings not installed - Frame Processors disabled for RN 0.79+ compatibility")
return false
}
fun installFrameProcessorBindings(): Boolean =
try {
val proxy = VisionCameraProxy(reactApplicationContext)
VisionCameraInstaller.install(proxy)
true
} catch (e: Error) {
Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
false
}
@ReactMethod
fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
@@ -155,7 +157,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
}
private fun canRequestPermission(permission: String): Boolean {
val activity = reactApplicationContext.currentActivity as? PermissionAwareActivity
val activity = currentActivity as? PermissionAwareActivity
return activity?.shouldShowRequestPermissionRationale(permission) ?: false
}

View File

@@ -15,7 +15,6 @@ import android.util.Log
import android.util.Size
import android.view.Surface
import android.view.SurfaceHolder
import android.view.WindowManager
import androidx.core.content.ContextCompat
import com.google.mlkit.vision.barcode.common.Barcode
import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
@@ -426,21 +425,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
val fps = configuration?.fps ?: 30
// Get actual device rotation from WindowManager since the React Native orientation hook
// doesn't update when rotating between landscape-left and landscape-right on Android.
// Map device rotation to the correct orientationHint for video recording:
// - Counter-clockwise (ROTATION_90) → 270° hint
// - Clockwise (ROTATION_270) → 90° hint
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val recordingOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
else -> Orientation.PORTRAIT
}
val recording = RecordingSession(
context,
cameraId,
@@ -513,8 +497,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
fun onInitialized()
fun onStarted()
fun onStopped()
fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
fun onInitSegmentReady(filepath: File)
fun onVideoChunkReady(filepath: File, index: Int)
fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
}
}
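
Note: the removed WindowManager block compensates for the React Native orientation hook not distinguishing landscape-left from landscape-right on Android. The rotation-to-hint relationship it encodes, as a standalone helper (illustrative, not from this repo):

    import android.view.Surface

    // Sketch: map Surface.ROTATION_* to the degrees passed to
    // MediaMuxer.setOrientationHint(). A 90° counter-clockwise device
    // rotation needs a 270° hint so the encoded video plays upright.
    fun rotationToOrientationHint(deviceRotation: Int): Int = when (deviceRotation) {
        Surface.ROTATION_0 -> 0
        Surface.ROTATION_90 -> 270   // counter-clockwise -> 270° hint
        Surface.ROTATION_180 -> 180
        Surface.ROTATION_270 -> 90   // clockwise -> 90° hint
        else -> 0
    }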

View File

@@ -14,7 +14,7 @@ import java.io.File
import java.nio.ByteBuffer
class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
MediaCodec.Callback(), ChunkedRecorderInterface {
MediaCodec.Callback() {
companion object {
private const val TAG = "ChunkedRecorder"
@@ -73,7 +73,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
private val targetDurationUs = iFrameInterval * 1000000
override val surface: Surface = encoder.createInputSurface()
val surface: Surface = encoder.createInputSurface()
init {
if (!this.outputDirectory.exists()) {
@@ -95,9 +95,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
fun finish() {
muxer.stop()
muxer.release()
// Calculate duration from start time - this is approximate
// The new FragmentedRecordingManager provides accurate duration
callbacks.onVideoChunkReady(filepath, chunkIndex, null)
callbacks.onVideoChunkReady(filepath, chunkIndex)
}
}
@@ -107,12 +105,6 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
muxerContext?.finish()
chunkIndex++
val format = this.encodedFormat
if (format == null) {
Log.e(TAG, "Cannot create muxer: encodedFormat is null (onOutputFormatChanged not called yet)")
return
}
val newFileName = "$chunkIndex.mp4"
val newOutputFile = File(this.outputDirectory, newFileName)
Log.i(TAG, "Creating new muxer for file: $newFileName")
@@ -122,7 +114,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
)
muxer.setOrientationHint(orientationHint)
muxerContext = MuxerContext(
muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, format, this.callbacks
muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
)
}
@@ -131,16 +123,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
}
private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
val context = muxerContext ?: return 0L
return bufferInfo.presentationTimeUs - context.startTimeUs
return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
}
override fun start() {
fun start() {
encoder.start()
recording = true
}
override fun finish() {
fun finish() {
synchronized(this) {
muxerContext?.finish()
recording = false
@@ -164,13 +155,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
this.createNextMuxer(bufferInfo)
}
val context = muxerContext
if (context == null) {
Log.e(TAG, "Cannot write sample data: muxerContext is null")
encoder.releaseOutputBuffer(index, false)
return
}
context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false)
}
}
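
Note: the restored `muxerContext!!` reads the nullable field twice and can race with finish() clearing it between reads. The capture-to-local pattern the removed code used, reduced to its core (types simplified for illustration):

    import android.media.MediaCodec
    import android.media.MediaMuxer
    import java.nio.ByteBuffer

    // Sketch: read the nullable field once into a local; this avoids both a
    // NullPointerException from `!!` and a second read racing with finish().
    class MuxerContextExample(var muxer: MediaMuxer?, var videoTrack: Int = 0) {
        fun writeSample(encodedData: ByteBuffer, bufferInfo: MediaCodec.BufferInfo): Boolean {
            val muxer = this.muxer ?: return false // capture once, bail out if gone
            muxer.writeSampleData(videoTrack, encodedData, bufferInfo)
            return true
        }
    }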

View File

@@ -1,15 +0,0 @@
package com.mrousavy.camera.core
import android.view.Surface
/**
* Common interface for chunked video recorders.
* Implemented by both ChunkedRecordingManager (regular MP4) and
* FragmentedRecordingManager (HLS-compatible fMP4).
*/
interface ChunkedRecorderInterface {
val surface: Surface
fun start()
fun finish()
}
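
Note: this deleted interface is what let RecordingSession treat both recorders interchangeably. A minimal sketch of a consumer written against it (the function is illustrative):

    import android.view.Surface

    // Sketch: a consumer only needs the recorder's Surface and lifecycle;
    // either ChunkedRecordingManager or FragmentedRecordingManager fits.
    fun record(recorder: ChunkedRecorderInterface, configure: (Surface) -> Unit) {
        configure(recorder.surface) // route encoder output into the recorder
        recorder.start()
        // ... capture frames ...
        recorder.finish()
    }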

View File

@@ -1,332 +0,0 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import androidx.media3.common.Format
import androidx.media3.common.MimeTypes
import androidx.media3.common.util.UnstableApi
import androidx.media3.muxer.FragmentedMp4Muxer
import androidx.media3.muxer.Muxer
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteBuffer
/**
* A recording manager that produces HLS-compatible fragmented MP4 segments.
*
* This produces output similar to the iOS implementation:
* - An initialization segment (init.mp4) containing codec configuration
* - Numbered data segments (0.mp4, 1.mp4, ...) containing media data
*
* Uses AndroidX Media3's FragmentedMp4Muxer which produces proper fMP4 output.
*/
@UnstableApi
class FragmentedRecordingManager(
private val encoder: MediaCodec,
private val outputDirectory: File,
private val orientationDegrees: Int,
private val targetSegmentDurationUs: Long,
private val callbacks: CameraSession.Callback
) : MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "FragmentedRecorder"
fun fromParams(
callbacks: CameraSession.Callback,
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
segmentDurationSeconds: Int = 6
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees()
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
fps?.apply {
format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
}
// I-frame interval affects segment boundaries
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return FragmentedRecordingManager(
codec,
outputDirectory,
recordingOrientationDegrees,
segmentDurationSeconds * 1_000_000L,
callbacks
)
}
}
// State management
private var chunkIndex = 0
private var encodedFormat: MediaFormat? = null
private var recording = false
// Segment tracking
private var segmentContext: SegmentContext? = null
private var initSegmentEmitted = false
override val surface: Surface = encoder.createInputSurface()
init {
if (!outputDirectory.exists()) {
outputDirectory.mkdirs()
}
encoder.setCallback(this)
}
/**
* Context for a single data segment being written.
* Init segments are created separately via createInitSegment().
*/
private inner class SegmentContext(
private val format: MediaFormat,
private val segmentIndex: Int
) {
private val filename = "$segmentIndex.mp4"
private val file = File(outputDirectory, filename)
private val outputStream = FileOutputStream(file)
private val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
private lateinit var videoTrack: Muxer.TrackToken
private var startTimeUs: Long = -1L
private var lastTimeUs: Long = 0L
private var sampleCount = 0
init {
val media3Format = convertToMedia3Format(format)
videoTrack = muxer.addTrack(media3Format)
Log.d(TAG, "Created segment context: $filename")
}
fun writeSample(buffer: ByteBuffer, bufferInfo: BufferInfo): Boolean {
if (startTimeUs < 0) {
startTimeUs = bufferInfo.presentationTimeUs
}
lastTimeUs = bufferInfo.presentationTimeUs
val isKeyFrame = (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
muxer.writeSampleData(videoTrack, buffer, bufferInfo)
sampleCount++
// Check if we should start a new segment at the next keyframe
if (isKeyFrame && sampleCount > 1) {
val segmentDurationUs = bufferInfo.presentationTimeUs - startTimeUs
if (segmentDurationUs >= targetSegmentDurationUs) {
return true // Signal to create new segment
}
}
return false
}
fun finish(): Long {
try {
muxer.close()
outputStream.close()
} catch (e: Exception) {
Log.e(TAG, "Error closing segment", e)
}
val durationUs = if (lastTimeUs > startTimeUs) lastTimeUs - startTimeUs else 0L
callbacks.onVideoChunkReady(file, segmentIndex, durationUs)
Log.d(TAG, "Finished segment: $filename, samples=$sampleCount, duration=${durationUs/1000}ms")
return durationUs
}
}
private fun createNewSegment() {
val format = encodedFormat
if (format == null) {
Log.e(TAG, "Cannot create segment: encodedFormat is null")
return
}
// Close previous segment
segmentContext?.finish()
// Create new data segment (init segments are created separately)
segmentContext = SegmentContext(format, chunkIndex)
chunkIndex++
}
override fun start() {
encoder.start()
recording = true
}
override fun finish() {
synchronized(this) {
recording = false
segmentContext?.finish()
segmentContext = null
try {
encoder.stop()
encoder.release()
} catch (e: Exception) {
Log.e(TAG, "Error stopping encoder", e)
}
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Not used for Surface input
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
synchronized(this) {
if (!recording) {
encoder.releaseOutputBuffer(index, false)
return
}
val encodedData = encoder.getOutputBuffer(index)
if (encodedData == null) {
Log.e(TAG, "getOutputBuffer returned null")
encoder.releaseOutputBuffer(index, false)
return
}
// Wait until init segment is emitted (happens in onOutputFormatChanged)
if (!initSegmentEmitted) {
encoder.releaseOutputBuffer(index, false)
return
}
// Create first data segment if needed
if (segmentContext == null) {
createNewSegment()
}
val context = segmentContext
if (context == null) {
encoder.releaseOutputBuffer(index, false)
return
}
try {
val shouldStartNewSegment = context.writeSample(encodedData, bufferInfo)
if (shouldStartNewSegment) {
createNewSegment()
// Write this keyframe to the new segment as well
segmentContext?.writeSample(encodedData, bufferInfo)
}
} catch (e: Exception) {
Log.e(TAG, "Error writing sample", e)
}
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
Log.i(TAG, "Output format changed: $format")
encodedFormat = format
// Create the init segment immediately when we get the format
// This produces an fMP4 file with just ftyp + moov (no samples)
if (!initSegmentEmitted) {
createInitSegment(format)
initSegmentEmitted = true
}
}
/**
* Creates an initialization segment containing only codec configuration (ftyp + moov).
* This is done by creating a muxer, adding the track, and immediately closing it
* without writing any samples.
*/
private fun createInitSegment(format: MediaFormat) {
val initFile = File(outputDirectory, "init.mp4")
try {
val outputStream = FileOutputStream(initFile)
val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
// Convert and add the track
val media3Format = convertToMedia3Format(format)
muxer.addTrack(media3Format)
// Close immediately - this writes just the header (ftyp + moov)
muxer.close()
outputStream.close()
Log.d(TAG, "Created init segment: ${initFile.absolutePath}")
callbacks.onInitSegmentReady(initFile)
} catch (e: Exception) {
Log.e(TAG, "Error creating init segment", e)
}
}
private fun convertToMedia3Format(mediaFormat: MediaFormat): Format {
val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME) ?: MimeTypes.VIDEO_H264
val width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH)
val height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)
val bitRate = try { mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE) } catch (e: Exception) { -1 }
val frameRate = try { mediaFormat.getInteger(MediaFormat.KEY_FRAME_RATE) } catch (e: Exception) { -1 }
// Get CSD (Codec Specific Data) if available - required for init segment
val csd0 = mediaFormat.getByteBuffer("csd-0")
val csd1 = mediaFormat.getByteBuffer("csd-1")
val initData = mutableListOf<ByteArray>()
csd0?.let {
val bytes = ByteArray(it.remaining())
it.duplicate().get(bytes)
initData.add(bytes)
}
csd1?.let {
val bytes = ByteArray(it.remaining())
it.duplicate().get(bytes)
initData.add(bytes)
}
return Format.Builder()
.setSampleMimeType(mimeType)
.setWidth(width)
.setHeight(height)
.setRotationDegrees(orientationDegrees)
.apply {
if (bitRate > 0) setAverageBitrate(bitRate)
if (frameRate > 0) setFrameRate(frameRate.toFloat())
if (initData.isNotEmpty()) setInitializationData(initData)
}
.build()
}
}
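
Note: for context on how these files were meant to be consumed, an fMP4 HLS stream references init.mp4 via EXT-X-MAP and each numbered segment with its duration, which is why onVideoChunkReady carried durationUs. A hedged sketch of a playlist writer (not part of this repo):

    import java.io.File

    // Sketch: build a minimal HLS media playlist from the segments this
    // recorder emits (init.mp4 + 0.mp4, 1.mp4, ...). Durations come from
    // the per-chunk durationUs values; 6 s matches the diff's default.
    fun writeHlsPlaylist(outputDirectory: File, segmentDurationsUs: List<Long>, targetDurationSeconds: Int = 6) {
        val playlist = buildString {
            appendLine("#EXTM3U")
            appendLine("#EXT-X-VERSION:7")             // fMP4 playlists typically declare version 7
            appendLine("#EXT-X-TARGETDURATION:$targetDurationSeconds")
            appendLine("#EXT-X-MAP:URI=\"init.mp4\"")  // codec config only, no samples
            segmentDurationsUs.forEachIndexed { index, durationUs ->
                appendLine("#EXTINF:${durationUs / 1_000_000.0},")
                appendLine("$index.mp4")
            }
            appendLine("#EXT-X-ENDLIST")
        }
        File(outputDirectory, "playlist.m3u8").writeText(playlist)
    }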

View File

@@ -9,10 +9,8 @@ import android.os.Looper
import android.util.Log
import android.util.Size
import android.view.PixelCopy
import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.WindowManager
import com.facebook.react.bridge.UiThreadUtil
import com.mrousavy.camera.extensions.resize
import com.mrousavy.camera.extensions.rotatedBy
@@ -152,8 +150,6 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
val width = frame.width()
val height = frame.height()
// Create bitmap matching surface frame dimensions for PixelCopy
// The original code swapped dimensions assuming landscape input - keep that for consistency
val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
// Use a coroutine to suspend until the PixelCopy request is complete
@@ -163,23 +159,7 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
bitmap,
{ copyResult ->
if (copyResult == PixelCopy.SUCCESS) {
// Get actual device rotation from WindowManager instead of relying on
// the orientation prop, which may not update on Android when rotating
// between landscape-left and landscape-right.
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val actualOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
else -> Orientation.PORTRAIT
}
Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
continuation.resume(bitmap.transformBitmap(actualOrientation))
continuation.resume(rotateBitmap90CounterClockwise(bitmap))
} else {
continuation.resumeWithException(
RuntimeException("PixelCopy failed with error code $copyResult")
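
Note: the surrounding code suspends until the callback-based PixelCopy API completes. That wrapper pattern, reduced to its core (assuming a SurfaceView source and a main-thread handler):

    import android.graphics.Bitmap
    import android.os.Handler
    import android.os.Looper
    import android.view.PixelCopy
    import android.view.SurfaceView
    import kotlin.coroutines.resume
    import kotlin.coroutines.resumeWithException
    import kotlinx.coroutines.suspendCancellableCoroutine

    // Sketch: wrap PixelCopy's callback in a suspend function.
    suspend fun SurfaceView.copyToBitmap(width: Int, height: Int): Bitmap =
        suspendCancellableCoroutine { continuation ->
            val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
            PixelCopy.request(this, bitmap, { copyResult ->
                if (copyResult == PixelCopy.SUCCESS) {
                    continuation.resume(bitmap)
                } else {
                    continuation.resumeWithException(
                        RuntimeException("PixelCopy failed with error code $copyResult")
                    )
                }
            }, Handler(Looper.getMainLooper()))
        }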

View File

@@ -4,7 +4,6 @@ import android.content.Context
import android.util.Log
import android.util.Size
import android.view.Surface
import androidx.media3.common.util.UnstableApi
import com.facebook.common.statfs.StatFsHelper
import com.mrousavy.camera.extensions.getRecommendedBitRate
import com.mrousavy.camera.types.Orientation
@@ -15,8 +14,6 @@ import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
@UnstableApi
class RecordingSession(
context: Context,
val cameraId: String,
@@ -30,8 +27,6 @@ class RecordingSession(
private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit,
private val allCallbacks: CameraSession.Callback,
// Use the new FragmentedMp4Muxer-based recorder for HLS-compatible output
private val useFragmentedMp4: Boolean = true
) {
companion object {
private const val TAG = "RecordingSession"
@@ -39,9 +34,6 @@ class RecordingSession(
private const val AUDIO_SAMPLING_RATE = 44_100
private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
private const val AUDIO_CHANNELS = 1
// Segment duration in seconds (matching iOS default of 6 seconds)
private const val SEGMENT_DURATION_SECONDS = 6
}
data class Video(val path: String, val durationMs: Long, val size: Size)
@@ -49,33 +41,16 @@ class RecordingSession(
private val outputPath: File = File(filePath)
private val bitRate = getBitRate()
// Use FragmentedRecordingManager for HLS-compatible fMP4 output,
// or fall back to ChunkedRecordingManager for regular MP4 chunks
private val recorder: ChunkedRecorderInterface = if (useFragmentedMp4) {
FragmentedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath,
SEGMENT_DURATION_SECONDS
)
} else {
ChunkedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath
)
}
private val recorder = ChunkedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath
)
private var startTime: Long? = null
val surface: Surface
get() {

View File

@@ -9,6 +9,7 @@ import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.bridge.UiThreadUtil
// import com.facebook.react.turbomodule.core.CallInvokerHolderImpl // Commented out due to RN 0.79+ compatibility
import com.facebook.react.uimanager.UIManagerHelper
import com.mrousavy.camera.CameraView
import com.mrousavy.camera.core.ViewNotFoundError
@@ -77,9 +78,6 @@ class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
FrameProcessorPluginRegistry.getPlugin(name, this, options)
// private C++ funcs
// Frame Processors are disabled - native registration is skipped via VISION_CAMERA_ENABLE_FRAME_PROCESSORS=OFF
// This method is never called or registered, kept for reference only
// @DoNotStrip
// @Keep
// private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: Any, scheduler: VisionCameraScheduler): HybridData
// Commented out due to React Native 0.79+ API compatibility issues
// private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData
}

View File

@@ -11,6 +11,6 @@ inline fun withPromise(promise: Promise, closure: () -> Any?) {
} catch (e: Throwable) {
e.printStackTrace()
val error = if (e is CameraError) e else UnknownCameraError(e)
promise.reject("${error.domain}/${error.id}", error.message ?: "Unknown error", error.cause)
promise.reject("${error.domain}/${error.id}", error.message, error.cause)
}
}
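
Note: the removed `?: "Unknown error"` fallback guards against Throwables whose message is null, so JS always receives a readable error string. The pattern in isolation (the function is illustrative):

    import com.facebook.react.bridge.Promise

    // Sketch: reject with a guaranteed non-null message even when the
    // Throwable carries no message of its own.
    fun rejectWithFallback(promise: Promise, code: String, error: Throwable) {
        promise.reject(code, error.message ?: "Unknown error", error.cause)
    }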