Compare commits


24 Commits

Author SHA1 Message Date
e60c1a4eb1 Write our own muxer to make hls upload actually work 2025-12-21 16:45:04 -08:00
a2d218580c feat: Add fragmented MP4 (fMP4) support for Android
Implements HLS-compatible fragmented MP4 recording on Android using
AndroidX Media3 FragmentedMp4Muxer, matching the iOS implementation.

Changes:
- Add FragmentedRecordingManager for fMP4 segment output
- Add ChunkedRecorderInterface to abstract recorder implementations
- Add onInitSegmentReady callback for init segment (init.mp4)
- Update onVideoChunkReady to include segment duration
- RecordingSession now uses FragmentedRecordingManager by default

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-18 12:29:03 -08:00
61863149c0 flip orientation in camera session 2025-12-11 14:17:34 -08:00
09b50938d2 get orientation change from WindowManager for android 2025-12-11 13:02:00 -08:00
a158ed8350 Merge pull request 'Bump react native w/ api 35 compatibility' (#10) from bump-react-native-with-api35 into main
Reviewed-on: #10
2025-12-11 18:04:51 +00:00
Dean
e7b295546a fix: Add null safety checks in ChunkedRecordingManager
Replace !! operators with proper null checks to prevent
NullPointerExceptions when encodedFormat or muxerContext are null.
This can happen if createNextMuxer is called before
onOutputFormatChanged sets the format.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-11 10:04:18 -08:00
Dean
d87ed8ced2 fix: Handle null error message in promise rejection
Prevents crash when an exception with null message is caught and rejected
through the React Native bridge.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-11 09:48:32 -08:00
f055119735 respect frame processor flag when compiling and force 16kb page alignment 2025-12-01 11:24:24 -07:00
35d80b13d6 disable frame processor jni bindings, preserve video pipeline registration 2025-11-24 10:30:33 -08:00
3d09106e45 skip native library loading for frame processors, wip: failing to launch app 2025-11-17 18:58:13 -08:00
b523e1884f Fix React Native 0.79 Kotlin compilation errors
- Fix currentActivity reference in CameraViewModule
- Convert Map to MutableMap in CameraViewManager for RN 0.79 compatibility
2025-11-17 13:38:30 -08:00
5fcc1a4f77 Bumps and fixes for react native version bump 2025-11-17 13:35:59 -08:00
364171a107 Update Java/Kotlin versions and add opt-in flags for RN 0.79 2025-11-17 13:35:25 -08:00
f90e11897f Fix CMake target for React Native 0.79 compatibility 2025-11-17 11:45:07 -08:00
4798aad464 Merge pull request 'fix/android-api-35-bitmap-config' (#9) from fix/android-api-35-bitmap-config into main
Reviewed-on: #9
2025-10-29 03:22:49 +00:00
Dean
2c8d503e66 Fix Bitmap.Config null-safety for Android API 35 2025-10-28 13:54:54 -07:00
5b52acda26 Gross hack to make things sort of work 2024-11-10 17:51:15 -07:00
17f675657e WIP 2024-11-09 19:52:05 -07:00
c64516693c Merge pull request 'Fix Preview View Aspect Ratio Orientation Issues in android' (#8) from ivan/fix-android-preview-view-aspect-ratio-orientation-issues into main
Reviewed-on: #8
2024-10-12 16:21:18 -06:00
e9f08ef488 Fix Preview View Aspect Ratio Orientation Issues in android 2024-10-12 16:20:23 -06:00
bf122db919 Merge pull request 'Ensure custom exposure mode is supported' (#7) from ivan/ensure-capture-mode-is-supported into main
Reviewed-on: #7
2024-10-10 15:18:12 -06:00
3319e48f7d Ensure custom exposure mode is supported 2024-10-10 15:17:55 -06:00
58714f9dac Merge pull request 'iOS Camera Settings' (#6) from volodymyr/ios-camera-settings into main
Reviewed-on: #6
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2024-10-10 15:12:32 -06:00
8991779851 iOS Camera Settings 2024-10-08 15:53:47 +02:00
32 changed files with 3868 additions and 3279 deletions

View File

@@ -1,5 +0,0 @@
-use flake . --impure
-if [ -f .envrc.local ]; then
-  source .envrc.local
-fi

View File

@@ -19,9 +19,7 @@ endif()
 # Add react-native-vision-camera sources
-add_library(
-  ${PACKAGE_NAME}
-  SHARED
+set(SOURCES
   # Shared C++
   ../cpp/MutableRawBuffer.cpp
   # Java JNI
@@ -31,17 +29,33 @@ add_library(
   src/main/cpp/OpenGLContext.cpp
   src/main/cpp/OpenGLRenderer.cpp
   src/main/cpp/MutableJByteBuffer.cpp
-  # Frame Processor
-  src/main/cpp/frameprocessor/FrameHostObject.cpp
-  src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
-  src/main/cpp/frameprocessor/JSIJNIConversion.cpp
-  src/main/cpp/frameprocessor/VisionCameraProxy.cpp
-  src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
-  src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
-  src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
-  src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
-  src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
-  src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
-)
+)
+
+# Only add Frame Processor sources if enabled
+if (ENABLE_FRAME_PROCESSORS)
+  list(APPEND SOURCES
+    src/main/cpp/frameprocessor/FrameHostObject.cpp
+    src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
+    src/main/cpp/frameprocessor/JSIJNIConversion.cpp
+    src/main/cpp/frameprocessor/VisionCameraProxy.cpp
+    src/main/cpp/frameprocessor/java-bindings/JSharedArray.cpp
+    src/main/cpp/frameprocessor/java-bindings/JFrame.cpp
+    src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp
+    src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
+    src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
+    src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
+  )
+endif()
+
+add_library(
+  ${PACKAGE_NAME}
+  SHARED
+  ${SOURCES}
+)
+
+# Force 16KB page alignment for Android 15+ compatibility
+set_target_properties(${PACKAGE_NAME} PROPERTIES
+  LINK_FLAGS "-Wl,-z,max-page-size=16384"
+)

 # Header Search Paths (includes)
@@ -60,13 +74,13 @@ target_include_directories(
 # Link everything together
 target_link_libraries(
   ${PACKAGE_NAME}
   ${LOG_LIB}                    # <-- Logcat logger
   android                       # <-- Android JNI core
   ReactAndroid::jsi             # <-- RN: JSI
-  ReactAndroid::reactnativejni  # <-- RN: React Native JNI bindings
+  ReactAndroid::reactnative     # <-- RN: React Native JNI bindings (RN 0.76+)
   fbjni::fbjni                  # <-- fbjni
   GLESv2                        # <-- OpenGL (for VideoPipeline)
   EGL                           # <-- OpenGL (EGL) (for VideoPipeline)
 )

 # Optionally also add Frame Processors here

View File

@@ -133,8 +133,16 @@ android {
   }
   compileOptions {
-    sourceCompatibility JavaVersion.VERSION_1_8
-    targetCompatibility JavaVersion.VERSION_1_8
+    sourceCompatibility JavaVersion.VERSION_17
+    targetCompatibility JavaVersion.VERSION_17
+  }
+  kotlinOptions {
+    jvmTarget = "17"
+    freeCompilerArgs += [
+      "-opt-in=kotlin.RequiresOptIn",
+      "-opt-in=com.facebook.react.annotations.UnstableReactNativeAPI"
+    ]
   }

   externalNativeBuild {
@@ -157,6 +165,7 @@ android {
"**/libhermes-executor-debug.so", "**/libhermes-executor-debug.so",
"**/libhermes_executor.so", "**/libhermes_executor.so",
"**/libreactnativejni.so", "**/libreactnativejni.so",
"**/libreactnative.so",
"**/libturbomodulejsijni.so", "**/libturbomodulejsijni.so",
"**/libreact_nativemodule_core.so", "**/libreact_nativemodule_core.so",
"**/libjscexecutor.so" "**/libjscexecutor.so"

View File

@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

View File

@@ -9,11 +9,13 @@
 JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
   return facebook::jni::initialize(vm, [] {
+    // VideoPipeline is needed for video recording even without Frame Processors
+    vision::VideoPipeline::registerNatives();
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+    // Frame Processor JNI bindings - only register when Frame Processors are enabled
     vision::VisionCameraInstaller::registerNatives();
     vision::JVisionCameraProxy::registerNatives();
     vision::JVisionCameraScheduler::registerNatives();
-    vision::VideoPipeline::registerNatives();
-#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
     vision::JFrameProcessor::registerNatives();
     vision::JSharedArray::registerNatives();
 #endif

View File

@@ -40,15 +40,26 @@ fun CameraView.invokeOnStopped() {
   this.sendEvent(event)
 }

-fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
-  Log.e(CameraView.TAG, "invokeOnError(...):")
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
+  Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
   val event = Arguments.createMap()
   event.putInt("index", index)
   event.putString("filepath", filepath.toString())
+  if (durationUs != null) {
+    event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
+  }
   val reactContext = context as ReactContext
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
 }

+fun CameraView.invokeOnInitReady(filepath: File) {
+  Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
+  val event = Arguments.createMap()
+  event.putString("filepath", filepath.toString())
+  val reactContext = context as ReactContext
+  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
+}
+
 fun CameraView.invokeOnError(error: Throwable) {
   Log.e(CameraView.TAG, "invokeOnError(...):")
   error.printStackTrace()

View File

@@ -13,69 +13,36 @@ import com.facebook.react.bridge.ReadableMap
 import com.facebook.react.bridge.WritableMap
 import com.mrousavy.camera.core.CameraSession
 import com.mrousavy.camera.core.InsufficientStorageError
+import com.mrousavy.camera.utils.FileUtils
 import com.mrousavy.camera.types.Flash
-import com.mrousavy.camera.types.QualityPrioritization
 import com.mrousavy.camera.utils.*
 import java.io.File
 import java.io.FileOutputStream
 import java.io.IOException
 import kotlinx.coroutines.*

-private const val TAG = "CameraView.takePhoto"
+private const val TAG = "CameraView.takeSnapshot"

 @SuppressLint("UnsafeOptInUsageError")
 suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap {
   val options = optionsMap.toHashMap()
-  Log.i(TAG, "Taking photo... Options: $options")
-  val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced"
-  val flash = options["flash"] as? String ?: "off"
-  val enableAutoStabilization = options["enableAutoStabilization"] == true
-  val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true
-  val enablePrecapture = options["enablePrecapture"] as? Boolean ?: false
-
-  // TODO: Implement Red Eye Reduction
-  options["enableAutoRedEyeReduction"]
-
-  val flashMode = Flash.fromUnionValue(flash)
-  val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization)
-
-  val photo = cameraSession.takePhoto(
-    qualityPrioritizationMode,
-    flashMode,
-    enableShutterSound,
-    enableAutoStabilization,
-    enablePrecapture,
-    orientation
-  )
-
-  photo.use {
-    Log.i(TAG, "Successfully captured ${photo.image.width} x ${photo.image.height} photo!")
-    val cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId!!)
-
-    val path = try {
-      savePhotoToFile(context, cameraCharacteristics, photo)
-    } catch (e: IOException) {
-      if (e.message?.contains("no space left", true) == true) {
-        throw InsufficientStorageError()
-      } else {
-        throw e
-      }
-    }
-    Log.i(TAG, "Successfully saved photo to file! $path")
-
-    val map = Arguments.createMap()
-    map.putString("path", path)
-    map.putInt("width", photo.image.width)
-    map.putInt("height", photo.image.height)
-    map.putString("orientation", photo.orientation.unionValue)
-    map.putBoolean("isRawPhoto", photo.format == ImageFormat.RAW_SENSOR)
-    map.putBoolean("isMirrored", photo.isMirrored)
-    return map
-  }
+  Log.i(TAG, "Taking snapshot... Options: $options")
+  val bitmap = previewView.getBitmap() ?: throw Error()
+  val file = FileUtils.createTempFile(context, "png");
+
+  // Write snapshot to .jpg file
+  FileUtils.writeBitmapTofile(bitmap, file, 100)
+  Log.i(TAG, "Successfully saved snapshot to file!")
+
+  // Parse output data
+  val map = Arguments.createMap()
+  map.putString("path", file.absolutePath)
+  map.putInt("width", bitmap.width)
+  map.putInt("height", bitmap.height)
+  map.putBoolean("isMirrored", false)
+  return map
 }

 private fun writePhotoToFile(photo: CameraSession.CapturedPhoto, file: File) {

View File

@@ -102,7 +102,7 @@ class CameraView(context: Context) :
   // session
   internal val cameraSession: CameraSession
-  private val previewView: PreviewView
+  val previewView: PreviewView
   private var currentConfigureCall: Long = System.currentTimeMillis()
   internal var frameProcessor: FrameProcessor? = null
@@ -271,8 +271,12 @@
     invokeOnStopped()
   }

-  override fun onVideoChunkReady(filepath: File, index: Int) {
-    invokeOnChunkReady(filepath, index)
+  override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
+    invokeOnChunkReady(filepath, index, durationUs)
+  }
+
+  override fun onInitSegmentReady(filepath: File) {
+    invokeOnInitReady(filepath)
   }

   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {

View File

@@ -32,7 +32,8 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraError", MapBuilder.of("registrationName", "onError")) .put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned")) .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady")) .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.build() .put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
.build()?.toMutableMap()
override fun getName(): String = TAG override fun getName(): String = TAG

View File

@@ -31,10 +31,12 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   init {
     try {
       // Load the native part of VisionCamera.
-      // Includes the OpenGL VideoPipeline, as well as Frame Processor JSI bindings
+      // Includes the OpenGL VideoPipeline (needed for video recording)
+      // Frame Processors remain disabled for RN 0.79+ compatibility
       System.loadLibrary("VisionCamera")
+      Log.i(TAG, "VisionCamera native library loaded successfully")
     } catch (e: UnsatisfiedLinkError) {
-      Log.e(VisionCameraProxy.TAG, "Failed to load VisionCamera C++ library!", e)
+      Log.e(TAG, "Failed to load VisionCamera C++ library!", e)
       throw e
     }
   }
@@ -73,15 +75,11 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   }

   @ReactMethod(isBlockingSynchronousMethod = true)
-  fun installFrameProcessorBindings(): Boolean =
-    try {
-      val proxy = VisionCameraProxy(reactApplicationContext)
-      VisionCameraInstaller.install(proxy)
-      true
-    } catch (e: Error) {
-      Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
-      false
-    }
+  fun installFrameProcessorBindings(): Boolean {
+    // Frame Processors are disabled for React Native 0.79+ compatibility
+    Log.i(TAG, "Frame Processor bindings not installed - Frame Processors disabled for RN 0.79+ compatibility")
+    return false
+  }

   @ReactMethod
   fun takePhoto(viewTag: Int, options: ReadableMap, promise: Promise) {
@@ -157,7 +155,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
   }

   private fun canRequestPermission(permission: String): Boolean {
-    val activity = currentActivity as? PermissionAwareActivity
+    val activity = reactApplicationContext.currentActivity as? PermissionAwareActivity
     return activity?.shouldShowRequestPermissionRationale(permission) ?: false
   }

View File

@@ -15,6 +15,7 @@ import android.util.Log
 import android.util.Size
 import android.view.Surface
 import android.view.SurfaceHolder
+import android.view.WindowManager
 import androidx.core.content.ContextCompat
 import com.google.mlkit.vision.barcode.common.Barcode
 import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
@@ -425,6 +426,21 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     val fps = configuration?.fps ?: 30

+    // Get actual device rotation from WindowManager since the React Native orientation hook
+    // doesn't update when rotating between landscape-left and landscape-right on Android.
+    // Map device rotation to the correct orientationHint for video recording:
+    //   - Counter-clockwise (ROTATION_90) → 270° hint
+    //   - Clockwise (ROTATION_270) → 90° hint
+    val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+    val deviceRotation = windowManager.defaultDisplay.rotation
+    val recordingOrientation = when (deviceRotation) {
+      Surface.ROTATION_0 -> Orientation.PORTRAIT
+      Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
+      Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+      Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
+      else -> Orientation.PORTRAIT
+    }
+
     val recording = RecordingSession(
       context,
       cameraId,
@@ -497,7 +513,8 @@
     fun onInitialized()
     fun onStarted()
     fun onStopped()
-    fun onVideoChunkReady(filepath: File, index: Int)
+    fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
+    fun onInitSegmentReady(filepath: File)
     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
   }
 }

View File

@@ -14,7 +14,7 @@ import java.io.File
 import java.nio.ByteBuffer

 class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
-  MediaCodec.Callback() {
+  MediaCodec.Callback(), ChunkedRecorderInterface {
   companion object {
     private const val TAG = "ChunkedRecorder"
@@ -73,7 +73,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   private val targetDurationUs = iFrameInterval * 1000000

-  val surface: Surface = encoder.createInputSurface()
+  override val surface: Surface = encoder.createInputSurface()

   init {
     if (!this.outputDirectory.exists()) {
@@ -95,7 +95,9 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     fun finish() {
       muxer.stop()
       muxer.release()
-      callbacks.onVideoChunkReady(filepath, chunkIndex)
+      // Calculate duration from start time - this is approximate
+      // The new FragmentedRecordingManager provides accurate duration
+      callbacks.onVideoChunkReady(filepath, chunkIndex, null)
     }
   }
@@ -105,6 +107,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     muxerContext?.finish()
     chunkIndex++

+    val format = this.encodedFormat
+    if (format == null) {
+      Log.e(TAG, "Cannot create muxer: encodedFormat is null (onOutputFormatChanged not called yet)")
+      return
+    }
+
     val newFileName = "$chunkIndex.mp4"
     val newOutputFile = File(this.outputDirectory, newFileName)
     Log.i(TAG, "Creating new muxer for file: $newFileName")
@@ -114,7 +122,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     )
     muxer.setOrientationHint(orientationHint)
     muxerContext = MuxerContext(
-      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
+      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, format, this.callbacks
     )
   }
@@ -123,15 +131,16 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   }

   private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
-    return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
+    val context = muxerContext ?: return 0L
+    return bufferInfo.presentationTimeUs - context.startTimeUs
   }

-  fun start() {
+  override fun start() {
     encoder.start()
     recording = true
   }

-  fun finish() {
+  override fun finish() {
     synchronized(this) {
       muxerContext?.finish()
       recording = false
@@ -155,7 +164,13 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
         this.createNextMuxer(bufferInfo)
       }
-      muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
+      val context = muxerContext
+      if (context == null) {
+        Log.e(TAG, "Cannot write sample data: muxerContext is null")
+        encoder.releaseOutputBuffer(index, false)
+        return
+      }
+      context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
       encoder.releaseOutputBuffer(index, false)
     }
   }

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.core
import android.view.Surface
/**
* Common interface for chunked video recorders.
* Implemented by both ChunkedRecordingManager (regular MP4) and
* FragmentedRecordingManager (HLS-compatible fMP4).
*/
interface ChunkedRecorderInterface {
val surface: Surface
fun start()
fun finish()
}
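Both recorders expose the same three members, so RecordingSession can drive either one without touching the encoder wiring. A minimal Kotlin sketch of that substitution (the startRecorder helper below is illustrative, not part of this changeset):

// Illustrative sketch only - the real RecordingSession wiring may differ.
fun startRecorder(recorder: ChunkedRecorderInterface): Surface {
  recorder.start()         // same call for ChunkedRecordingManager or FragmentedRecordingManager
  return recorder.surface  // the camera pipeline renders encoder input into this surface
}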

View File

@@ -0,0 +1,174 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
/**
* A recording manager that produces HLS-compatible fragmented MP4 segments.
*
* Uses HlsMuxer (following Android's MediaMuxer pattern) to produce:
* - init.mp4: Initialization segment (ftyp + moov with mvex)
* - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
*/
class FragmentedRecordingManager(
private val encoder: MediaCodec,
private val muxer: HlsMuxer
) : MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "FragmentedRecorder"
private const val DEFAULT_SEGMENT_DURATION_SECONDS = 6
fun fromParams(
callbacks: CameraSession.Callback,
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees()
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
// Use size dimensions directly - the encoder output format will have the actual dimensions
// Don't swap based on orientation here; the camera pipeline handles that
val width = size.width
val height = size.height
Log.d(TAG, "Input size: ${size.width}x${size.height}, " +
"cameraOrientation: $cameraOrientation ($cameraOrientationDegrees°), " +
"recordingOrientation: $recordingOrientationDegrees°")
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
val effectiveFps = fps ?: 30
format.setInteger(MediaFormat.KEY_FRAME_RATE, effectiveFps)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, orientation: $recordingOrientationDegrees")
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
// Create muxer with callbacks and orientation
val muxer = HlsMuxer(
outputDirectory = outputDirectory,
callback = object : HlsMuxer.Callback {
override fun onInitSegmentReady(file: File) {
callbacks.onInitSegmentReady(file)
}
override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) {
callbacks.onVideoChunkReady(file, index, durationUs)
}
},
orientationDegrees = recordingOrientationDegrees
)
muxer.setSegmentDuration(segmentDurationSeconds * 1_000_000L)
Log.d(TAG, "Created HlsMuxer with orientation: $recordingOrientationDegrees degrees")
return FragmentedRecordingManager(codec, muxer)
}
}
private var recording = false
private var muxerStarted = false
private var trackIndex = -1
override val surface: Surface = encoder.createInputSurface()
init {
encoder.setCallback(this)
}
override fun start() {
encoder.start()
recording = true
}
override fun finish() {
synchronized(this) {
recording = false
if (muxerStarted) {
muxer.stop()
muxer.release()
}
try {
encoder.stop()
encoder.release()
} catch (e: Exception) {
Log.e(TAG, "Error stopping encoder", e)
}
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Not used for Surface input
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
synchronized(this) {
if (!recording) {
encoder.releaseOutputBuffer(index, false)
return
}
if (!muxerStarted) {
encoder.releaseOutputBuffer(index, false)
return
}
val buffer = encoder.getOutputBuffer(index)
if (buffer == null) {
Log.e(TAG, "getOutputBuffer returned null")
encoder.releaseOutputBuffer(index, false)
return
}
try {
muxer.writeSampleData(trackIndex, buffer, bufferInfo)
} catch (e: Exception) {
Log.e(TAG, "Error writing sample", e)
}
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
synchronized(this) {
Log.i(TAG, "Output format changed: $format")
trackIndex = muxer.addTrack(format)
muxer.start()
muxerStarted = true
}
}
}
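Read together with the CameraView and CameraSession diffs elsewhere in this compare, segment events travel from the muxer up to JavaScript through three layers. The chain, sketched as Kotlin comments:

// HlsMuxer.Callback.onMediaSegmentReady(file, index, durationUs)
//   -> CameraSession.Callback.onVideoChunkReady(file, index, durationUs)  (wired up in fromParams above)
//   -> CameraView.invokeOnChunkReady(...), which emits the "onVideoChunkReady" React Native event;
// init segments follow the same path via onInitSegmentReady -> invokeOnInitReady ("onInitReady").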

View File

@@ -0,0 +1,857 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaFormat
import android.util.Log
import java.io.ByteArrayOutputStream
import java.io.DataOutputStream
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteBuffer
/**
* A muxer for creating HLS-compatible fragmented MP4 output.
*
* Follows the same pattern as Android's MediaMuxer:
* 1. Create muxer with output directory
* 2. addTrack() with MediaFormat
* 3. start() - writes init.mp4
* 4. writeSampleData() for each encoded sample
* 5. stop() - finalizes last segment
* 6. release() - cleanup
*
* Produces:
* - init.mp4: Initialization segment (ftyp + moov with mvex)
* - 0.mp4, 1.mp4, ...: Media segments (moof + mdat)
*/
class HlsMuxer(
private val outputDirectory: File,
private val callback: Callback,
private val orientationDegrees: Int = 0
) {
companion object {
private const val TAG = "HlsMuxer"
private const val DEFAULT_SEGMENT_DURATION_US = 6_000_000L // 6 seconds
}
interface Callback {
fun onInitSegmentReady(file: File)
fun onMediaSegmentReady(file: File, index: Int, durationUs: Long)
}
// Configuration
private var targetSegmentDurationUs: Long = DEFAULT_SEGMENT_DURATION_US
private var timescale: Int = 30000 // Default, updated from format
// State
private var state = State.UNINITIALIZED
private var trackFormat: MediaFormat? = null
private var sequenceNumber = 1
private var segmentIndex = 0
// Current segment data
private val pendingSamples = mutableListOf<Sample>()
private var segmentStartTimeUs = -1L
private var lastPresentationTimeUs = 0L
private enum class State {
UNINITIALIZED,
INITIALIZED,
STARTED,
STOPPED,
RELEASED
}
private data class Sample(
val data: ByteArray,
val presentationTimeUs: Long,
var durationUs: Long,
val isKeyFrame: Boolean
)
// ==================== Annex-B to AVCC Conversion ====================
/**
* Converts H.264 data from Annex-B format to AVCC format.
*
* Annex-B uses start codes (00 00 00 01 or 00 00 01) to delimit NAL units.
* AVCC uses 4-byte big-endian length prefixes before each NAL unit.
*
* This conversion is required because:
* - MediaCodec outputs Annex-B format
* - fMP4/HLS requires AVCC format (as specified in avcC box with NAL length size = 4)
*/
private fun convertAnnexBToAvcc(annexBData: ByteArray): ByteArray {
val nalUnits = parseAnnexBNalUnits(annexBData)
if (nalUnits.isEmpty()) {
Log.w(TAG, "No NAL units found in sample, returning original data")
return annexBData
}
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
for (nalUnit in nalUnits) {
// Write 4-byte big-endian length prefix
dos.writeInt(nalUnit.size)
// Write NAL unit data (without start code)
dos.write(nalUnit)
}
return output.toByteArray()
}
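  // Editor's worked example of the conversion above:
  // Annex-B input:  00 00 00 01 | 65 88 ...   (4-byte start code, then a 1234-byte IDR NAL)
  // AVCC output:    00 00 04 D2 | 65 88 ...   (0x000004D2 = 1234 as a big-endian length prefix, same NAL bytes)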
/**
* Parses Annex-B formatted data into individual NAL units.
* Returns list of NAL unit byte arrays (without start codes).
*/
private fun parseAnnexBNalUnits(data: ByteArray): List<ByteArray> {
val nalUnits = mutableListOf<ByteArray>()
var i = 0
while (i < data.size) {
// Find start code
val startCodeLength = findStartCode(data, i)
if (startCodeLength == 0) {
// No start code found at current position
// This might happen if data doesn't start with a start code
if (nalUnits.isEmpty() && i == 0) {
// Data might already be in AVCC format or malformed
// Try to detect AVCC format (first 4 bytes would be a reasonable length)
if (data.size >= 4) {
val possibleLength = ((data[0].toInt() and 0xFF) shl 24) or
((data[1].toInt() and 0xFF) shl 16) or
((data[2].toInt() and 0xFF) shl 8) or
(data[3].toInt() and 0xFF)
if (possibleLength > 0 && possibleLength <= data.size - 4) {
// Looks like AVCC format already, return original
Log.d(TAG, "Data appears to already be in AVCC format")
return emptyList()
}
}
}
i++
continue
}
val nalStart = i + startCodeLength
// Find end of this NAL unit (start of next, or end of data)
var nalEnd = data.size
var j = nalStart
while (j < data.size - 2) {
val nextStartCode = findStartCode(data, j)
if (nextStartCode > 0) {
nalEnd = j
break
}
j++
}
if (nalEnd > nalStart) {
nalUnits.add(data.copyOfRange(nalStart, nalEnd))
}
i = nalEnd
}
return nalUnits
}
/**
* Checks for Annex-B start code at given position.
* Returns start code length (3 or 4) or 0 if no start code found.
*/
private fun findStartCode(data: ByteArray, offset: Int): Int {
if (offset + 4 <= data.size &&
data[offset] == 0.toByte() &&
data[offset + 1] == 0.toByte() &&
data[offset + 2] == 0.toByte() &&
data[offset + 3] == 1.toByte()) {
return 4 // 4-byte start code: 00 00 00 01
}
if (offset + 3 <= data.size &&
data[offset] == 0.toByte() &&
data[offset + 1] == 0.toByte() &&
data[offset + 2] == 1.toByte()) {
return 3 // 3-byte start code: 00 00 01
}
return 0
}
/**
* Sets the target segment duration.
* Must be called before start().
*/
fun setSegmentDuration(durationUs: Long) {
check(state == State.UNINITIALIZED || state == State.INITIALIZED) {
"Cannot set segment duration after start()"
}
targetSegmentDurationUs = durationUs
}
/**
* Adds a track to the muxer.
*
* @param format The MediaFormat describing the track
* @return Track index (always 0 for now, single video track)
*/
fun addTrack(format: MediaFormat): Int {
check(state == State.UNINITIALIZED) { "addTrack() must be called before start()" }
trackFormat = format
// Extract timescale from frame rate
val fps = try {
format.getInteger(MediaFormat.KEY_FRAME_RATE)
} catch (e: Exception) {
30
}
timescale = fps * 1000 // Use fps * 1000 for good precision
state = State.INITIALIZED
val formatWidth = try { format.getInteger(MediaFormat.KEY_WIDTH) } catch (e: Exception) { -1 }
val formatHeight = try { format.getInteger(MediaFormat.KEY_HEIGHT) } catch (e: Exception) { -1 }
Log.d(TAG, "Added track: ${format.getString(MediaFormat.KEY_MIME)}, " +
"encoder output: ${formatWidth}x${formatHeight}, " +
"timescale=$timescale, orientation=$orientationDegrees°")
return 0 // Single track, index 0
}
/**
* Starts the muxer, writing the initialization segment.
*/
fun start() {
check(state == State.INITIALIZED) { "Must call addTrack() before start()" }
val format = trackFormat ?: throw IllegalStateException("No track format")
// Create output directory if needed
if (!outputDirectory.exists()) {
outputDirectory.mkdirs()
}
// Write init segment
val initBytes = buildInitSegment(format)
val initFile = File(outputDirectory, "init.mp4")
FileOutputStream(initFile).use { it.write(initBytes) }
Log.d(TAG, "Created init segment: ${initFile.absolutePath} (${initBytes.size} bytes)")
callback.onInitSegmentReady(initFile)
state = State.STARTED
}
/**
* Writes sample data to the muxer.
*
* @param trackIndex Track index (must be 0)
* @param buffer The encoded sample data
* @param bufferInfo Sample metadata (size, presentation time, flags)
*/
fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
check(state == State.STARTED) { "Must call start() before writeSampleData()" }
check(trackIndex == 0) { "Invalid track index: $trackIndex" }
// Skip codec config data (already in init segment)
if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
return
}
val isKeyFrame = (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
val presentationTimeUs = bufferInfo.presentationTimeUs
// Initialize segment start time
if (segmentStartTimeUs < 0) {
segmentStartTimeUs = presentationTimeUs
}
// Check if we should finalize current segment (at keyframe boundaries)
if (isKeyFrame && pendingSamples.isNotEmpty()) {
val segmentDurationUs = presentationTimeUs - segmentStartTimeUs
if (segmentDurationUs >= targetSegmentDurationUs) {
finalizeCurrentSegment()
segmentStartTimeUs = presentationTimeUs
}
}
// Copy buffer data and convert from Annex-B to AVCC format
val rawData = ByteArray(bufferInfo.size)
buffer.position(bufferInfo.offset)
buffer.limit(bufferInfo.offset + bufferInfo.size)
buffer.get(rawData)
// Convert Annex-B (start codes) to AVCC (length prefixes)
val data = convertAnnexBToAvcc(rawData)
// Update duration of previous sample
if (pendingSamples.isNotEmpty()) {
val lastSample = pendingSamples.last()
lastSample.durationUs = presentationTimeUs - lastSample.presentationTimeUs
}
// Estimate duration (will be corrected by next sample)
val estimatedDurationUs = if (lastPresentationTimeUs > 0) {
presentationTimeUs - lastPresentationTimeUs
} else {
1_000_000L / 30 // Assume 30fps
}
pendingSamples.add(Sample(
data = data,
presentationTimeUs = presentationTimeUs,
durationUs = estimatedDurationUs,
isKeyFrame = isKeyFrame
))
lastPresentationTimeUs = presentationTimeUs
}
/**
* Stops the muxer, finalizing any pending segment.
*/
fun stop() {
check(state == State.STARTED) { "Muxer not started" }
if (pendingSamples.isNotEmpty()) {
finalizeCurrentSegment()
}
state = State.STOPPED
Log.d(TAG, "Muxer stopped, wrote $segmentIndex segments")
}
/**
* Releases resources.
*/
fun release() {
if (state == State.STARTED) {
stop()
}
pendingSamples.clear()
state = State.RELEASED
}
/**
* Finalizes the current segment and writes it to disk.
*/
private fun finalizeCurrentSegment() {
if (pendingSamples.isEmpty()) return
try {
val baseDecodeTimeUs = pendingSamples.first().presentationTimeUs
val fragmentBytes = buildMediaSegment(pendingSamples, sequenceNumber, baseDecodeTimeUs)
val segmentFile = File(outputDirectory, "$segmentIndex.mp4")
FileOutputStream(segmentFile).use { it.write(fragmentBytes) }
// Calculate duration
val firstPts = pendingSamples.first().presentationTimeUs
val lastSample = pendingSamples.last()
val durationUs = (lastSample.presentationTimeUs - firstPts) + lastSample.durationUs
Log.d(TAG, "Created segment $segmentIndex: samples=${pendingSamples.size}, " +
"duration=${durationUs / 1000}ms, size=${fragmentBytes.size} bytes")
callback.onMediaSegmentReady(segmentFile, segmentIndex, durationUs)
segmentIndex++
sequenceNumber++
pendingSamples.clear()
} catch (e: Exception) {
Log.e(TAG, "Error finalizing segment $segmentIndex", e)
}
}
// ==================== Init Segment Building ====================
/**
* Builds the initialization segment (ftyp + moov).
*/
private fun buildInitSegment(format: MediaFormat): ByteArray {
val width = format.getInteger(MediaFormat.KEY_WIDTH)
val height = format.getInteger(MediaFormat.KEY_HEIGHT)
val sps = format.getByteBuffer("csd-0")?.let { extractNalUnit(it) }
?: throw IllegalArgumentException("Missing SPS (csd-0)")
val pps = format.getByteBuffer("csd-1")?.let { extractNalUnit(it) }
?: throw IllegalArgumentException("Missing PPS (csd-1)")
val output = ByteArrayOutputStream()
// ftyp
output.write(buildFtypBox())
// moov
output.write(buildMoovBox(width, height, sps, pps))
return output.toByteArray()
}
private fun extractNalUnit(buffer: ByteBuffer): ByteArray {
val data = ByteArray(buffer.remaining())
buffer.duplicate().get(data)
// Strip start code prefix (0x00000001 or 0x000001)
return when {
data.size >= 4 && data[0] == 0.toByte() && data[1] == 0.toByte() &&
data[2] == 0.toByte() && data[3] == 1.toByte() -> data.copyOfRange(4, data.size)
data.size >= 3 && data[0] == 0.toByte() && data[1] == 0.toByte() &&
data[2] == 1.toByte() -> data.copyOfRange(3, data.size)
else -> data
}
}
private fun buildFtypBox(): ByteArray {
val brands = listOf("isom", "iso5", "iso6", "avc1", "mp41", "dash")
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
val size = 8 + 4 + 4 + (brands.size * 4)
dos.writeInt(size)
dos.writeBytes("ftyp")
dos.writeBytes("isom") // major brand
dos.writeInt(0x200) // minor version
brands.forEach { dos.writeBytes(it) }
return output.toByteArray()
}
private fun buildMoovBox(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val content = ByteArrayOutputStream()
content.write(buildMvhdBox())
content.write(buildTrakBox(width, height, sps, pps))
content.write(buildMvexBox())
return wrapBox("moov", content.toByteArray())
}
private fun buildMvhdBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // creation time
dos.writeInt(0) // modification time
dos.writeInt(timescale) // timescale
dos.writeInt(0) // duration
dos.writeInt(0x00010000) // rate = 1.0
dos.writeShort(0x0100) // volume = 1.0
dos.writeShort(0) // reserved
dos.writeInt(0) // reserved
dos.writeInt(0) // reserved
// Unity matrix
dos.writeInt(0x00010000); dos.writeInt(0); dos.writeInt(0)
dos.writeInt(0); dos.writeInt(0x00010000); dos.writeInt(0)
dos.writeInt(0); dos.writeInt(0); dos.writeInt(0x40000000)
repeat(6) { dos.writeInt(0) } // pre-defined
dos.writeInt(2) // next track ID
return wrapBox("mvhd", output.toByteArray())
}
private fun buildTrakBox(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val content = ByteArrayOutputStream()
content.write(buildTkhdBox(width, height))
content.write(buildMdiaBox(width, height, sps, pps))
return wrapBox("trak", content.toByteArray())
}
private fun buildTkhdBox(width: Int, height: Int): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0x00000007) // version 0, flags (enabled, in movie, in preview)
dos.writeInt(0) // creation time
dos.writeInt(0) // modification time
dos.writeInt(1) // track ID
dos.writeInt(0) // reserved
dos.writeInt(0) // duration
dos.writeInt(0) // reserved
dos.writeInt(0) // reserved
dos.writeShort(0) // layer
dos.writeShort(0) // alternate group
dos.writeShort(0) // volume (0 for video)
dos.writeShort(0) // reserved
// Rotation matrix - use identity and rely on correct dimensions from encoder
// The encoder output format already has the correct dimensions for the content
writeRotationMatrix(dos)
// Use dimensions as-is from encoder output format
dos.writeInt(width shl 16) // width (16.16 fixed point)
dos.writeInt(height shl 16) // height (16.16 fixed point)
Log.d(TAG, "tkhd: ${width}x${height}, rotation=$orientationDegrees")
return wrapBox("tkhd", output.toByteArray())
}
/**
* Writes the 3x3 transformation matrix for video rotation.
* Uses simple rotation values - the encoder already outputs correctly oriented frames.
*/
private fun writeRotationMatrix(dos: DataOutputStream) {
// Fixed-point constants
val one = 0x00010000 // 1.0 in 16.16
val w = 0x40000000 // 1.0 in 2.30
// Identity matrix - no transformation
// Most HLS players handle rotation via the dimensions themselves
// or we can add rotation metadata separately if needed
dos.writeInt(one) // a = 1
dos.writeInt(0) // b = 0
dos.writeInt(0) // u = 0
dos.writeInt(0) // c = 0
dos.writeInt(one) // d = 1
dos.writeInt(0) // v = 0
dos.writeInt(0) // x = 0
dos.writeInt(0) // y = 0
dos.writeInt(w) // w = 1
}
private fun buildMdiaBox(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val content = ByteArrayOutputStream()
content.write(buildMdhdBox())
content.write(buildHdlrBox())
content.write(buildMinfBox(width, height, sps, pps))
return wrapBox("mdia", content.toByteArray())
}
private fun buildMdhdBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // creation time
dos.writeInt(0) // modification time
dos.writeInt(timescale) // timescale
dos.writeInt(0) // duration
dos.writeShort(0x55C4) // language: "und"
dos.writeShort(0) // pre-defined
return wrapBox("mdhd", output.toByteArray())
}
private fun buildHdlrBox(): ByteArray {
val name = "VideoHandler"
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // pre-defined
dos.writeBytes("vide") // handler type
dos.writeInt(0) // reserved
dos.writeInt(0) // reserved
dos.writeInt(0) // reserved
dos.writeBytes(name)
dos.writeByte(0) // null terminator
return wrapBox("hdlr", output.toByteArray())
}
private fun buildMinfBox(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val content = ByteArrayOutputStream()
content.write(buildVmhdBox())
content.write(buildDinfBox())
content.write(buildStblBox(width, height, sps, pps))
return wrapBox("minf", content.toByteArray())
}
private fun buildVmhdBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(1) // version 0, flags = 1
dos.writeShort(0) // graphics mode
dos.writeShort(0) // opcolor[0]
dos.writeShort(0) // opcolor[1]
dos.writeShort(0) // opcolor[2]
return wrapBox("vmhd", output.toByteArray())
}
private fun buildDinfBox(): ByteArray {
val dref = buildDrefBox()
return wrapBox("dinf", dref)
}
private fun buildDrefBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(1) // entry count
// url box (self-contained)
dos.writeInt(12)
dos.writeBytes("url ")
dos.writeInt(1) // flags: self-contained
return wrapBox("dref", output.toByteArray())
}
private fun buildStblBox(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val content = ByteArrayOutputStream()
content.write(buildStsdBox(width, height, sps, pps))
content.write(buildEmptySttsBox())
content.write(buildEmptyStscBox())
content.write(buildEmptyStszBox())
content.write(buildEmptyStcoBox())
return wrapBox("stbl", content.toByteArray())
}
private fun buildStsdBox(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(1) // entry count
output.write(buildAvc1Box(width, height, sps, pps))
return wrapBox("stsd", output.toByteArray())
}
private fun buildAvc1Box(width: Int, height: Int, sps: ByteArray, pps: ByteArray): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
repeat(6) { dos.writeByte(0) } // reserved
dos.writeShort(1) // data reference index
dos.writeShort(0) // pre-defined
dos.writeShort(0) // reserved
repeat(3) { dos.writeInt(0) } // pre-defined
dos.writeShort(width) // width
dos.writeShort(height) // height
dos.writeInt(0x00480000) // horiz resolution (72 dpi)
dos.writeInt(0x00480000) // vert resolution (72 dpi)
dos.writeInt(0) // reserved
dos.writeShort(1) // frame count
repeat(32) { dos.writeByte(0) } // compressor name
dos.writeShort(0x0018) // depth (24 bit)
dos.writeShort(-1) // pre-defined
output.write(buildAvcCBox(sps, pps))
return wrapBox("avc1", output.toByteArray())
}
private fun buildAvcCBox(sps: ByteArray, pps: ByteArray): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
val profileIdc = if (sps.isNotEmpty()) sps[0].toInt() and 0xFF else 0x42
val profileCompat = if (sps.size > 1) sps[1].toInt() and 0xFF else 0x00
val levelIdc = if (sps.size > 2) sps[2].toInt() and 0xFF else 0x1F
dos.writeByte(1) // configuration version
dos.writeByte(profileIdc) // AVC profile
dos.writeByte(profileCompat)// profile compatibility
dos.writeByte(levelIdc) // AVC level
dos.writeByte(0xFF) // 6 bits reserved + 2 bits NAL length - 1
dos.writeByte(0xE1) // 3 bits reserved + 5 bits SPS count
dos.writeShort(sps.size) // SPS length
dos.write(sps) // SPS data
dos.writeByte(1) // PPS count
dos.writeShort(pps.size) // PPS length
dos.write(pps) // PPS data
return wrapBox("avcC", output.toByteArray())
}
private fun buildEmptySttsBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // entry count
return wrapBox("stts", output.toByteArray())
}
private fun buildEmptyStscBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // entry count
return wrapBox("stsc", output.toByteArray())
}
private fun buildEmptyStszBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // sample size (0 = variable)
dos.writeInt(0) // sample count
return wrapBox("stsz", output.toByteArray())
}
private fun buildEmptyStcoBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(0) // entry count
return wrapBox("stco", output.toByteArray())
}
private fun buildMvexBox(): ByteArray {
return wrapBox("mvex", buildTrexBox())
}
private fun buildTrexBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(1) // track ID
dos.writeInt(1) // default sample description index
dos.writeInt(0) // default sample duration
dos.writeInt(0) // default sample size
dos.writeInt(0) // default sample flags
return wrapBox("trex", output.toByteArray())
}
// ==================== Media Segment Building ====================
/**
* Builds a media segment (moof + mdat).
*/
private fun buildMediaSegment(
samples: List<Sample>,
sequenceNumber: Int,
baseDecodeTimeUs: Long
): ByteArray {
val output = ByteArrayOutputStream()
// Build mdat content first to know sizes
val mdatContent = ByteArrayOutputStream()
for (sample in samples) {
mdatContent.write(sample.data)
}
val mdatPayload = mdatContent.toByteArray()
// Build moof
val moofBox = buildMoofBox(samples, sequenceNumber, baseDecodeTimeUs, mdatPayload.size)
output.write(moofBox)
// Build mdat
output.write(wrapBox("mdat", mdatPayload))
return output.toByteArray()
}
private fun buildMoofBox(
samples: List<Sample>,
sequenceNumber: Int,
baseDecodeTimeUs: Long,
mdatPayloadSize: Int
): ByteArray {
// Calculate sizes to determine data offset
val mfhdBox = buildMfhdBox(sequenceNumber)
val tfhdSize = 8 + 8 // box header + content (version/flags + track_id)
val tfdtSize = 8 + 12 // box header + version 1 content
val trunSize = 8 + 12 + (samples.size * 12) // header + fixed + per-sample (no composition offset)
val trafSize = 8 + tfhdSize + tfdtSize + trunSize
val moofSize = 8 + mfhdBox.size + trafSize
val dataOffset = moofSize + 8 // moof size + mdat header
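    // Editor's sanity check of the arithmetic above, for 2 samples:
    // trun = 8 + 12 + 2*12 = 44; traf = 8 + 16 + 20 + 44 = 88; moof = 8 + 16 (mfhd) + 88 = 112;
    // dataOffset = 112 + 8 = 120, the first mdat payload byte measured from the start of the moof.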
val content = ByteArrayOutputStream()
content.write(mfhdBox)
content.write(buildTrafBox(samples, baseDecodeTimeUs, dataOffset))
return wrapBox("moof", content.toByteArray())
}
private fun buildMfhdBox(sequenceNumber: Int): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(0) // version & flags
dos.writeInt(sequenceNumber)
return wrapBox("mfhd", output.toByteArray())
}
private fun buildTrafBox(samples: List<Sample>, baseDecodeTimeUs: Long, dataOffset: Int): ByteArray {
val content = ByteArrayOutputStream()
content.write(buildTfhdBox())
content.write(buildTfdtBox(baseDecodeTimeUs))
content.write(buildTrunBox(samples, dataOffset))
return wrapBox("traf", content.toByteArray())
}
private fun buildTfhdBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
// Flags: default-base-is-moof (0x020000)
dos.writeInt(0x00020000)
dos.writeInt(1) // track ID
return wrapBox("tfhd", output.toByteArray())
}
private fun buildTfdtBox(baseDecodeTimeUs: Long): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
// Convert to timescale units
val baseMediaDecodeTime = (baseDecodeTimeUs * timescale) / 1_000_000
// Version 1 for 64-bit time
dos.writeInt(0x01000000)
dos.writeLong(baseMediaDecodeTime)
return wrapBox("tfdt", output.toByteArray())
}
private fun buildTrunBox(samples: List<Sample>, dataOffset: Int): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
// Flags: data-offset + sample-duration + sample-size + sample-flags
val flags = 0x000001 or 0x000100 or 0x000200 or 0x000400
dos.writeInt(flags)
dos.writeInt(samples.size)
dos.writeInt(dataOffset)
for (sample in samples) {
// Convert duration to timescale units
val durationInTimescale = ((sample.durationUs * timescale) / 1_000_000).toInt()
dos.writeInt(durationInTimescale)
dos.writeInt(sample.data.size)
dos.writeInt(buildSampleFlags(sample.isKeyFrame))
}
return wrapBox("trun", output.toByteArray())
}
private fun buildSampleFlags(isKeyFrame: Boolean): Int {
return if (isKeyFrame) {
// sample_depends_on=2 (no dependencies), not a difference sample
0x02000000
} else {
// sample_depends_on=1 (depends on others), is a difference sample
0x01010000
}
}
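  // Editor's note on the flag words above (ISO/IEC 14496-12 sample flags layout):
  // 0x02000000 sets sample_depends_on = 2 in bits 24-25, i.e. a sync sample (keyframe);
  // 0x01010000 sets sample_depends_on = 1 and sample_is_non_sync_sample (bit 16), i.e. a P-frame.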
// ==================== Utilities ====================
private fun wrapBox(type: String, content: ByteArray): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(8 + content.size)
dos.writeBytes(type)
dos.write(content)
return output.toByteArray()
}
}
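As the class comment says, the lifecycle mirrors Android's MediaMuxer. A minimal usage sketch under stated assumptions (the output directory, the encoderOutputFormat variable, and the logging are illustrative; in this changeset FragmentedRecordingManager drives these calls):

// Illustrative sketch only.
val muxer = HlsMuxer(File(context.filesDir, "hls"), object : HlsMuxer.Callback {
  override fun onInitSegmentReady(file: File) = Log.i("HlsDemo", "init segment: $file")
  override fun onMediaSegmentReady(file: File, index: Int, durationUs: Long) =
    Log.i("HlsDemo", "segment $index: $file (${durationUs / 1000} ms)")
}, orientationDegrees = 0)
muxer.setSegmentDuration(6_000_000L)             // 6-second target segments
val track = muxer.addTrack(encoderOutputFormat)  // the format from MediaCodec's onOutputFormatChanged
muxer.start()                                    // writes init.mp4, fires onInitSegmentReady
// per encoded buffer: muxer.writeSampleData(track, buffer, bufferInfo)
muxer.stop()                                     // flushes the final segment
muxer.release()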

View File

@@ -2,19 +2,85 @@ package com.mrousavy.camera.core
import android.annotation.SuppressLint import android.annotation.SuppressLint
import android.content.Context import android.content.Context
import android.content.res.Configuration
import android.graphics.Point import android.graphics.Point
import android.os.Handler
import android.os.Looper
import android.util.Log import android.util.Log
import android.util.Size import android.util.Size
import android.view.PixelCopy
import android.view.Surface
import android.view.SurfaceHolder import android.view.SurfaceHolder
import android.view.SurfaceView import android.view.SurfaceView
import android.view.WindowManager
import com.facebook.react.bridge.UiThreadUtil import com.facebook.react.bridge.UiThreadUtil
import com.mrousavy.camera.extensions.resize import com.mrousavy.camera.extensions.resize
import com.mrousavy.camera.extensions.rotatedBy import com.mrousavy.camera.extensions.rotatedBy
import com.mrousavy.camera.types.Orientation import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.ResizeMode import com.mrousavy.camera.types.ResizeMode
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.math.roundToInt import kotlin.math.roundToInt
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlinx.coroutines.withContext import kotlinx.coroutines.withContext
import android.graphics.Bitmap
import android.graphics.Matrix
+fun rotateBitmap90CounterClockwise(source: Bitmap): Bitmap {
+  val width = source.width
+  val height = source.height
+  // Create a new Bitmap with swapped width and height
+  val rotatedBitmap = Bitmap.createBitmap(height, width, source.config ?: Bitmap.Config.ARGB_8888)
+  for (y in 0 until height) {
+    for (x in 0 until width) {
+      // Write each source pixel into its rotated position
+      rotatedBitmap.setPixel(y, width - 1 - x, source.getPixel(x, y))
+    }
+  }
+  return rotatedBitmap
+}
+
+fun Bitmap.transformBitmap(orientation: Orientation): Bitmap {
+  return when (orientation) {
+    Orientation.PORTRAIT -> this // No transformation needed
+    Orientation.LANDSCAPE_LEFT -> {
+      // Rotate 90° counter-clockwise (width and height swap)
+      val transposedBitmap = Bitmap.createBitmap(height, width, config ?: Bitmap.Config.ARGB_8888)
+      for (y in 0 until height) {
+        for (x in 0 until width) {
+          transposedBitmap.setPixel(y, width - 1 - x, getPixel(x, y))
+        }
+      }
+      transposedBitmap
+    }
+    Orientation.PORTRAIT_UPSIDE_DOWN -> {
+      // Rotate 180° (flip horizontally and vertically)
+      val invertedBitmap = Bitmap.createBitmap(width, height, config ?: Bitmap.Config.ARGB_8888)
+      for (y in 0 until height) {
+        for (x in 0 until width) {
+          invertedBitmap.setPixel(width - 1 - x, height - 1 - y, getPixel(x, y))
+        }
+      }
+      invertedBitmap
+    }
+    Orientation.LANDSCAPE_RIGHT -> {
+      // Rotate 90° clockwise (width and height swap)
+      val transposedBitmap = Bitmap.createBitmap(height, width, config ?: Bitmap.Config.ARGB_8888)
+      for (y in 0 until height) {
+        for (x in 0 until width) {
+          transposedBitmap.setPixel(height - 1 - y, x, getPixel(x, y))
+        }
+      }
+      transposedBitmap
+    }
+  }
+}
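Note: the per-pixel setPixel/getPixel loops above make two JNI calls per pixel and can take noticeable time on full-resolution frames. android.graphics.Matrix is imported above but unused; a Matrix-based rotation performs the same transforms in one native pass. A behavior-equivalent sketch — the `rotated` helper is hypothetical, not part of this codebase, and positive degrees rotate clockwise in Android's coordinate system:

// Hypothetical fast path using the already-imported Matrix class.
fun Bitmap.rotated(degrees: Float): Bitmap {
  val matrix = Matrix().apply { postRotate(degrees) }
  return Bitmap.createBitmap(this, 0, 0, width, height, matrix, true)
}

// Mirroring the loop directions above:
//   LANDSCAPE_LEFT (90° counter-clockwise)  -> bitmap.rotated(-90f)
//   PORTRAIT_UPSIDE_DOWN (180°)             -> bitmap.rotated(180f)
//   LANDSCAPE_RIGHT (90° clockwise)         -> bitmap.rotated(90f)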
 @SuppressLint("ViewConstructor")
 class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
@@ -80,6 +146,52 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
    }
  }
+  suspend fun getBitmap(): Bitmap? = withContext(Dispatchers.Main) {
+    val frame = holder.getSurfaceFrame()
+    val width = frame.width()
+    val height = frame.height()
+    // Create a bitmap matching the surface frame dimensions for PixelCopy.
+    // The original code swapped dimensions assuming landscape input - keep that for consistency.
+    val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
+    // Use a coroutine to suspend until the PixelCopy request is complete
+    suspendCancellableCoroutine<Bitmap?> { continuation ->
+      PixelCopy.request(
+        holder.surface,
+        bitmap,
+        { copyResult ->
+          if (copyResult == PixelCopy.SUCCESS) {
+            // Get the actual device rotation from WindowManager instead of relying on
+            // the orientation prop, which may not update on Android when rotating
+            // between landscape-left and landscape-right.
+            val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
+            val deviceRotation = windowManager.defaultDisplay.rotation
+            val actualOrientation = when (deviceRotation) {
+              Surface.ROTATION_0 -> Orientation.PORTRAIT
+              Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
+              Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
+              Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
+              else -> Orientation.PORTRAIT
+            }
+            Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
+            continuation.resume(bitmap.transformBitmap(actualOrientation))
+          } else {
+            continuation.resumeWithException(
+              RuntimeException("PixelCopy failed with error code $copyResult")
+            )
+          }
+        },
+        Handler(Looper.getMainLooper())
+      )
+    }
+  }
  fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point {
    val sensorOrientation = cameraDeviceDetails.sensorOrientation
    val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height())
@@ -97,22 +209,14 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
    }
  }
-  override fun requestLayout() {
-    super.requestLayout()
-    // Manually trigger measure & layout, as RN on Android skips those.
-    // See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
-    post {
-      measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
-      layout(left, top, right, bottom)
-    }
-  }
  private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
    var contentSize = contentSize
-    // Swap dimensions if orientation is landscape
-    if (orientation.isLandscape()) {
+    var androidOrientation = context.getResources().getConfiguration().orientation;
+    if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
      contentSize = Size(contentSize.height, contentSize.width)
    }
    val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
    val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
    if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {

View File

@@ -14,6 +14,7 @@ import android.os.Environment
 import java.text.SimpleDateFormat
 import java.util.Locale
 import java.util.Date

 class RecordingSession(
   context: Context,
   val cameraId: String,
@@ -27,6 +28,8 @@ class RecordingSession(
   private val callback: (video: Video) -> Unit,
   private val onError: (error: CameraError) -> Unit,
   private val allCallbacks: CameraSession.Callback,
+  // Use FragmentedRecordingManager for HLS-compatible fMP4 output
+  private val useFragmentedMp4: Boolean = true
 ) {
   companion object {
     private const val TAG = "RecordingSession"
@@ -34,6 +37,9 @@ class RecordingSession(
     private const val AUDIO_SAMPLING_RATE = 44_100
     private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
     private const val AUDIO_CHANNELS = 1
+    // Segment duration in seconds (matching the iOS default of 6 seconds)
+    private const val SEGMENT_DURATION_SECONDS = 6
   }

   data class Video(val path: String, val durationMs: Long, val size: Size)
@@ -41,16 +47,33 @@ class RecordingSession(
   private val outputPath: File = File(filePath)
   private val bitRate = getBitRate()
-  private val recorder = ChunkedRecordingManager.fromParams(
-    allCallbacks,
-    size,
-    enableAudio,
-    fps,
-    cameraOrientation,
-    bitRate,
-    options,
-    outputPath
-  )
+  // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
+  // or fall back to ChunkedRecordingManager for regular MP4 chunks
+  private val recorder: ChunkedRecorderInterface = if (useFragmentedMp4) {
+    FragmentedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath,
+      SEGMENT_DURATION_SECONDS
+    )
+  } else {
+    ChunkedRecordingManager.fromParams(
+      allCallbacks,
+      size,
+      enableAudio,
+      fps,
+      cameraOrientation,
+      bitRate,
+      options,
+      outputPath
+    )
+  }
   private var startTime: Long? = null
   val surface: Surface
     get() {
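ChunkedRecorderInterface itself is not shown in this compare view, so the following is only an inferred sketch of the abstraction both recording managers implement, based on the constructor parameters and the surface accessor above; the member names are assumptions, not the repo's actual signatures:

import android.view.Surface

// Hypothetical sketch - the real interface lives in ChunkedRecorderInterface.kt
// and may declare different members.
interface ChunkedRecorderInterface {
  // Encoder input surface the camera session renders into
  val surface: Surface
  // Begin emitting output (init.mp4 plus numbered fragments, for fMP4)
  fun start()
  // Flush the in-flight segment and release the encoder/muxer
  fun finish()
}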

View File

@@ -1,3 +1,5 @@
+@file:Suppress("DEPRECATION")
+
 package com.mrousavy.camera.frameprocessor

 import android.util.Log
@@ -7,7 +9,6 @@ import com.facebook.jni.HybridData
 import com.facebook.proguard.annotations.DoNotStrip
 import com.facebook.react.bridge.ReactApplicationContext
 import com.facebook.react.bridge.UiThreadUtil
-import com.facebook.react.turbomodule.core.CallInvokerHolderImpl
 import com.facebook.react.uimanager.UIManagerHelper
 import com.mrousavy.camera.CameraView
 import com.mrousavy.camera.core.ViewNotFoundError
@@ -21,19 +22,26 @@ class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
   @DoNotStrip
   @Keep
-  private var mHybridData: HybridData
+  private var mHybridData: HybridData?
   private var mContext: WeakReference<ReactApplicationContext>
   private var mScheduler: VisionCameraScheduler
   val context: ReactApplicationContext
     get() = reactContext

   init {
-    val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl
-    val jsRuntimeHolder =
-      context.javaScriptContextHolder?.get() ?: throw Error("JSI Runtime is null! VisionCamera does not yet support bridgeless mode..")
+    // TODO: Fix for React Native 0.79+ - these APIs are now framework-only.
+    // Since Frame Processors are disabled anyway (react-native-worklets-core not found),
+    // we'll disable this functionality to allow the build to complete.
+    Log.w(TAG, "Frame Processor initialization disabled due to React Native 0.79+ API compatibility issues")
     mScheduler = VisionCameraScheduler()
     mContext = WeakReference(context)
-    mHybridData = initHybrid(jsRuntimeHolder, jsCallInvokerHolder, mScheduler)
+    // Disable Frame Processor functionality since the APIs are not compatible
+    mHybridData = null
+    // Original code that fails with RN 0.79+:
+    // val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl
+    // val jsRuntimeHolder = context.javaScriptContextHolder?.get() ?: throw Error("JSI Runtime is null!")
+    // mHybridData = initHybrid(jsRuntimeHolder, jsCallInvokerHolder, mScheduler)
   }

   @UiThread
@@ -69,5 +77,9 @@ class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
     FrameProcessorPluginRegistry.getPlugin(name, this, options)

   // private C++ funcs
-  private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData
+  // Frame Processors are disabled - native registration is skipped via VISION_CAMERA_ENABLE_FRAME_PROCESSORS=OFF.
+  // This method is never called or registered, kept for reference only:
+  // @DoNotStrip
+  // @Keep
+  // private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: Any, scheduler: VisionCameraScheduler): HybridData
 }

View File

@@ -1,13 +1,33 @@
 package com.mrousavy.camera.utils

 import android.content.Context
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
+import android.util.Size
 import java.io.File
+import java.io.FileOutputStream

 class FileUtils {
   companion object {
-    fun createTempFile(context: Context, extension: String): File =
-      File.createTempFile("mrousavy", extension, context.cacheDir).also {
-        it.deleteOnExit()
-      }
+    fun writeBitmapTofile(bitmap: Bitmap, file: File, quality: Int) {
+      FileOutputStream(file).use { stream ->
+        bitmap.compress(Bitmap.CompressFormat.JPEG, 50, stream)
+      }
+    }
+
+    fun getImageSize(imagePath: String): Size {
+      val bitmapOptions = BitmapFactory.Options().also {
+        it.inJustDecodeBounds = true
+      }
+      BitmapFactory.decodeFile(imagePath, bitmapOptions)
+      val width = bitmapOptions.outWidth
+      val height = bitmapOptions.outHeight
+      return Size(width, height)
+    }
+
+    fun createTempFile(context: Context, extension: String): File =
+      File.createTempFile("mrousavy", extension, context.cacheDir).also {
+        it.deleteOnExit()
+      }
   }
 }
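Two notes on this helper: getImageSize sets inJustDecodeBounds, so only the image header is decoded, never the pixel data; and writeBitmapTofile accepts a quality parameter but passes a hardcoded 50 to compress(), so callers always get 50% JPEG quality until the parameter is plumbed through. A usage sketch — `context` and `bitmap` are placeholders for values in scope, not part of the diff:

// Usage sketch, assuming `context` and `bitmap` are available.
val photoFile = FileUtils.createTempFile(context, ".jpg")
FileUtils.writeBitmapTofile(bitmap, photoFile, quality = 90) // writes at 50 until the hardcoded value is fixed
val size = FileUtils.getImageSize(photoFile.absolutePath)
Log.d("FileUtils", "Saved ${size.width}x${size.height} to ${photoFile.path}")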

View File

@@ -11,6 +11,6 @@ inline fun withPromise(promise: Promise, closure: () -> Any?) {
   } catch (e: Throwable) {
     e.printStackTrace()
     val error = if (e is CameraError) e else UnknownCameraError(e)
-    promise.reject("${error.domain}/${error.id}", error.message, error.cause)
+    promise.reject("${error.domain}/${error.id}", error.message ?: "Unknown error", error.cause)
   }
 }
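The `?: "Unknown error"` fallback matters because Throwable.message is nullable in Kotlin, and this change guards that null before it reaches the bridge's reject call. For context, a typical call site looks like the sketch below — `takeSnapshot` and `captureToFile` are hypothetical; only withPromise comes from this diff:

import com.facebook.react.bridge.Promise
import com.facebook.react.bridge.ReactMethod

@ReactMethod
fun takeSnapshot(promise: Promise) {
  withPromise(promise) {
    val file = captureToFile() // may throw a CameraError subclass
    file.absolutePath          // resolved value; any Throwable becomes a "<domain>/<id>" rejection
  }
}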

View File

@@ -17,8 +17,8 @@
   "@react-native-community/blur": "^4.3.2",
   "@react-navigation/native": "^6.1.7",
   "@react-navigation/native-stack": "^6.9.13",
-  "react": "^18.2.0",
-  "react-native": "^0.72.3",
+  "react": "^19.1.1",
+  "react-native": "^0.81.0",
   "react-native-fast-image": "^8.6.3",
   "react-native-gesture-handler": "^2.12.1",
   "react-native-mmkv": "^2.10.2",
@@ -42,7 +42,7 @@
   "@types/react-native-vector-icons": "^6.4.13",
   "@types/react-native-video": "^5.0.15",
   "babel-plugin-module-resolver": "^5.0.0",
-  "eslint": "^8.46.0",
+  "eslint": "^9.33.0",
   "eslint-plugin-prettier": "^5.0.0",
   "metro-react-native-babel-preset": "^0.77.0",
   "prettier": "^3.2.4",

View File

@@ -47,7 +47,7 @@ export const RecordingButton: React.FC<RecordingButtonProps> = ({ style, camera,
       onMediaCaptured(video, 'video')
       onStoppedRecording()
     },
-  })
+  }, 'video.mp4')
   console.log('called startRecording()!')
   isRecording.current = true
   setRecordingState(true)

File diff suppressed because it is too large

package/flake.lock (generated, 189 lines deleted)
View File

@@ -1,189 +0,0 @@
{
"nodes": {
"android-nixpkgs": {
"inputs": {
"devshell": "devshell",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1701980274,
"narHash": "sha256-uKJIFvsahbWw52TsIht7g2iosXBgJDRMSMoCE1fvEAk=",
"owner": "tadfisher",
"repo": "android-nixpkgs",
"rev": "bce9d437ed54ee1425b66442a12814fee4cdbd51",
"type": "github"
},
"original": {
"owner": "tadfisher",
"repo": "android-nixpkgs",
"type": "github"
}
},
"devshell": {
"inputs": {
"nixpkgs": [
"android-nixpkgs",
"nixpkgs"
],
"systems": "systems"
},
"locked": {
"lastModified": 1701787589,
"narHash": "sha256-ce+oQR4Zq9VOsLoh9bZT8Ip9PaMLcjjBUHVPzW5d7Cw=",
"owner": "numtide",
"repo": "devshell",
"rev": "44ddedcbcfc2d52a76b64fb6122f209881bd3e1e",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "devshell",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_3"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1694102001,
"narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1701718080,
"narHash": "sha256-6ovz0pG76dE0P170pmmZex1wWcQoeiomUZGggfH9XPs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2c7f3c0fb7c08a0814627611d9d7d45ab6d75335",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1702312524,
"narHash": "sha256-gkZJRDBUCpTPBvQk25G0B7vfbpEYM5s5OZqghkjZsnE=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a9bf124c46ef298113270b1f84a164865987a91c",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"android-nixpkgs": "android-nixpkgs",
"flake-utils": "flake-utils_2",
"gitignore": "gitignore",
"nixpkgs": "nixpkgs_2"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View File

@@ -1,77 +0,0 @@
{
description = "Sample Nix ts-node build";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
gitignore = {
url = "github:hercules-ci/gitignore.nix";
inputs.nixpkgs.follows = "nixpkgs";
};
android-nixpkgs = {
url = "github:tadfisher/android-nixpkgs";
};
};
outputs = {
self,
nixpkgs,
flake-utils,
gitignore,
android-nixpkgs,
...
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
nodejs = pkgs.nodejs-18_x;
# NOTE: this does not work
appBuild = pkgs.stdenv.mkDerivation {
name = "example-ts-node";
version = "0.1.0";
src = gitignore.lib.gitignoreSource ./.; # uses the gitignore in the repo to only copy files git would see
buildInputs = [nodejs];
# https://nixos.org/manual/nixpkgs/stable/#sec-stdenv-phases
buildPhase = ''
# each phase has pre/postHooks. When you make your own phase be sure to still call the hooks
runHook preBuild
npm ci
npm run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
cp -r node_modules $out/node_modules
cp package.json $out/package.json
cp -r dist $out/dist
runHook postInstall
'';
};
android-sdk = android-nixpkgs.sdk.${system} (sdkPkgs:
with sdkPkgs; [
cmdline-tools-latest
build-tools-30-0-3
build-tools-33-0-0
build-tools-33-0-1
build-tools-34-0-0
platform-tools
platforms-android-33
platforms-android-34
emulator
ndk-23-1-7779620
cmake-3-22-1
system-images-android-33-google-apis-x86-64
system-images-android-34-google-apis-x86-64
]);
in
with pkgs; {
defaultPackage = appBuild;
devShell = mkShell {
buildInputs = [nodejs yarn watchman gradle_7 alejandra nodePackages.prettier ktlint kotlin-language-server];
ANDROID_SDK_BIN = android-sdk;
shellHook = ''
export JAVA_HOME=${pkgs.jdk17.home}
source ${android-sdk.out}/nix-support/setup-hook
export PATH=${android-sdk}/bin:$PATH
ORG_GRADLE_PROJECT_ANDROID_HOME="$ANDROID_HOME"
'';
};
});
}

View File

@@ -50,4 +50,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
   func resumeRecording(promise: Promise) {
     cameraSession.resumeRecording(promise: promise)
   }
+
+  func lockExposure(promise: Promise) {
+    cameraSession.lockCurrentExposure(promise: promise)
+  }
+
+  func unlockExposure(promise: Promise) {
+    cameraSession.unlockCurrentExposure(promise: promise)
+  }
 }

View File

@@ -86,5 +86,13 @@ RCT_EXTERN_METHOD(focus
                  : (NSDictionary*)point resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
+RCT_EXTERN_METHOD(lockCurrentExposure
+                  : (nonnull NSNumber*)node resolve
+                  : (RCTPromiseResolveBlock)resolve reject
+                  : (RCTPromiseRejectBlock)reject);
+RCT_EXTERN_METHOD(unlockCurrentExposure
+                  : (nonnull NSNumber*)node resolve
+                  : (RCTPromiseResolveBlock)resolve reject
+                  : (RCTPromiseRejectBlock)reject);
 @end

View File

@@ -111,6 +111,18 @@ final class CameraViewManager: RCTViewManager {
     }
   }

+  @objc
+  final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
+    let component = getCameraView(withTag: node)
+    component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
+  }
+
+  @objc
+  final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
+    let component = getCameraView(withTag: node)
+    component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
+  }

   // MARK: Private
   private func getCameraView(withTag tag: NSNumber) -> CameraView {

View File

@@ -18,8 +18,6 @@ extension CameraSession {
                    filePath: String,
                    onVideoRecorded: @escaping (_ video: Video) -> Void,
                    onError: @escaping (_ error: CameraError) -> Void) {
-    lockCurrentExposure(for: captureSession)
    // Run on Camera Queue
    CameraQueues.cameraQueue.async {
      let start = DispatchTime.now()
@@ -194,13 +192,19 @@ extension CameraSession {
    }
  }

-  func lockCurrentExposure(for session: AVCaptureSession) {
-    guard let captureDevice = AVCaptureDevice.default(for: .video) else {
-      print("No capture device available")
-      return
-    }
-    do {
+  func lockCurrentExposure(promise: Promise) {
+    CameraQueues.cameraQueue.async {
+      withPromise(promise) {
+        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
+          print("No capture device available")
+          return nil
+        }
+        guard captureDevice.isExposureModeSupported(.custom) else {
+          ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
+          return nil
+        }
+        do {
          // Lock the device for configuration
          try captureDevice.lockForConfiguration()
@@ -210,18 +214,45 @@ extension CameraSession {

          // Check if the device supports custom exposure settings
          if captureDevice.isExposureModeSupported(.custom) {
            // Lock the current exposure and ISO by setting custom exposure mode
            captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
            ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
          } else {
            ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
          }

          // Unlock the device after configuration
          captureDevice.unlockForConfiguration()
        } catch {
          ReactLogger.log(level: .warning, message: "Error locking exposure: \(error)")
        }
+        return nil
      }
    }
  }
+
+  func unlockCurrentExposure(promise: Promise) {
+    CameraQueues.cameraQueue.async {
+      withPromise(promise) {
+        guard let captureDevice = AVCaptureDevice.default(for: .video) else {
+          print("No capture device available")
+          return nil
+        }
+        do {
+          if captureDevice.isExposureModeSupported(.autoExpose) {
+            try captureDevice.lockForConfiguration()
+            captureDevice.exposureMode = .continuousAutoExposure
+            captureDevice.unlockForConfiguration()
+          }
+        } catch {
+          ReactLogger.log(level: .warning, message: "Error unlocking exposure: \(error)")
+        }
+        return nil
+      }
+    }
+  }
}

View File

@@ -87,16 +87,15 @@
 "devDependencies": {
   "@expo/config-plugins": "^7.2.5",
   "@jamesacarr/eslint-formatter-github-actions": "^0.2.0",
-  "@react-native/eslint-config": "^0.72.2",
-  "@react-native/typescript-config": "^0.74.0",
+  "@react-native/eslint-config": "^0.81.0",
+  "@react-native/typescript-config": "^0.81.0",
   "@release-it/conventional-changelog": "^7.0.0",
   "@types/react": "^18.2.19",
-  "@types/react-native": "^0.72.2",
   "eslint": "^8.46.0",
   "eslint-plugin-prettier": "^5.0.0",
   "prettier": "^3.0.1",
-  "react": "^18.2.0",
-  "react-native": "^0.72.3",
+  "react": "^19.1.1",
+  "react-native": "^0.81.0",
   "react-native-builder-bob": "^0.21.3",
   "react-native-worklets-core": "^0.3.0",
   "release-it": "^16.1.3",

View File

@@ -319,6 +319,22 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
       throw tryParseNativeCameraError(e)
     }
   }

+  public async lockCurrentExposure(): Promise<void> {
+    try {
+      return await CameraModule.lockCurrentExposure(this.handle)
+    } catch (e) {
+      throw tryParseNativeCameraError(e)
+    }
+  }
+
+  public async unlockCurrentExposure(): Promise<void> {
+    try {
+      return await CameraModule.unlockCurrentExposure(this.handle)
+    } catch (e) {
+      throw tryParseNativeCameraError(e)
+    }
+  }
   //#endregion

   //#region Static Functions (NativeModule)

File diff suppressed because it is too large