Compare commits


3 Commits

Author         SHA1        Message                                                                 Date
Rui Rodrigues  695e317a77  apply patch to opt in for old API (d021c76f4b, https://github.com/mrousavy/react-native-vision-camera/issues/2851)  2024-08-22 09:23:03 +01:00
Rui Rodrigues  5e8f7055e2  gradle upgrade                                                          2024-08-22 09:09:43 +01:00
Rui Rodrigues  031aa9d43a  remove package from AndroidManifest.xml and update java version to 17  2024-08-21 17:02:04 +01:00
32 changed files with 3262 additions and 3190 deletions

package/.envrc (new file)

@@ -0,0 +1,5 @@
use flake . --impure
if [ -f .envrc.local ]; then
source .envrc.local
fi


@@ -19,7 +19,9 @@ endif()
# Add react-native-vision-camera sources
set(SOURCES
add_library(
${PACKAGE_NAME}
SHARED
# Shared C++
../cpp/MutableRawBuffer.cpp
# Java JNI
@@ -29,11 +31,7 @@ set(SOURCES
src/main/cpp/OpenGLContext.cpp
src/main/cpp/OpenGLRenderer.cpp
src/main/cpp/MutableJByteBuffer.cpp
)
# Only add Frame Processor sources if enabled
if (ENABLE_FRAME_PROCESSORS)
list(APPEND SOURCES
# Frame Processor
src/main/cpp/frameprocessor/FrameHostObject.cpp
src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
src/main/cpp/frameprocessor/JSIJNIConversion.cpp
@@ -45,18 +43,6 @@ if (ENABLE_FRAME_PROCESSORS)
src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
)
endif()
add_library(
${PACKAGE_NAME}
SHARED
${SOURCES}
)
# Force 16KB page alignment for Android 15+ compatibility
set_target_properties(${PACKAGE_NAME} PROPERTIES
LINK_FLAGS "-Wl,-z,max-page-size=16384"
)
# Header Search Paths (includes)
target_include_directories(
@@ -77,7 +63,7 @@ target_link_libraries(
${LOG_LIB} # <-- Logcat logger
android # <-- Android JNI core
ReactAndroid::jsi # <-- RN: JSI
ReactAndroid::reactnative # <-- RN: React Native JNI bindings (RN 0.76+)
ReactAndroid::reactnativejni # <-- RN: React Native JNI bindings
fbjni::fbjni # <-- fbjni
GLESv2 # <-- OpenGL (for VideoPipeline)
EGL # <-- OpenGL (EGL) (for VideoPipeline)
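The `-Wl,-z,max-page-size=16384` link flag above aligns the shared library's segments to 16 KB so it stays loadable on Android 15+ devices that boot with 16 KB kernel pages. A hedged Kotlin sketch of a runtime page-size check (illustrative only, not part of this changeset):

```kotlin
import android.system.Os
import android.system.OsConstants

// Returns the kernel page size in bytes: 4096 on conventional devices,
// 16384 on the 16 KB-page devices the linker flag above targets.
fun pageSizeBytes(): Long = Os.sysconf(OsConstants._SC_PAGESIZE)
```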


@@ -16,7 +16,7 @@ buildscript {
}
dependencies {
classpath "com.android.tools.build:gradle:7.4.2"
classpath "com.android.tools.build:gradle:8.5.2"
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
@@ -137,14 +137,6 @@ android {
targetCompatibility JavaVersion.VERSION_17
}
kotlinOptions {
jvmTarget = "17"
freeCompilerArgs += [
"-opt-in=kotlin.RequiresOptIn",
"-opt-in=com.facebook.react.annotations.UnstableReactNativeAPI"
]
}
externalNativeBuild {
cmake {
path "CMakeLists.txt"
@@ -165,7 +157,6 @@ android {
"**/libhermes-executor-debug.so",
"**/libhermes_executor.so",
"**/libreactnativejni.so",
"**/libreactnative.so",
"**/libturbomodulejsijni.so",
"**/libreact_nativemodule_core.so",
"**/libjscexecutor.so"
@@ -178,10 +169,6 @@ dependencies {
implementation "com.facebook.react:react-android:+"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3"
// Media3 muxer for fragmented MP4 (HLS-compatible) recording
implementation "androidx.media3:media3-muxer:1.5.0"
implementation "androidx.media3:media3-common:1.5.0"
if (enableCodeScanner) {
// User enabled code-scanner, so we bundle the 2.4 MB model in the app.
implementation 'com.google.mlkit:barcode-scanning:17.2.0'


@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists


@@ -1,4 +1,3 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mrousavy.camera">
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
</manifest>
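Dropping the `package` attribute here matches the AGP 8 requirement that the namespace be declared in Gradle rather than in the manifest. A minimal sketch of the Gradle-side declaration, written in the Kotlin DSL (the module's actual build.gradle is Groovy, and this line is assumed rather than shown in the diff):

```kotlin
// build.gradle.kts sketch: with AGP 8+, the code namespace lives in Gradle
// instead of AndroidManifest.xml's removed "package" attribute.
plugins {
    id("com.android.library")
}

android {
    namespace = "com.mrousavy.camera"
}
```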


@@ -9,13 +9,11 @@
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
return facebook::jni::initialize(vm, [] {
// VideoPipeline is needed for video recording even without Frame Processors
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
// Frame Processor JNI bindings - only register when Frame Processors are enabled
vision::VisionCameraInstaller::registerNatives();
vision::JVisionCameraProxy::registerNatives();
vision::JVisionCameraScheduler::registerNatives();
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
vision::JFrameProcessor::registerNatives();
vision::JSharedArray::registerNatives();
#endif


@@ -40,26 +40,15 @@ fun CameraView.invokeOnStopped() {
this.sendEvent(event)
}
fun CameraView.invokeOnChunkReady(filepath: File, index: Int, durationUs: Long?) {
Log.i(CameraView.TAG, "invokeOnChunkReady(...): index=$index, filepath=$filepath, durationUs=$durationUs")
fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
Log.e(CameraView.TAG, "invokeOnError(...):")
val event = Arguments.createMap()
event.putInt("index", index)
event.putString("filepath", filepath.toString())
if (durationUs != null) {
event.putDouble("duration", durationUs / 1_000_000.0) // Convert microseconds to seconds
}
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
}
fun CameraView.invokeOnInitReady(filepath: File) {
Log.i(CameraView.TAG, "invokeOnInitReady(...): filepath=$filepath")
val event = Arguments.createMap()
event.putString("filepath", filepath.toString())
val reactContext = context as ReactContext
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onInitReady", event)
}
fun CameraView.invokeOnError(error: Throwable) {
Log.e(CameraView.TAG, "invokeOnError(...):")
error.printStackTrace()


@@ -13,37 +13,70 @@ import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.core.InsufficientStorageError
import com.mrousavy.camera.utils.FileUtils
import com.mrousavy.camera.types.Flash
import com.mrousavy.camera.types.QualityPrioritization
import com.mrousavy.camera.utils.*
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import kotlinx.coroutines.*
private const val TAG = "CameraView.takeSnapshot"
private const val TAG = "CameraView.takePhoto"
@SuppressLint("UnsafeOptInUsageError")
suspend fun CameraView.takePhoto(optionsMap: ReadableMap): WritableMap {
val options = optionsMap.toHashMap()
Log.i(TAG, "Taking snapshot... Options: $options")
val bitmap = previewView.getBitmap() ?: throw Error()
Log.i(TAG, "Taking photo... Options: $options")
val file = FileUtils.createTempFile(context, "png");
val qualityPrioritization = options["qualityPrioritization"] as? String ?: "balanced"
val flash = options["flash"] as? String ?: "off"
val enableAutoStabilization = options["enableAutoStabilization"] == true
val enableShutterSound = options["enableShutterSound"] as? Boolean ?: true
val enablePrecapture = options["enablePrecapture"] as? Boolean ?: false
// Write snapshot to .jpg file
FileUtils.writeBitmapTofile(bitmap, file, 100)
// TODO: Implement Red Eye Reduction
options["enableAutoRedEyeReduction"]
Log.i(TAG, "Successfully saved snapshot to file!")
val flashMode = Flash.fromUnionValue(flash)
val qualityPrioritizationMode = QualityPrioritization.fromUnionValue(qualityPrioritization)
val photo = cameraSession.takePhoto(
qualityPrioritizationMode,
flashMode,
enableShutterSound,
enableAutoStabilization,
enablePrecapture,
orientation
)
photo.use {
Log.i(TAG, "Successfully captured ${photo.image.width} x ${photo.image.height} photo!")
val cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId!!)
val path = try {
savePhotoToFile(context, cameraCharacteristics, photo)
} catch (e: IOException) {
if (e.message?.contains("no space left", true) == true) {
throw InsufficientStorageError()
} else {
throw e
}
}
Log.i(TAG, "Successfully saved photo to file! $path")
// Parse output data
val map = Arguments.createMap()
map.putString("path", file.absolutePath)
map.putInt("width", bitmap.width)
map.putInt("height", bitmap.height)
map.putBoolean("isMirrored", false)
map.putString("path", path)
map.putInt("width", photo.image.width)
map.putInt("height", photo.image.height)
map.putString("orientation", photo.orientation.unionValue)
map.putBoolean("isRawPhoto", photo.format == ImageFormat.RAW_SENSOR)
map.putBoolean("isMirrored", photo.isMirrored)
return map
}
}
private fun writePhotoToFile(photo: CameraSession.CapturedPhoto, file: File) {
val byteBuffer = photo.image.planes[0].buffer


@@ -102,7 +102,7 @@ class CameraView(context: Context) :
// session
internal val cameraSession: CameraSession
val previewView: PreviewView
private val previewView: PreviewView
private var currentConfigureCall: Long = System.currentTimeMillis()
internal var frameProcessor: FrameProcessor? = null
@@ -271,12 +271,8 @@ class CameraView(context: Context) :
invokeOnStopped()
}
override fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?) {
invokeOnChunkReady(filepath, index, durationUs)
}
override fun onInitSegmentReady(filepath: File) {
invokeOnInitReady(filepath)
override fun onVideoChunkReady(filepath: File, index: Int) {
invokeOnChunkReady(filepath, index)
}
override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {


@@ -32,8 +32,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.put("onInitReady", MapBuilder.of("registrationName", "onInitReady"))
.build()?.toMutableMap()
.build()
override fun getName(): String = TAG


@@ -31,12 +31,10 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
init {
try {
// Load the native part of VisionCamera.
// Includes the OpenGL VideoPipeline (needed for video recording)
// Frame Processors remain disabled for RN 0.79+ compatibility
// Includes the OpenGL VideoPipeline, as well as Frame Processor JSI bindings
System.loadLibrary("VisionCamera")
Log.i(TAG, "VisionCamera native library loaded successfully")
} catch (e: UnsatisfiedLinkError) {
Log.e(TAG, "Failed to load VisionCamera C++ library!", e)
Log.e(VisionCameraProxy.TAG, "Failed to load VisionCamera C++ library!", e)
throw e
}
}
@@ -75,10 +73,14 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
}
@ReactMethod(isBlockingSynchronousMethod = true)
fun installFrameProcessorBindings(): Boolean {
// Frame Processors are disabled for React Native 0.79+ compatibility
Log.i(TAG, "Frame Processor bindings not installed - Frame Processors disabled for RN 0.79+ compatibility")
return false
fun installFrameProcessorBindings(): Boolean =
try {
val proxy = VisionCameraProxy(reactApplicationContext)
VisionCameraInstaller.install(proxy)
true
} catch (e: Error) {
Log.e(TAG, "Failed to install Frame Processor JSI Bindings!", e)
false
}
@ReactMethod
@@ -155,7 +157,7 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
}
private fun canRequestPermission(permission: String): Boolean {
val activity = reactApplicationContext.currentActivity as? PermissionAwareActivity
val activity = currentActivity as? PermissionAwareActivity
return activity?.shouldShowRequestPermissionRationale(permission) ?: false
}


@@ -15,7 +15,6 @@ import android.util.Log
import android.util.Size
import android.view.Surface
import android.view.SurfaceHolder
import android.view.WindowManager
import androidx.core.content.ContextCompat
import com.google.mlkit.vision.barcode.common.Barcode
import com.mrousavy.camera.core.capture.RepeatingCaptureRequest
@@ -426,21 +425,6 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
val fps = configuration?.fps ?: 30
// Get actual device rotation from WindowManager since the React Native orientation hook
// doesn't update when rotating between landscape-left and landscape-right on Android.
// Map device rotation to the correct orientationHint for video recording:
// - Counter-clockwise (ROTATION_90) → 270° hint
// - Clockwise (ROTATION_270) → 90° hint
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val recordingOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
else -> Orientation.PORTRAIT
}
val recording = RecordingSession(
context,
cameraId,
@@ -513,8 +497,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
fun onInitialized()
fun onStarted()
fun onStopped()
fun onVideoChunkReady(filepath: File, index: Int, durationUs: Long?)
fun onInitSegmentReady(filepath: File)
fun onVideoChunkReady(filepath: File, index: Int)
fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
}
}


@@ -14,7 +14,7 @@ import java.io.File
import java.nio.ByteBuffer
class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
MediaCodec.Callback(), ChunkedRecorderInterface {
MediaCodec.Callback() {
companion object {
private const val TAG = "ChunkedRecorder"
@@ -73,7 +73,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
private val targetDurationUs = iFrameInterval * 1000000
override val surface: Surface = encoder.createInputSurface()
val surface: Surface = encoder.createInputSurface()
init {
if (!this.outputDirectory.exists()) {
@@ -95,9 +95,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
fun finish() {
muxer.stop()
muxer.release()
// Calculate duration from start time - this is approximate
// The new FragmentedRecordingManager provides accurate duration
callbacks.onVideoChunkReady(filepath, chunkIndex, null)
callbacks.onVideoChunkReady(filepath, chunkIndex)
}
}
@@ -107,12 +105,6 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
muxerContext?.finish()
chunkIndex++
val format = this.encodedFormat
if (format == null) {
Log.e(TAG, "Cannot create muxer: encodedFormat is null (onOutputFormatChanged not called yet)")
return
}
val newFileName = "$chunkIndex.mp4"
val newOutputFile = File(this.outputDirectory, newFileName)
Log.i(TAG, "Creating new muxer for file: $newFileName")
@@ -122,7 +114,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
)
muxer.setOrientationHint(orientationHint)
muxerContext = MuxerContext(
muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, format, this.callbacks
muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!, this.callbacks
)
}
@@ -131,16 +123,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
}
private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
val context = muxerContext ?: return 0L
return bufferInfo.presentationTimeUs - context.startTimeUs
return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
}
override fun start() {
fun start() {
encoder.start()
recording = true
}
override fun finish() {
fun finish() {
synchronized(this) {
muxerContext?.finish()
recording = false
@@ -164,13 +155,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
this.createNextMuxer(bufferInfo)
}
val context = muxerContext
if (context == null) {
Log.e(TAG, "Cannot write sample data: muxerContext is null")
encoder.releaseOutputBuffer(index, false)
return
}
context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo)
muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false)
}
}
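`ChunkedRecordingManager` rides on MediaCodec's asynchronous callback API: the camera renders frames into the encoder's input `Surface`, and chunks rotate at keyframes once `targetDurationUs` elapses. A minimal sketch of the encoder setup this class expects (resolution, bitrate, and fps values here are placeholders, not taken from the diff):

```kotlin
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.view.Surface

// Configure an async H.264 Surface-input encoder; the I-frame interval sets the
// keyframe cadence that chunk rotation keys off.
fun createChunkEncoder(callback: MediaCodec.Callback): Pair<MediaCodec, Surface> {
    val format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080).apply {
        setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
        setInteger(MediaFormat.KEY_BIT_RATE, 8_000_000)
        setInteger(MediaFormat.KEY_FRAME_RATE, 30)
        setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 6)
    }
    return MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC).apply {
        setCallback(callback) // must precede configure() for async mode
        configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
    }.let { it to it.createInputSurface() }
}
```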


@@ -1,15 +0,0 @@
package com.mrousavy.camera.core
import android.view.Surface
/**
* Common interface for chunked video recorders.
* Implemented by both ChunkedRecordingManager (regular MP4) and
* FragmentedRecordingManager (HLS-compatible fMP4).
*/
interface ChunkedRecorderInterface {
val surface: Surface
fun start()
fun finish()
}


@@ -1,332 +0,0 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.util.Log
import android.util.Size
import android.view.Surface
import androidx.media3.common.Format
import androidx.media3.common.MimeTypes
import androidx.media3.common.util.UnstableApi
import androidx.media3.muxer.FragmentedMp4Muxer
import androidx.media3.muxer.Muxer
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.io.FileOutputStream
import java.nio.ByteBuffer
/**
* A recording manager that produces HLS-compatible fragmented MP4 segments.
*
* This produces output similar to the iOS implementation:
* - An initialization segment (init.mp4) containing codec configuration
* - Numbered data segments (0.mp4, 1.mp4, ...) containing media data
*
* Uses AndroidX Media3's FragmentedMp4Muxer which produces proper fMP4 output.
*/
@UnstableApi
class FragmentedRecordingManager(
private val encoder: MediaCodec,
private val outputDirectory: File,
private val orientationDegrees: Int,
private val targetSegmentDurationUs: Long,
private val callbacks: CameraSession.Callback
) : MediaCodec.Callback(), ChunkedRecorderInterface {
companion object {
private const val TAG = "FragmentedRecorder"
fun fromParams(
callbacks: CameraSession.Callback,
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
segmentDurationSeconds: Int = 6
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees()
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
fps?.apply {
format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
}
// I-frame interval affects segment boundaries
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return FragmentedRecordingManager(
codec,
outputDirectory,
recordingOrientationDegrees,
segmentDurationSeconds * 1_000_000L,
callbacks
)
}
}
// State management
private var chunkIndex = 0
private var encodedFormat: MediaFormat? = null
private var recording = false
// Segment tracking
private var segmentContext: SegmentContext? = null
private var initSegmentEmitted = false
override val surface: Surface = encoder.createInputSurface()
init {
if (!outputDirectory.exists()) {
outputDirectory.mkdirs()
}
encoder.setCallback(this)
}
/**
* Context for a single data segment being written.
* Init segments are created separately via createInitSegment().
*/
private inner class SegmentContext(
private val format: MediaFormat,
private val segmentIndex: Int
) {
private val filename = "$segmentIndex.mp4"
private val file = File(outputDirectory, filename)
private val outputStream = FileOutputStream(file)
private val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
private lateinit var videoTrack: Muxer.TrackToken
private var startTimeUs: Long = -1L
private var lastTimeUs: Long = 0L
private var sampleCount = 0
init {
val media3Format = convertToMedia3Format(format)
videoTrack = muxer.addTrack(media3Format)
Log.d(TAG, "Created segment context: $filename")
}
fun writeSample(buffer: ByteBuffer, bufferInfo: BufferInfo): Boolean {
if (startTimeUs < 0) {
startTimeUs = bufferInfo.presentationTimeUs
}
lastTimeUs = bufferInfo.presentationTimeUs
val isKeyFrame = (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
muxer.writeSampleData(videoTrack, buffer, bufferInfo)
sampleCount++
// Check if we should start a new segment at the next keyframe
if (isKeyFrame && sampleCount > 1) {
val segmentDurationUs = bufferInfo.presentationTimeUs - startTimeUs
if (segmentDurationUs >= targetSegmentDurationUs) {
return true // Signal to create new segment
}
}
return false
}
fun finish(): Long {
try {
muxer.close()
outputStream.close()
} catch (e: Exception) {
Log.e(TAG, "Error closing segment", e)
}
val durationUs = if (lastTimeUs > startTimeUs) lastTimeUs - startTimeUs else 0L
callbacks.onVideoChunkReady(file, segmentIndex, durationUs)
Log.d(TAG, "Finished segment: $filename, samples=$sampleCount, duration=${durationUs/1000}ms")
return durationUs
}
}
private fun createNewSegment() {
val format = encodedFormat
if (format == null) {
Log.e(TAG, "Cannot create segment: encodedFormat is null")
return
}
// Close previous segment
segmentContext?.finish()
// Create new data segment (init segments are created separately)
segmentContext = SegmentContext(format, chunkIndex)
chunkIndex++
}
override fun start() {
encoder.start()
recording = true
}
override fun finish() {
synchronized(this) {
recording = false
segmentContext?.finish()
segmentContext = null
try {
encoder.stop()
encoder.release()
} catch (e: Exception) {
Log.e(TAG, "Error stopping encoder", e)
}
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
// Not used for Surface input
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
synchronized(this) {
if (!recording) {
encoder.releaseOutputBuffer(index, false)
return
}
val encodedData = encoder.getOutputBuffer(index)
if (encodedData == null) {
Log.e(TAG, "getOutputBuffer returned null")
encoder.releaseOutputBuffer(index, false)
return
}
// Wait until init segment is emitted (happens in onOutputFormatChanged)
if (!initSegmentEmitted) {
encoder.releaseOutputBuffer(index, false)
return
}
// Create first data segment if needed
if (segmentContext == null) {
createNewSegment()
}
val context = segmentContext
if (context == null) {
encoder.releaseOutputBuffer(index, false)
return
}
try {
val shouldStartNewSegment = context.writeSample(encodedData, bufferInfo)
if (shouldStartNewSegment) {
createNewSegment()
// Write this keyframe to the new segment as well
segmentContext?.writeSample(encodedData, bufferInfo)
}
} catch (e: Exception) {
Log.e(TAG, "Error writing sample", e)
}
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
Log.i(TAG, "Output format changed: $format")
encodedFormat = format
// Create the init segment immediately when we get the format
// This produces an fMP4 file with just ftyp + moov (no samples)
if (!initSegmentEmitted) {
createInitSegment(format)
initSegmentEmitted = true
}
}
/**
* Creates an initialization segment containing only codec configuration (ftyp + moov).
* This is done by creating a muxer, adding the track, and immediately closing it
* without writing any samples.
*/
private fun createInitSegment(format: MediaFormat) {
val initFile = File(outputDirectory, "init.mp4")
try {
val outputStream = FileOutputStream(initFile)
val muxer = FragmentedMp4Muxer.Builder(outputStream).build()
// Convert and add the track
val media3Format = convertToMedia3Format(format)
muxer.addTrack(media3Format)
// Close immediately - this writes just the header (ftyp + moov)
muxer.close()
outputStream.close()
Log.d(TAG, "Created init segment: ${initFile.absolutePath}")
callbacks.onInitSegmentReady(initFile)
} catch (e: Exception) {
Log.e(TAG, "Error creating init segment", e)
}
}
private fun convertToMedia3Format(mediaFormat: MediaFormat): Format {
val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME) ?: MimeTypes.VIDEO_H264
val width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH)
val height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)
val bitRate = try { mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE) } catch (e: Exception) { -1 }
val frameRate = try { mediaFormat.getInteger(MediaFormat.KEY_FRAME_RATE) } catch (e: Exception) { -1 }
// Get CSD (Codec Specific Data) if available - required for init segment
val csd0 = mediaFormat.getByteBuffer("csd-0")
val csd1 = mediaFormat.getByteBuffer("csd-1")
val initData = mutableListOf<ByteArray>()
csd0?.let {
val bytes = ByteArray(it.remaining())
it.duplicate().get(bytes)
initData.add(bytes)
}
csd1?.let {
val bytes = ByteArray(it.remaining())
it.duplicate().get(bytes)
initData.add(bytes)
}
return Format.Builder()
.setSampleMimeType(mimeType)
.setWidth(width)
.setHeight(height)
.setRotationDegrees(orientationDegrees)
.apply {
if (bitRate > 0) setAverageBitrate(bitRate)
if (frameRate > 0) setFrameRate(frameRate.toFloat())
if (initData.isNotEmpty()) setInitializationData(initData)
}
.build()
}
}
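Because each numbered segment is a self-contained run of `moof`/`mdat` fragments, `init.mp4` followed by the data segments concatenates byte-for-byte into a playable fragmented MP4, which is what makes the output HLS-compatible. A hypothetical local verification helper (not part of the diff):

```kotlin
import java.io.File

// Concatenate init.mp4 + 0.mp4, 1.mp4, ... into one playable fMP4 stream,
// mirroring how an HLS player consumes the segments.
fun concatSegments(segmentDir: File, output: File) {
    output.outputStream().use { out ->
        out.write(File(segmentDir, "init.mp4").readBytes())
        generateSequence(0) { it + 1 }
            .map { File(segmentDir, "$it.mp4") }
            .takeWhile { it.exists() }
            .forEach { out.write(it.readBytes()) }
    }
}
```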


@@ -2,85 +2,19 @@ package com.mrousavy.camera.core
import android.annotation.SuppressLint
import android.content.Context
import android.content.res.Configuration
import android.graphics.Point
import android.os.Handler
import android.os.Looper
import android.util.Log
import android.util.Size
import android.view.PixelCopy
import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.WindowManager
import com.facebook.react.bridge.UiThreadUtil
import com.mrousavy.camera.extensions.resize
import com.mrousavy.camera.extensions.rotatedBy
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.ResizeMode
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.math.roundToInt
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlinx.coroutines.withContext
import android.graphics.Bitmap
import android.graphics.Matrix
fun rotateBitmap90CounterClockwise(source: Bitmap): Bitmap {
val width = source.width
val height = source.height
// Create a new Bitmap with swapped width and height
val rotatedBitmap = Bitmap.createBitmap(height, width, source.config ?: Bitmap.Config.ARGB_8888)
for (y in 0 until height) {
for (x in 0 until width) {
// Set the pixel in the new position
rotatedBitmap.setPixel(y, width - 1 - x, source.getPixel(x, y))
}
}
return rotatedBitmap
}
fun Bitmap.transformBitmap(orientation: Orientation): Bitmap {
return when (orientation) {
Orientation.PORTRAIT -> this // No transformation needed
Orientation.LANDSCAPE_LEFT -> {
// Transpose (swap width and height)
val transposedBitmap = Bitmap.createBitmap(height, width, config ?: Bitmap.Config.ARGB_8888)
for (y in 0 until height) {
for (x in 0 until width) {
transposedBitmap.setPixel(y, width - 1 - x, getPixel(x, y))
}
}
transposedBitmap
}
Orientation.PORTRAIT_UPSIDE_DOWN -> {
// Invert vertically and horizontally (180-degree rotation)
val invertedBitmap = Bitmap.createBitmap(width, height, config ?: Bitmap.Config.ARGB_8888)
for (y in 0 until height) {
for (x in 0 until width) {
invertedBitmap.setPixel(width - 1 - x, height - 1 - y, getPixel(x, y))
}
}
invertedBitmap
}
Orientation.LANDSCAPE_RIGHT -> {
// Transpose (swap width and height) and invert vertically
val transposedBitmap = Bitmap.createBitmap(height, width, config ?: Bitmap.Config.ARGB_8888)
for (y in 0 until height) {
for (x in 0 until width) {
transposedBitmap.setPixel(height - 1 - y, x, getPixel(x, y))
}
}
transposedBitmap
}
}
}
@SuppressLint("ViewConstructor")
class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
@@ -146,52 +80,6 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
}
}
suspend fun getBitmap(): Bitmap? = withContext(Dispatchers.Main) {
val frame = holder.getSurfaceFrame()
val width = frame.width()
val height = frame.height()
// Create bitmap matching surface frame dimensions for PixelCopy
// The original code swapped dimensions assuming landscape input - keep that for consistency
val bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888)
// Use a coroutine to suspend until the PixelCopy request is complete
suspendCancellableCoroutine<Bitmap?> { continuation ->
PixelCopy.request(
holder.surface,
bitmap,
{ copyResult ->
if (copyResult == PixelCopy.SUCCESS) {
// Get actual device rotation from WindowManager instead of relying on
// the orientation prop, which may not update on Android when rotating
// between landscape-left and landscape-right.
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val actualOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT
else -> Orientation.PORTRAIT
}
Log.i(TAG, "getBitmap: orientation prop = $orientation, deviceRotation = $deviceRotation, actualOrientation = $actualOrientation")
continuation.resume(bitmap.transformBitmap(actualOrientation))
} else {
continuation.resumeWithException(
RuntimeException("PixelCopy failed with error code $copyResult")
)
}
},
Handler(Looper.getMainLooper())
)
}
}
fun convertLayerPointToCameraCoordinates(point: Point, cameraDeviceDetails: CameraDeviceDetails): Point {
val sensorOrientation = cameraDeviceDetails.sensorOrientation
val cameraSize = Size(cameraDeviceDetails.activeSize.width(), cameraDeviceDetails.activeSize.height())
@@ -209,14 +97,22 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
}
}
private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
var contentSize = contentSize
var androidOrientation = context.getResources().getConfiguration().orientation;
if (androidOrientation == Configuration.ORIENTATION_LANDSCAPE) {
contentSize = Size(contentSize.height, contentSize.width)
override fun requestLayout() {
super.requestLayout()
// Manually trigger measure & layout, as RN on Android skips those.
// See this issue: https://github.com/facebook/react-native/issues/17968#issuecomment-721958427
post {
measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY))
layout(left, top, right, bottom)
}
}
private fun getSize(contentSize: Size, containerSize: Size, resizeMode: ResizeMode): Size {
var contentSize = contentSize
// Swap dimensions if orientation is landscape
if (orientation.isLandscape()) {
contentSize = Size(contentSize.height, contentSize.width)
}
val contentAspectRatio = contentSize.width.toDouble() / contentSize.height
val containerAspectRatio = containerSize.width.toDouble() / containerSize.height
if (!(contentAspectRatio > 0 && containerAspectRatio > 0)) {
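The pixel-by-pixel `setPixel` loops in `rotateBitmap90CounterClockwise` and `transformBitmap` above cost one call per pixel; `android.graphics.Matrix`, which this file already imports, performs the same rotations natively. A sketch of the Matrix-based alternative (an assumed simplification, not what the diff implements):

```kotlin
import android.graphics.Bitmap
import android.graphics.Matrix

// Rotate natively via Matrix; e.g. rotated(270f) matches the 90° counter-clockwise
// per-pixel loop above, and rotated(180f) matches PORTRAIT_UPSIDE_DOWN.
fun Bitmap.rotated(degrees: Float): Bitmap {
    val matrix = Matrix().apply { postRotate(degrees) }
    return Bitmap.createBitmap(this, 0, 0, width, height, matrix, true)
}
```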


@@ -4,7 +4,6 @@ import android.content.Context
import android.util.Log
import android.util.Size
import android.view.Surface
import androidx.media3.common.util.UnstableApi
import com.facebook.common.statfs.StatFsHelper
import com.mrousavy.camera.extensions.getRecommendedBitRate
import com.mrousavy.camera.types.Orientation
@@ -15,8 +14,6 @@ import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
@UnstableApi
class RecordingSession(
context: Context,
val cameraId: String,
@@ -30,8 +27,6 @@ class RecordingSession(
private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit,
private val allCallbacks: CameraSession.Callback,
// Use the new FragmentedMp4Muxer-based recorder for HLS-compatible output
private val useFragmentedMp4: Boolean = true
) {
companion object {
private const val TAG = "RecordingSession"
@@ -39,9 +34,6 @@ class RecordingSession(
private const val AUDIO_SAMPLING_RATE = 44_100
private const val AUDIO_BIT_RATE = 16 * AUDIO_SAMPLING_RATE
private const val AUDIO_CHANNELS = 1
// Segment duration in seconds (matching iOS default of 6 seconds)
private const val SEGMENT_DURATION_SECONDS = 6
}
data class Video(val path: String, val durationMs: Long, val size: Size)
@@ -49,23 +41,7 @@ class RecordingSession(
private val outputPath: File = File(filePath)
private val bitRate = getBitRate()
// Use FragmentedRecordingManager for HLS-compatible fMP4 output,
// or fall back to ChunkedRecordingManager for regular MP4 chunks
private val recorder: ChunkedRecorderInterface = if (useFragmentedMp4) {
FragmentedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath,
SEGMENT_DURATION_SECONDS
)
} else {
ChunkedRecordingManager.fromParams(
private val recorder = ChunkedRecordingManager.fromParams(
allCallbacks,
size,
enableAudio,
@@ -75,7 +51,6 @@ class RecordingSession(
options,
outputPath
)
}
private var startTime: Long? = null
val surface: Surface
get() {


@@ -1,5 +1,3 @@
@file:Suppress("DEPRECATION")
package com.mrousavy.camera.frameprocessor
import android.util.Log
@@ -9,11 +7,14 @@ import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.facebook.react.bridge.ReactApplicationContext
import com.facebook.react.bridge.UiThreadUtil
import com.facebook.react.common.annotations.FrameworkAPI
import com.facebook.react.turbomodule.core.CallInvokerHolderImpl
import com.facebook.react.uimanager.UIManagerHelper
import com.mrousavy.camera.CameraView
import com.mrousavy.camera.core.ViewNotFoundError
import java.lang.ref.WeakReference
@OptIn(FrameworkAPI::class)
@Suppress("KotlinJniMissingFunction") // we use fbjni.
class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
companion object {
@@ -22,26 +23,19 @@ class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
@DoNotStrip
@Keep
private var mHybridData: HybridData?
private var mHybridData: HybridData
private var mContext: WeakReference<ReactApplicationContext>
private var mScheduler: VisionCameraScheduler
val context: ReactApplicationContext
get() = reactContext
init {
// TODO: Fix for React Native 0.79+ - these APIs are now framework-only
// Since Frame Processors are disabled anyway (react-native-worklets-core not found),
// we'll disable this functionality to allow the build to complete
Log.w(TAG, "Frame Processor initialization disabled due to React Native 0.79+ API compatibility issues")
val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl
val jsRuntimeHolder =
context.javaScriptContextHolder?.get() ?: throw Error("JSI Runtime is null! VisionCamera does not yet support bridgeless mode..")
mScheduler = VisionCameraScheduler()
mContext = WeakReference(context)
// Disable Frame Processor functionality since APIs are not compatible
mHybridData = null
// Original code that fails with RN 0.79+:
// val jsCallInvokerHolder = context.catalystInstance.jsCallInvokerHolder as CallInvokerHolderImpl
// val jsRuntimeHolder = context.javaScriptContextHolder?.get() ?: throw Error("JSI Runtime is null!")
// mHybridData = initHybrid(jsRuntimeHolder, jsCallInvokerHolder, mScheduler)
mHybridData = initHybrid(jsRuntimeHolder, jsCallInvokerHolder, mScheduler)
}
@UiThread
@@ -77,9 +71,5 @@ class VisionCameraProxy(private val reactContext: ReactApplicationContext) {
FrameProcessorPluginRegistry.getPlugin(name, this, options)
// private C++ funcs
// Frame Processors are disabled - native registration is skipped via VISION_CAMERA_ENABLE_FRAME_PROCESSORS=OFF
// This method is never called or registered, kept for reference only
// @DoNotStrip
// @Keep
// private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: Any, scheduler: VisionCameraScheduler): HybridData
private external fun initHybrid(jsContext: Long, jsCallInvokerHolder: CallInvokerHolderImpl, scheduler: VisionCameraScheduler): HybridData
}


@@ -1,30 +1,10 @@
package com.mrousavy.camera.utils
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.util.Size
import java.io.File
import java.io.FileOutputStream
class FileUtils {
companion object {
fun writeBitmapTofile(bitmap: Bitmap, file: File, quality: Int) {
FileOutputStream(file).use { stream ->
bitmap.compress(Bitmap.CompressFormat.JPEG, 50, stream)
}
}
fun getImageSize(imagePath: String): Size {
val bitmapOptions = BitmapFactory.Options().also {
it.inJustDecodeBounds = true
}
BitmapFactory.decodeFile(imagePath, bitmapOptions)
val width = bitmapOptions.outWidth
val height = bitmapOptions.outHeight
return Size(width, height)
}
fun createTempFile(context: Context, extension: String): File =
File.createTempFile("mrousavy", extension, context.cacheDir).also {
it.deleteOnExit()


@@ -11,6 +11,6 @@ inline fun withPromise(promise: Promise, closure: () -> Any?) {
} catch (e: Throwable) {
e.printStackTrace()
val error = if (e is CameraError) e else UnknownCameraError(e)
promise.reject("${error.domain}/${error.id}", error.message ?: "Unknown error", error.cause)
promise.reject("${error.domain}/${error.id}", error.message, error.cause)
}
}
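For reference, `withPromise` runs a synchronous closure, resolving the promise with its return value and rejecting with the `"<domain>/<id>"` code on a thrown `CameraError`. A hypothetical caller (method name and payload are illustrative):

```kotlin
import com.facebook.react.bridge.Promise
import com.facebook.react.bridge.ReactMethod

// Returning from the closure resolves the promise; throwing a CameraError
// rejects it with the error's domain/id code as shown above.
@ReactMethod
fun getSessionState(promise: Promise) = withPromise(promise) {
    "running" // illustrative payload
}
```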


@@ -17,8 +17,8 @@
"@react-native-community/blur": "^4.3.2",
"@react-navigation/native": "^6.1.7",
"@react-navigation/native-stack": "^6.9.13",
"react": "^19.1.1",
"react-native": "^0.81.0",
"react": "^18.2.0",
"react-native": "^0.72.3",
"react-native-fast-image": "^8.6.3",
"react-native-gesture-handler": "^2.12.1",
"react-native-mmkv": "^2.10.2",
@@ -42,7 +42,7 @@
"@types/react-native-vector-icons": "^6.4.13",
"@types/react-native-video": "^5.0.15",
"babel-plugin-module-resolver": "^5.0.0",
"eslint": "^9.33.0",
"eslint": "^8.46.0",
"eslint-plugin-prettier": "^5.0.0",
"metro-react-native-babel-preset": "^0.77.0",
"prettier": "^3.2.4",


@@ -47,7 +47,7 @@ export const RecordingButton: React.FC<RecordingButtonProps> = ({ style, camera,
onMediaCaptured(video, 'video')
onStoppedRecording()
},
}, 'video.mp4')
})
console.log('called startRecording()!')
isRecording.current = true
setRecordingState(true)

File diff suppressed because it is too large.

package/flake.lock (generated, new file)

@@ -0,0 +1,189 @@
{
"nodes": {
"android-nixpkgs": {
"inputs": {
"devshell": "devshell",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1701980274,
"narHash": "sha256-uKJIFvsahbWw52TsIht7g2iosXBgJDRMSMoCE1fvEAk=",
"owner": "tadfisher",
"repo": "android-nixpkgs",
"rev": "bce9d437ed54ee1425b66442a12814fee4cdbd51",
"type": "github"
},
"original": {
"owner": "tadfisher",
"repo": "android-nixpkgs",
"type": "github"
}
},
"devshell": {
"inputs": {
"nixpkgs": [
"android-nixpkgs",
"nixpkgs"
],
"systems": "systems"
},
"locked": {
"lastModified": 1701787589,
"narHash": "sha256-ce+oQR4Zq9VOsLoh9bZT8Ip9PaMLcjjBUHVPzW5d7Cw=",
"owner": "numtide",
"repo": "devshell",
"rev": "44ddedcbcfc2d52a76b64fb6122f209881bd3e1e",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "devshell",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_3"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1694102001,
"narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1701718080,
"narHash": "sha256-6ovz0pG76dE0P170pmmZex1wWcQoeiomUZGggfH9XPs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2c7f3c0fb7c08a0814627611d9d7d45ab6d75335",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1702312524,
"narHash": "sha256-gkZJRDBUCpTPBvQk25G0B7vfbpEYM5s5OZqghkjZsnE=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a9bf124c46ef298113270b1f84a164865987a91c",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"android-nixpkgs": "android-nixpkgs",
"flake-utils": "flake-utils_2",
"gitignore": "gitignore",
"nixpkgs": "nixpkgs_2"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

package/flake.nix (new file)

@@ -0,0 +1,77 @@
{
description = "Sample Nix ts-node build";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
gitignore = {
url = "github:hercules-ci/gitignore.nix";
inputs.nixpkgs.follows = "nixpkgs";
};
android-nixpkgs = {
url = "github:tadfisher/android-nixpkgs";
};
};
outputs = {
self,
nixpkgs,
flake-utils,
gitignore,
android-nixpkgs,
...
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
nodejs = pkgs.nodejs-18_x;
# NOTE: this does not work
appBuild = pkgs.stdenv.mkDerivation {
name = "example-ts-node";
version = "0.1.0";
src = gitignore.lib.gitignoreSource ./.; # uses the gitignore in the repo to only copy files git would see
buildInputs = [nodejs];
# https://nixos.org/manual/nixpkgs/stable/#sec-stdenv-phases
buildPhase = ''
# each phase has pre/postHooks. When you make your own phase be sure to still call the hooks
runHook preBuild
npm ci
npm run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
cp -r node_modules $out/node_modules
cp package.json $out/package.json
cp -r dist $out/dist
runHook postInstall
'';
};
android-sdk = android-nixpkgs.sdk.${system} (sdkPkgs:
with sdkPkgs; [
cmdline-tools-latest
build-tools-30-0-3
build-tools-33-0-0
build-tools-33-0-1
build-tools-34-0-0
platform-tools
platforms-android-33
platforms-android-34
emulator
ndk-23-1-7779620
cmake-3-22-1
system-images-android-33-google-apis-x86-64
system-images-android-34-google-apis-x86-64
]);
in
with pkgs; {
defaultPackage = appBuild;
devShell = mkShell {
buildInputs = [nodejs yarn watchman gradle_7 alejandra nodePackages.prettier ktlint kotlin-language-server];
ANDROID_SDK_BIN = android-sdk;
shellHook = ''
export JAVA_HOME=${pkgs.jdk17.home}
source ${android-sdk.out}/nix-support/setup-hook
export PATH=${android-sdk}/bin:$PATH
ORG_GRADLE_PROJECT_ANDROID_HOME="$ANDROID_HOME"
'';
};
});
}


@@ -50,12 +50,4 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
func resumeRecording(promise: Promise) {
cameraSession.resumeRecording(promise: promise)
}
func lockExposure(promise: Promise) {
cameraSession.lockCurrentExposure(promise: promise)
}
func unlockExposure(promise: Promise) {
cameraSession.unlockCurrentExposure(promise: promise)
}
}


@@ -86,13 +86,5 @@ RCT_EXTERN_METHOD(focus
: (NSDictionary*)point resolve
: (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(lockCurrentExposure
: (nonnull NSNumber*)node resolve
: (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(unlockCurrentExposure
: (nonnull NSNumber*)node resolve
: (RCTPromiseResolveBlock)resolve reject
: (RCTPromiseRejectBlock)reject);
@end


@@ -111,18 +111,6 @@ final class CameraViewManager: RCTViewManager {
}
}
@objc
final func lockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
let component = getCameraView(withTag: node)
component.lockExposure(promise: Promise(resolver: resolve, rejecter: reject))
}
@objc
final func unlockCurrentExposure(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
let component = getCameraView(withTag: node)
component.unlockExposure(promise: Promise(resolver: resolve, rejecter: reject))
}
// MARK: Private
private func getCameraView(withTag tag: NSNumber) -> CameraView {


@@ -191,68 +191,4 @@ extension CameraSession {
}
}
}
func lockCurrentExposure(promise: Promise) {
CameraQueues.cameraQueue.async {
withPromise(promise) {
guard let captureDevice = AVCaptureDevice.default(for: .video) else {
print("No capture device available")
return
}
guard captureDevice.isExposureModeSupported(.custom) else {
ReactLogger.log(level: .info, message: "Custom exposure mode not supported")
return
}
do {
// Lock the device for configuration
try captureDevice.lockForConfiguration()
// Get the current exposure duration and ISO
let currentExposureDuration = captureDevice.exposureDuration
let currentISO = captureDevice.iso
// Check if the device supports custom exposure settings
if captureDevice.isExposureModeSupported(.custom) {
// Lock the current exposure and ISO by setting custom exposure mode
captureDevice.setExposureModeCustom(duration: currentExposureDuration, iso: currentISO, completionHandler: nil)
ReactLogger.log(level: .info, message: "Exposure and ISO locked at current values")
} else {
ReactLogger.log(level: .info, message:"Custom exposure mode not supported")
}
// Unlock the device after configuration
captureDevice.unlockForConfiguration()
} catch {
ReactLogger.log(level: .warning, message:"Error locking exposure: \(error)")
}
return nil
}
}
}
func unlockCurrentExposure(promise: Promise) {
CameraQueues.cameraQueue.async {
withPromise(promise) {
guard let captureDevice = AVCaptureDevice.default(for: .video) else {
print("No capture device available")
return
}
do {
if captureDevice.isExposureModeSupported(.autoExpose) {
try captureDevice.lockForConfiguration()
captureDevice.exposureMode = .continuousAutoExposure
captureDevice.unlockForConfiguration()
}
} catch {
ReactLogger.log(level: .warning, message:"Error unlocking exposure: \(error)")
}
return nil
}
}
}
}
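The removed `lockCurrentExposure`/`unlockCurrentExposure` pin AVCaptureDevice's current exposure duration and ISO via the `.custom` exposure mode and restore `.continuousAutoExposure` to release it. For comparison only, the closest Camera2 analogue on Android is the auto-exposure lock flag; a hedged Kotlin sketch (no such Android method exists in this changeset):

```kotlin
import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CaptureRequest

// Freeze or release auto-exposure on the repeating preview request: the Camera2
// counterpart to the iOS lock/unlock pair removed above (hypothetical helper).
fun setAutoExposureLock(session: CameraCaptureSession, request: CaptureRequest.Builder, locked: Boolean) {
    request.set(CaptureRequest.CONTROL_AE_LOCK, locked)
    session.setRepeatingRequest(request.build(), null, null)
}
```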


@@ -87,15 +87,16 @@
"devDependencies": {
"@expo/config-plugins": "^7.2.5",
"@jamesacarr/eslint-formatter-github-actions": "^0.2.0",
"@react-native/eslint-config": "^0.81.0",
"@react-native/typescript-config": "^0.81.0",
"@react-native/eslint-config": "^0.72.2",
"@react-native/typescript-config": "^0.74.0",
"@release-it/conventional-changelog": "^7.0.0",
"@types/react": "^18.2.19",
"@types/react-native": "^0.72.2",
"eslint": "^8.46.0",
"eslint-plugin-prettier": "^5.0.0",
"prettier": "^3.0.1",
"react": "^19.1.1",
"react-native": "^0.81.0",
"react": "^18.2.0",
"react-native": "^0.72.3",
"react-native-builder-bob": "^0.21.3",
"react-native-worklets-core": "^0.3.0",
"release-it": "^16.1.3",


@@ -319,22 +319,6 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
throw tryParseNativeCameraError(e)
}
}
public async lockCurrentExposure(): Promise<void> {
try {
return await CameraModule.lockCurrentExposure(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e)
}
}
public async unlockCurrentExposure(): Promise<void> {
try {
return await CameraModule.unlockCurrentExposure(this.handle)
} catch (e) {
throw tryParseNativeCameraError(e)
}
}
//#endregion
//#region Static Functions (NativeModule)

File diff suppressed because it is too large.