Compare commits

..

No commits in common. "fb425458904eb240466768be08352973fd2f78d8" and "fb96d000bc44dc07f81bf2ad2496ad877355a783" have entirely different histories.

19 changed files with 161 additions and 711 deletions

View File

@ -39,7 +39,7 @@ jobs:
- name: Get yarn cache directory path - name: Get yarn cache directory path
id: yarn-cache-dir-path id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache - name: Restore node_modules from cache
uses: actions/cache@v4 uses: actions/cache@v4
id: yarn-cache id: yarn-cache
@ -83,7 +83,7 @@ jobs:
- name: Get yarn cache directory path - name: Get yarn cache directory path
id: yarn-cache-dir-path id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache - name: Restore node_modules from cache
uses: actions/cache@v4 uses: actions/cache@v4
id: yarn-cache id: yarn-cache

View File

@ -30,7 +30,7 @@ jobs:
- name: Get yarn cache directory path - name: Get yarn cache directory path
id: yarn-cache-dir-path id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache - name: Restore node_modules from cache
uses: actions/cache@v4 uses: actions/cache@v4
id: yarn-cache id: yarn-cache
@ -54,9 +54,12 @@ jobs:
working-directory: package/example/ios working-directory: package/example/ios
- name: Restore Pods cache - name: Restore Pods cache
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: package/example/ios/Pods path: |
package/example/ios/Pods
~/Library/Caches/CocoaPods
~/.cocoapods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
restore-keys: | restore-keys: |
${{ runner.os }}-pods- ${{ runner.os }}-pods-
@ -87,7 +90,7 @@ jobs:
- name: Get yarn cache directory path - name: Get yarn cache directory path
id: yarn-cache-dir-path id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache - name: Restore node_modules from cache
uses: actions/cache@v4 uses: actions/cache@v4
id: yarn-cache id: yarn-cache
@ -113,9 +116,12 @@ jobs:
working-directory: package/example/ios working-directory: package/example/ios
- name: Restore Pods cache - name: Restore Pods cache
uses: actions/cache@v3 uses: actions/cache@v4
with: with:
path: package/example/ios/Pods path: |
package/example/ios/Pods
~/Library/Caches/CocoaPods
~/.cocoapods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
restore-keys: | restore-keys: |
${{ runner.os }}-pods- ${{ runner.os }}-pods-

View File

@ -43,7 +43,7 @@ jobs:
- name: Get yarn cache directory path - name: Get yarn cache directory path
id: yarn-cache-dir-path id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache - name: Restore node_modules from cache
uses: actions/cache@v4 uses: actions/cache@v4
id: yarn-cache id: yarn-cache
@ -81,7 +81,7 @@ jobs:
- name: Get yarn cache directory path - name: Get yarn cache directory path
id: yarn-cache-dir-path id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache - name: Restore node_modules from cache
uses: actions/cache@v4 uses: actions/cache@v4
id: yarn-cache id: yarn-cache

View File

@ -1,5 +0,0 @@
use flake . --impure
if [ -f .envrc.local ]; then
source .envrc.local
fi

View File

@ -15,8 +15,6 @@
#include <android/hardware_buffer.h> #include <android/hardware_buffer.h>
#include <android/hardware_buffer_jni.h> #include <android/hardware_buffer_jni.h>
#include "FinalAction.h"
namespace vision { namespace vision {
using namespace facebook; using namespace facebook;
@ -94,13 +92,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
jsi::HostFunctionType toArrayBuffer = JSI_FUNC { jsi::HostFunctionType toArrayBuffer = JSI_FUNC {
#if __ANDROID_API__ >= 26 #if __ANDROID_API__ >= 26
AHardwareBuffer* hardwareBuffer = this->frame->getHardwareBuffer(); AHardwareBuffer* hardwareBuffer = this->frame->getHardwareBuffer();
AHardwareBuffer_acquire(hardwareBuffer);
finally([&]() { AHardwareBuffer_release(hardwareBuffer); });
AHardwareBuffer_Desc bufferDescription; AHardwareBuffer_Desc bufferDescription;
AHardwareBuffer_describe(hardwareBuffer, &bufferDescription); AHardwareBuffer_describe(hardwareBuffer, &bufferDescription);
__android_log_print(ANDROID_LOG_INFO, "Frame", "Converting %i x %i @ %i HardwareBuffer...", bufferDescription.width, __android_log_print(ANDROID_LOG_INFO, "Frame", "Buffer %i x %i @ %i", bufferDescription.width, bufferDescription.height,
bufferDescription.height, bufferDescription.stride); bufferDescription.stride);
size_t size = bufferDescription.height * bufferDescription.stride; size_t size = bufferDescription.height * bufferDescription.stride;
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache"; static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
@ -122,21 +118,16 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
// Get CPU access to the HardwareBuffer (&buffer is a virtual temporary address) // Get CPU access to the HardwareBuffer (&buffer is a virtual temporary address)
void* buffer; void* buffer;
int result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_MASK, -1, nullptr, &buffer); AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_MASK, -1, nullptr, &buffer);
if (result != 0) {
throw jsi::JSError(runtime, "Failed to lock HardwareBuffer for reading!");
}
finally([&]() {
int result = AHardwareBuffer_unlock(hardwareBuffer, nullptr);
if (result != 0) {
throw jsi::JSError(runtime, "Failed to lock HardwareBuffer for reading!");
}
});
// directly write to C++ JSI ArrayBuffer // directly write to C++ JSI ArrayBuffer
auto destinationBuffer = arrayBuffer.data(runtime); auto destinationBuffer = arrayBuffer.data(runtime);
memcpy(destinationBuffer, buffer, sizeof(uint8_t) * size); memcpy(destinationBuffer, buffer, sizeof(uint8_t) * size);
// Release HardwareBuffer again
AHardwareBuffer_unlock(hardwareBuffer, nullptr);
AHardwareBuffer_release(hardwareBuffer);
return arrayBuffer; return arrayBuffer;
#else #else
throw jsi::JSError(runtime, "Frame.toArrayBuffer() is only available if minSdkVersion is set to 26 or higher!"); throw jsi::JSError(runtime, "Frame.toArrayBuffer() is only available if minSdkVersion is set to 26 or higher!");

View File

@ -1,166 +0,0 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.nio.ByteBuffer
class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) :
MediaCodec.Callback() {
companion object {
private const val TAG = "ChunkedRecorder"
fun fromParams(
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
iFrameInterval: Int = 3
): ChunkedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val orientationDegrees = cameraOrientation.toDegrees()
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
fps?.apply {
format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
}
// TODO: Pull this out into configuration
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.i(TAG, "Video Format: $format, orientation $cameraOrientation")
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval)
}
}
// In flight details
private var currentFrameNumber: Int = 0
private var chunkIndex = -1
private var encodedFormat: MediaFormat? = null
private var recording = false;
private val targetDurationUs = iFrameInterval * 1000000
val surface: Surface = encoder.createInputSurface()
init {
if (!this.outputDirectory.exists()) {
this.outputDirectory.mkdirs()
}
encoder.setCallback(this)
}
// Muxer specific
private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) {
val videoTrack: Int = muxer.addTrack(encodedFormat)
val startTimeUs: Long = startTimeUs
init {
muxer.start()
}
fun finish() {
muxer.stop()
muxer.release()
}
}
private var muxerContext: MuxerContext? = null
private fun createNextMuxer(bufferInfo: BufferInfo) {
muxerContext?.finish()
chunkIndex++
val newFileName = "$chunkIndex.mp4"
val newOutputFile = File(this.outputDirectory, newFileName)
Log.i(TAG, "Creating new muxer for file: $newFileName")
val muxer = MediaMuxer(
newOutputFile.absolutePath,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4
)
muxer.setOrientationHint(orientationHint)
muxerContext = MuxerContext(
muxer, bufferInfo.presentationTimeUs, this.encodedFormat!!
)
}
private fun atKeyframe(bufferInfo: BufferInfo): Boolean {
return (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
}
private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
}
fun start() {
encoder.start()
recording = true
}
fun finish() {
synchronized(this) {
muxerContext?.finish()
recording = false
muxerContext = null
encoder.stop()
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: MediaCodec.BufferInfo) {
synchronized(this) {
if (!recording) {
return
}
val encodedData: ByteBuffer = encoder.getOutputBuffer(index)
?: throw RuntimeException("getOutputBuffer was null")
if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
this.createNextMuxer(bufferInfo)
}
muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
// Implement error handling
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
encodedFormat = format
}
}

View File

@ -1,6 +1,9 @@
package com.mrousavy.camera.core package com.mrousavy.camera.core
import android.content.Context import android.content.Context
import android.media.MediaCodec
import android.media.MediaRecorder
import android.os.Build
import android.util.Log import android.util.Log
import android.util.Size import android.util.Size
import android.view.Surface import android.view.Surface
@ -10,10 +13,7 @@ import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions import com.mrousavy.camera.types.RecordVideoOptions
import com.mrousavy.camera.utils.FileUtils import com.mrousavy.camera.utils.FileUtils
import java.io.File import java.io.File
import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
class RecordingSession( class RecordingSession(
context: Context, context: Context,
val cameraId: String, val cameraId: String,
@ -21,7 +21,7 @@ class RecordingSession(
private val enableAudio: Boolean, private val enableAudio: Boolean,
private val fps: Int? = null, private val fps: Int? = null,
private val hdr: Boolean = false, private val hdr: Boolean = false,
private val cameraOrientation: Orientation, private val orientation: Orientation,
private val options: RecordVideoOptions, private val options: RecordVideoOptions,
private val callback: (video: Video) -> Unit, private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit private val onError: (error: CameraError) -> Unit
@ -36,34 +36,69 @@ class RecordingSession(
data class Video(val path: String, val durationMs: Long, val size: Size) data class Video(val path: String, val durationMs: Long, val size: Size)
private val outputPath = run { private val bitRate = getBitRate()
val videoDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES) private val recorder: MediaRecorder
val sdf = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US) private val outputFile: File
val videoFileName = "VID_${sdf.format(Date())}" private var startTime: Long? = null
File(videoDir!!, videoFileName) val surface: Surface = MediaCodec.createPersistentInputSurface()
// TODO: Implement HDR
init {
outputFile = FileUtils.createTempFile(context, options.fileType.toExtension())
Log.i(TAG, "Creating RecordingSession for ${outputFile.absolutePath}")
recorder = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) MediaRecorder(context) else MediaRecorder()
if (enableAudio) recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER)
recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE)
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
recorder.setOutputFile(outputFile.absolutePath)
recorder.setVideoEncodingBitRate(bitRate)
recorder.setVideoSize(size.height, size.width)
recorder.setMaxFileSize(getMaxFileSize())
if (fps != null) recorder.setVideoFrameRate(fps)
Log.i(TAG, "Using ${options.videoCodec} Video Codec at ${bitRate / 1_000_000.0} Mbps..")
recorder.setVideoEncoder(options.videoCodec.toVideoEncoder())
if (enableAudio) {
Log.i(TAG, "Adding Audio Channel..")
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC)
recorder.setAudioEncodingBitRate(AUDIO_BIT_RATE)
recorder.setAudioSamplingRate(AUDIO_SAMPLING_RATE)
recorder.setAudioChannels(AUDIO_CHANNELS)
}
recorder.setInputSurface(surface)
// recorder.setOrientationHint(orientation.toDegrees())
recorder.setOnErrorListener { _, what, extra ->
Log.e(TAG, "MediaRecorder Error: $what ($extra)")
stop()
val name = when (what) {
MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN -> "unknown"
MediaRecorder.MEDIA_ERROR_SERVER_DIED -> "server-died"
else -> "unknown"
}
onError(RecorderError(name, extra))
}
recorder.setOnInfoListener { _, what, extra ->
Log.i(TAG, "MediaRecorder Info: $what ($extra)")
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
onError(InsufficientStorageError())
}
} }
private val bitRate = getBitRate() Log.i(TAG, "Created $this!")
private val recorder = ChunkedRecordingManager.fromParams(
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath
)
private var startTime: Long? = null
val surface: Surface
get() {
return recorder.surface
} }
fun start() { fun start() {
synchronized(this) { synchronized(this) {
Log.i(TAG, "Starting RecordingSession..") Log.i(TAG, "Starting RecordingSession..")
startTime = System.currentTimeMillis() recorder.prepare()
recorder.start() recorder.start()
startTime = System.currentTimeMillis()
} }
} }
@ -71,29 +106,29 @@ class RecordingSession(
synchronized(this) { synchronized(this) {
Log.i(TAG, "Stopping RecordingSession..") Log.i(TAG, "Stopping RecordingSession..")
try { try {
recorder.finish() recorder.stop()
recorder.release()
} catch (e: Error) { } catch (e: Error) {
Log.e(TAG, "Failed to stop MediaRecorder!", e) Log.e(TAG, "Failed to stop MediaRecorder!", e)
} }
val stopTime = System.currentTimeMillis() val stopTime = System.currentTimeMillis()
val durationMs = stopTime - (startTime ?: stopTime) val durationMs = stopTime - (startTime ?: stopTime)
Log.i(TAG, "Finished recording video at $outputPath") callback(Video(outputFile.absolutePath, durationMs, size))
callback(Video(outputPath.absolutePath, durationMs, size))
} }
} }
fun pause() { fun pause() {
synchronized(this) { synchronized(this) {
Log.i(TAG, "Pausing Recording Session..") Log.i(TAG, "Pausing Recording Session..")
// TODO: Implement pausing recorder.pause()
} }
} }
fun resume() { fun resume() {
synchronized(this) { synchronized(this) {
Log.i(TAG, "Resuming Recording Session..") Log.i(TAG, "Resuming Recording Session..")
// TODO: Implement pausing recorder.resume()
} }
} }
@ -124,9 +159,6 @@ class RecordingSession(
override fun toString(): String { override fun toString(): String {
val audio = if (enableAudio) "with audio" else "without audio" val audio = if (enableAudio) "with audio" else "without audio"
return "${size.width} x ${size.height} @ $fps FPS ${options.videoCodec} ${options.fileType} " + return "${size.width} x ${size.height} @ $fps FPS ${options.videoCodec} ${options.fileType} " +
"$cameraOrientation ${bitRate / 1_000_000.0} Mbps RecordingSession ($audio)" "$orientation ${bitRate / 1_000_000.0} Mbps RecordingSession ($audio)"
}
fun onFrame() {
} }
} }

View File

@ -9,7 +9,6 @@ import android.os.Build
import android.util.Log import android.util.Log
import android.view.Surface import android.view.Surface
import androidx.annotation.Keep import androidx.annotation.Keep
import androidx.annotation.RequiresApi
import com.facebook.jni.HybridData import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip import com.facebook.proguard.annotations.DoNotStrip
import com.mrousavy.camera.frameprocessor.Frame import com.mrousavy.camera.frameprocessor.Frame
@ -32,7 +31,7 @@ class VideoPipeline(
val height: Int, val height: Int,
val format: PixelFormat = PixelFormat.NATIVE, val format: PixelFormat = PixelFormat.NATIVE,
private val isMirrored: Boolean = false, private val isMirrored: Boolean = false,
private val enableFrameProcessor: Boolean = false, enableFrameProcessor: Boolean = false,
private val callback: CameraSession.Callback private val callback: CameraSession.Callback
) : SurfaceTexture.OnFrameAvailableListener, ) : SurfaceTexture.OnFrameAvailableListener,
Closeable { Closeable {
@ -81,9 +80,10 @@ class VideoPipeline(
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
Log.i(TAG, "Using API 29 for GPU ImageReader...") Log.i(TAG, "Using API 29 for GPU ImageReader...")
val usageFlags = getRecommendedHardwareBufferFlags() // If we are in PRIVATE, we just pass it to the GPU as efficiently as possible - so use GPU flag.
Log.i(TAG, "Using ImageReader flags: $usageFlags") // If we are in YUV/RGB/..., we probably want to access Frame data - so use CPU flag.
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags) val usage = if (format == ImageFormat.PRIVATE) HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE else HardwareBuffer.USAGE_CPU_READ_OFTEN
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usage)
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format) imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format)
} else { } else {
Log.i(TAG, "Using legacy API for CPU ImageReader...") Log.i(TAG, "Using legacy API for CPU ImageReader...")
@ -91,7 +91,7 @@ class VideoPipeline(
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES) imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES)
} }
imageReader!!.setOnImageAvailableListener({ reader -> imageReader!!.setOnImageAvailableListener({ reader ->
// Log.i(TAG, "ImageReader::onImageAvailable!")s Log.i(TAG, "ImageReader::onImageAvailable!")
val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
// TODO: Get correct orientation and isMirrored // TODO: Get correct orientation and isMirrored
@ -103,7 +103,7 @@ class VideoPipeline(
if (hasOutputs) { if (hasOutputs) {
// If we have outputs (e.g. a RecordingSession), pass the frame along to the OpenGL pipeline // If we have outputs (e.g. a RecordingSession), pass the frame along to the OpenGL pipeline
imageWriter?.queueInputImage(image) imageWriter!!.queueInputImage(image)
} }
} catch (e: Throwable) { } catch (e: Throwable) {
Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e) Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e)
@ -152,9 +152,6 @@ class VideoPipeline(
// 5. Draw it with applied rotation/mirroring // 5. Draw it with applied rotation/mirroring
onFrame(transformMatrix) onFrame(transformMatrix)
// 6. Notify the recording session.
recordingSession?.onFrame()
} }
} }
@ -185,52 +182,6 @@ class VideoPipeline(
} }
} }
/**
* Get the recommended HardwareBuffer flags for creating ImageReader instances with.
*
* Tries to use [HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE] if possible, [HardwareBuffer.USAGE_CPU_READ_OFTEN]
* or a combination of both flags if CPU access is needed ([enableFrameProcessor]), and [0] otherwise.
*/
@RequiresApi(Build.VERSION_CODES.Q)
@Suppress("LiftReturnOrAssignment")
private fun getRecommendedHardwareBufferFlags(): Long {
val cpuFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN
val gpuFlag = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
val bothFlags = gpuFlag or cpuFlag
if (format == PixelFormat.NATIVE) {
// We don't need CPU access, so we can use GPU optimized buffers
if (supportsHardwareBufferFlags(gpuFlag)) {
// We support GPU Buffers directly and
Log.i(TAG, "GPU HardwareBuffers are supported!")
return gpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
} else {
// We are using YUV or RGB formats, so we need CPU access on the Frame
if (supportsHardwareBufferFlags(bothFlags)) {
// We support both CPU and GPU flags!
Log.i(TAG, "GPU + CPU HardwareBuffers are supported!")
return bothFlags
} else if (supportsHardwareBufferFlags(cpuFlag)) {
// We only support a CPU read flag, that's fine
Log.i(TAG, "CPU HardwareBuffers are supported!")
return cpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
}
}
@RequiresApi(Build.VERSION_CODES.Q)
private fun supportsHardwareBufferFlags(flags: Long): Boolean {
val hardwareBufferFormat = format.toHardwareBufferFormat()
return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags)
}
private external fun getInputTextureId(): Int private external fun getInputTextureId(): Int
private external fun onBeforeFrame() private external fun onBeforeFrame()
private external fun onFrame(transformMatrix: FloatArray) private external fun onFrame(transformMatrix: FloatArray)

View File

@ -8,14 +8,6 @@ enum class Orientation(override val unionValue: String) : JSUnionValue {
PORTRAIT_UPSIDE_DOWN("portrait-upside-down"), PORTRAIT_UPSIDE_DOWN("portrait-upside-down"),
LANDSCAPE_LEFT("landscape-left"); LANDSCAPE_LEFT("landscape-left");
fun isLandscape(): Boolean =
when (this) {
PORTRAIT -> false
PORTRAIT_UPSIDE_DOWN -> false
LANDSCAPE_LEFT -> true
LANDSCAPE_RIGHT -> true
}
fun toDegrees(): Int = fun toDegrees(): Int =
when (this) { when (this) {
PORTRAIT -> 0 PORTRAIT -> 0

View File

@ -4,7 +4,6 @@ import android.graphics.ImageFormat
import android.util.Log import android.util.Log
import com.mrousavy.camera.core.InvalidTypeScriptUnionError import com.mrousavy.camera.core.InvalidTypeScriptUnionError
import com.mrousavy.camera.core.PixelFormatNotSupportedError import com.mrousavy.camera.core.PixelFormatNotSupportedError
import com.mrousavy.camera.utils.HardwareBufferUtils
import com.mrousavy.camera.utils.ImageFormatUtils import com.mrousavy.camera.utils.ImageFormatUtils
enum class PixelFormat(override val unionValue: String) : JSUnionValue { enum class PixelFormat(override val unionValue: String) : JSUnionValue {
@ -20,11 +19,6 @@ enum class PixelFormat(override val unionValue: String) : JSUnionValue {
else -> throw PixelFormatNotSupportedError(this.unionValue) else -> throw PixelFormatNotSupportedError(this.unionValue)
} }
fun toHardwareBufferFormat(): Int {
val imageFormat = toImageFormat()
return HardwareBufferUtils.getHardwareBufferFormat(imageFormat)
}
companion object : JSUnionValue.Companion<PixelFormat> { companion object : JSUnionValue.Companion<PixelFormat> {
private const val TAG = "PixelFormat" private const val TAG = "PixelFormat"
fun fromImageFormat(imageFormat: Int): PixelFormat = fun fromImageFormat(imageFormat: Int): PixelFormat =

View File

@ -1,6 +1,5 @@
package com.mrousavy.camera.types package com.mrousavy.camera.types
import android.media.MediaFormat
import android.media.MediaRecorder import android.media.MediaRecorder
enum class VideoCodec(override val unionValue: String) : JSUnionValue { enum class VideoCodec(override val unionValue: String) : JSUnionValue {
@ -13,12 +12,6 @@ enum class VideoCodec(override val unionValue: String) : JSUnionValue {
H265 -> MediaRecorder.VideoEncoder.HEVC H265 -> MediaRecorder.VideoEncoder.HEVC
} }
fun toMimeType(): String =
when (this) {
H264 -> MediaFormat.MIMETYPE_VIDEO_AVC
H265 -> MediaFormat.MIMETYPE_VIDEO_HEVC
}
companion object : JSUnionValue.Companion<VideoCodec> { companion object : JSUnionValue.Companion<VideoCodec> {
override fun fromUnionValue(unionValue: String?): VideoCodec = override fun fromUnionValue(unionValue: String?): VideoCodec =
when (unionValue) { when (unionValue) {

View File

@ -1,36 +0,0 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.media.ImageReader
import android.os.Build
class HardwareBufferUtils {
companion object {
fun getHardwareBufferFormat(imageFormat: Int): Int {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
// Dynamically create an ImageReader with the target ImageFormat, and then
// get it's HardwareBuffer format to see what it uses underneath.
val imageReader = ImageReader.newInstance(1, 1, imageFormat, 1, HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)
val format = imageReader.hardwareBufferFormat
imageReader.close()
return format
}
if (imageFormat == ImageFormat.PRIVATE) {
// PRIVATE formats are opaque, their actual equivalent HardwareBuffer format is unknown.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
// We can assume that YUV 4:2:0 or RGB is used.
return HardwareBuffer.YCBCR_420_888
} else {
// Maybe assume we are on RGB if we're not on API R or above...
return HardwareBuffer.RGB_888
}
}
// According to PublicFormat.cpp in Android's codebase, the formats map 1:1 anyways..
// https://cs.android.com/android/platform/superproject/main/+/main:frameworks/native/libs/ui/PublicFormat.cpp
return imageFormat
}
}
}

View File

@ -1,34 +0,0 @@
//
// MutableRawBuffer.h
// VisionCamera
//
// Created by Marc Rousavy on 17.01.24.
// Copyright © 2024 mrousavy. All rights reserved.
//
#pragma once
namespace vision {
template <typename F> struct FinalAction {
public:
FinalAction(F f) : clean_{f} {}
~FinalAction() {
if (enabled_)
clean_();
}
void disable() {
enabled_ = false;
};
private:
F clean_;
bool enabled_ = true;
};
} // namespace vision
template <typename F> vision::FinalAction<F> finally(F f) {
return vision::FinalAction<F>(std::move(f));
}

View File

@ -10,7 +10,7 @@
# Specifies the JVM arguments used for the daemon process. # Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings. # The setting is particularly useful for tweaking memory settings.
# Default value: -Xmx10248m -XX:MaxPermSize=256m # Default value: -Xmx10248m -XX:MaxPermSize=256m
org.gradle.jvmargs=-Xms512M -Xmx4g -XX:MaxMetaspaceSize=1g -Dkotlin.daemon.jvm.options="-Xmx1g" org.gradle.jvmargs=-Xms512M -Xmx4g -XX:MaxPermSize=1024m -XX:MaxMetaspaceSize=1g -Dkotlin.daemon.jvm.options="-Xmx1g"
org.gradle.parallel=true org.gradle.parallel=true
org.gradle.daemon=true org.gradle.daemon=true
org.gradle.configureondemand=true org.gradle.configureondemand=true
@ -43,5 +43,3 @@ hermesEnabled=true
#VisionCamera_disableFrameProcessors=true #VisionCamera_disableFrameProcessors=true
# Can be set to true to include the full 2.4 MB MLKit dependency # Can be set to true to include the full 2.4 MB MLKit dependency
VisionCamera_enableCodeScanner=true VisionCamera_enableCodeScanner=true
android.aapt2FromMavenOverride=/nix/store/6nrdbhdcmrig3vr80sc7qf9lna5cs1gb-android-sdk-env/share/android-sdk/build-tools/33.0.0/aapt2

View File

@ -27,9 +27,9 @@ PODS:
- libwebp/sharpyuv (1.3.2) - libwebp/sharpyuv (1.3.2)
- libwebp/webp (1.3.2): - libwebp/webp (1.3.2):
- libwebp/sharpyuv - libwebp/sharpyuv
- MMKV (1.3.3): - MMKV (1.3.2):
- MMKVCore (~> 1.3.3) - MMKVCore (~> 1.3.2)
- MMKVCore (1.3.3) - MMKVCore (1.3.2)
- RCT-Folly (2021.07.22.00): - RCT-Folly (2021.07.22.00):
- boost - boost
- DoubleConversion - DoubleConversion
@ -337,7 +337,7 @@ PODS:
- react-native-mmkv (2.11.0): - react-native-mmkv (2.11.0):
- MMKV (>= 1.2.13) - MMKV (>= 1.2.13)
- React-Core - React-Core
- react-native-safe-area-context (4.8.2): - react-native-safe-area-context (4.8.0):
- React-Core - React-Core
- react-native-video (5.2.1): - react-native-video (5.2.1):
- React-Core - React-Core
@ -675,8 +675,8 @@ SPEC CHECKSUMS:
hermes-engine: 9180d43df05c1ed658a87cc733dc3044cf90c00a hermes-engine: 9180d43df05c1ed658a87cc733dc3044cf90c00a
libevent: 4049cae6c81cdb3654a443be001fb9bdceff7913 libevent: 4049cae6c81cdb3654a443be001fb9bdceff7913
libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009 libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
MMKV: f902fb6719da13c2ab0965233d8963a59416f911 MMKV: f21593c0af4b3f2a0ceb8f820f28bb639ea22bb7
MMKVCore: d26e4d3edd5cb8588c2569222cbd8be4231374e9 MMKVCore: 31b4cb83f8266467eef20a35b6d78e409a11060d
RCT-Folly: 424b8c9a7a0b9ab2886ffe9c3b041ef628fd4fb1 RCT-Folly: 424b8c9a7a0b9ab2886ffe9c3b041ef628fd4fb1
RCTRequired: 83bca1c184feb4d2e51c72c8369b83d641443f95 RCTRequired: 83bca1c184feb4d2e51c72c8369b83d641443f95
RCTTypeSafety: 13c4a87a16d7db6cd66006ce9759f073402ef85b RCTTypeSafety: 13c4a87a16d7db6cd66006ce9759f073402ef85b
@ -695,7 +695,7 @@ SPEC CHECKSUMS:
react-native-blur: cfdad7b3c01d725ab62a8a729f42ea463998afa2 react-native-blur: cfdad7b3c01d725ab62a8a729f42ea463998afa2
react-native-cameraroll: 4701ae7c3dbcd3f5e9e150ca17f250a276154b35 react-native-cameraroll: 4701ae7c3dbcd3f5e9e150ca17f250a276154b35
react-native-mmkv: e97c0c79403fb94577e5d902ab1ebd42b0715b43 react-native-mmkv: e97c0c79403fb94577e5d902ab1ebd42b0715b43
react-native-safe-area-context: 0ee144a6170530ccc37a0fd9388e28d06f516a89 react-native-safe-area-context: d1c8161a1e9560f7066e8926a7d825eb57c5dab5
react-native-video: c26780b224543c62d5e1b2a7244a5cd1b50e8253 react-native-video: c26780b224543c62d5e1b2a7244a5cd1b50e8253
react-native-worklets-core: a894d572639fcf37c6d284cc799882d25d00c93d react-native-worklets-core: a894d572639fcf37c6d284cc799882d25d00c93d
React-NativeModulesApple: b6868ee904013a7923128892ee4a032498a1024a React-NativeModulesApple: b6868ee904013a7923128892ee4a032498a1024a
@ -729,4 +729,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
COCOAPODS: 1.11.3 COCOAPODS: 1.14.3

View File

@ -160,9 +160,9 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
const frameProcessor = useFrameProcessor((frame) => { const frameProcessor = useFrameProcessor((frame) => {
'worklet' 'worklet'
// console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`) console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
// examplePlugin(frame) examplePlugin(frame)
// exampleKotlinSwiftPlugin(frame) exampleKotlinSwiftPlugin(frame)
}, []) }, [])
return ( return (

View File

@ -74,7 +74,7 @@ export function MediaPage({ navigation, route }: Props): React.ReactElement {
} }
}, [path, type]) }, [path, type])
const source = useMemo(() => ({ uri: `file://${path}/1.mp4` }), [path]) const source = useMemo(() => ({ uri: `file://${path}` }), [path])
const screenStyle = useMemo(() => ({ opacity: hasMediaLoaded ? 1 : 0 }), [hasMediaLoaded]) const screenStyle = useMemo(() => ({ opacity: hasMediaLoaded ? 1 : 0 }), [hasMediaLoaded])

View File

@ -1,189 +0,0 @@
{
"nodes": {
"android-nixpkgs": {
"inputs": {
"devshell": "devshell",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1701980274,
"narHash": "sha256-uKJIFvsahbWw52TsIht7g2iosXBgJDRMSMoCE1fvEAk=",
"owner": "tadfisher",
"repo": "android-nixpkgs",
"rev": "bce9d437ed54ee1425b66442a12814fee4cdbd51",
"type": "github"
},
"original": {
"owner": "tadfisher",
"repo": "android-nixpkgs",
"type": "github"
}
},
"devshell": {
"inputs": {
"nixpkgs": [
"android-nixpkgs",
"nixpkgs"
],
"systems": "systems"
},
"locked": {
"lastModified": 1701787589,
"narHash": "sha256-ce+oQR4Zq9VOsLoh9bZT8Ip9PaMLcjjBUHVPzW5d7Cw=",
"owner": "numtide",
"repo": "devshell",
"rev": "44ddedcbcfc2d52a76b64fb6122f209881bd3e1e",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "devshell",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_3"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1694102001,
"narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1701718080,
"narHash": "sha256-6ovz0pG76dE0P170pmmZex1wWcQoeiomUZGggfH9XPs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2c7f3c0fb7c08a0814627611d9d7d45ab6d75335",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1702312524,
"narHash": "sha256-gkZJRDBUCpTPBvQk25G0B7vfbpEYM5s5OZqghkjZsnE=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a9bf124c46ef298113270b1f84a164865987a91c",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"android-nixpkgs": "android-nixpkgs",
"flake-utils": "flake-utils_2",
"gitignore": "gitignore",
"nixpkgs": "nixpkgs_2"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View File

@ -1,77 +0,0 @@
# Nix flake: per-system development shell (Node.js, Android SDK, JDK 17) plus an
# experimental `nix build` target for the example app.
{
description = "Sample Nix ts-node build";
inputs = {
# Package set pinned to the nixos-unstable channel.
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
# Helper used below (eachDefaultSystem) to emit outputs once per platform.
flake-utils.url = "github:numtide/flake-utils";
# gitignore.nix filters the source tree by the repo's .gitignore (see `src` below).
gitignore = {
url = "github:hercules-ci/gitignore.nix";
inputs.nixpkgs.follows = "nixpkgs";
};
# Provides composable Android SDK component packages (see `android-sdk` below).
android-nixpkgs = {
url = "github:tadfisher/android-nixpkgs";
};
};
outputs = {
self,
nixpkgs,
flake-utils,
gitignore,
android-nixpkgs,
...
}:
# Build the attrset below once for each default system (x86_64-linux, aarch64-darwin, ...).
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
# Node.js 18. This `let` binding also takes precedence over the `with pkgs;`
# scope further down, so the devShell uses the same pinned Node version.
nodejs = pkgs.nodejs-18_x;
# NOTE: this does not work
# (presumably because `npm ci` fetches from the network, which the Nix build
# sandbox forbids — TODO confirm; nixpkgs' buildNpmPackage with a pinned
# npmDepsHash would be the conventional replacement.)
appBuild = pkgs.stdenv.mkDerivation {
name = "example-ts-node";
version = "0.1.0";
src = gitignore.lib.gitignoreSource ./.; # uses the gitignore in the repo to only copy files git would see
buildInputs = [nodejs];
# https://nixos.org/manual/nixpkgs/stable/#sec-stdenv-phases
buildPhase = ''
# each phase has pre/postHooks. When you make your own phase be sure to still call the hooks
runHook preBuild
npm ci
npm run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
cp -r node_modules $out/node_modules
cp package.json $out/package.json
cp -r dist $out/dist
runHook postInstall
'';
};
# Android SDK assembled from exactly the components listed here
# (cmdline-tools, several build-tools, platforms 33/34, NDK, CMake,
# emulator + API 33/34 x86_64 system images).
android-sdk = android-nixpkgs.sdk.${system} (sdkPkgs:
with sdkPkgs; [
cmdline-tools-latest
build-tools-30-0-3
build-tools-33-0-0
build-tools-33-0-1
build-tools-34-0-0
platform-tools
platforms-android-33
platforms-android-34
emulator
ndk-23-1-7779620
cmake-3-22-1
system-images-android-33-google-apis-x86-64
system-images-android-34-google-apis-x86-64
]);
in
with pkgs; {
# `nix build` target — currently broken, see NOTE on appBuild above.
defaultPackage = appBuild;
# `nix develop` shell with the JS and Android toolchains.
devShell = mkShell {
buildInputs = [nodejs yarn watchman gradle_7 alejandra nodePackages.prettier ktlint kotlin-language-server];
# Exposed to the shell as the env var ANDROID_SDK_BIN.
ANDROID_SDK_BIN = android-sdk;
# Runs on shell entry: sets JAVA_HOME, sources the SDK's setup hook, and
# puts the SDK tools on PATH.
# NOTE(review): the last line assigns ORG_GRADLE_PROJECT_ANDROID_HOME
# without `export`, so child processes (e.g. Gradle) will not see it —
# confirm whether `export` was intended. It also reads $ANDROID_HOME,
# presumably set by the sourced setup-hook — verify.
shellHook = ''
export JAVA_HOME=${pkgs.jdk17.home}
source ${android-sdk.out}/nix-support/setup-hook
export PATH=${android-sdk}/bin:$PATH
ORG_GRADLE_PROJECT_ANDROID_HOME="$ANDROID_HOME"
'';
};
});
}