Compare commits

...

10 Commits

Author SHA1 Message Date

fb42545890  some camera orientation stuff, maybe toss?  2024-01-31 16:50:36 -07:00
e82f068b21  Add isLandscape to orientation  2024-01-31 16:50:36 -07:00
08f37070a4  Simplify ChunkedRecorder  2024-01-31 16:50:36 -07:00
d95057fa47  Initial chunked recording implementation  2024-01-31 16:50:34 -07:00
999e789eee  Get flake working  2024-01-31 16:44:01 -07:00

Marc Rousavy  5f339c60c6  chore: Update Podfile.lock  2024-01-31 21:09:42 +01:00

Marc Rousavy  e399df5d1b  chore: Fix CI cache directory (#2492)  2024-01-31 20:48:10 +01:00

Marc Rousavy  ea568855a7  chore: Fix GitHub Action CocoaPods cache misses (#2491)  2024-01-31 20:48:05 +01:00
    * chore: Fix GitHub Action CocoaPods cache misses
    * Only use Pods directory

Marc Rousavy  ae75e22fc0  fix: Fix crash in toArrayBuffer() by properly acquiring a reference on AHardwareBuffer* (#2490)  2024-01-31 20:32:02 +01:00
    * fix: Fix crash in `toArrayBuffer()` by properly acquiring a reference on `AHardwareBuffer*`
    * Format
    * Update Podfile.lock

Marc Rousavy  f896831d4a  fix: Properly check HardwareBuffer usage flags before setting them (#2488)  2024-01-31 20:31:56 +01:00
    * fix: Properly check `HardwareBuffer` usage flags before setting them
    * fix: Use GPU flag if pixel format is NATIVE
    * Update VideoPipeline.kt
    * Add some logs
    * fix: Properly convert ImageFormat to HardwareBufferFormat
    * Update Podfile.lock
    * fix: Add a safe `getHardwareBufferFormat` method
    * Format
19 changed files with 713 additions and 163 deletions

View File

@ -39,7 +39,7 @@ jobs:
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
@ -83,7 +83,7 @@ jobs:
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache

View File

@ -30,7 +30,7 @@ jobs:
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
@ -54,12 +54,9 @@ jobs:
working-directory: package/example/ios
- name: Restore Pods cache
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: |
package/example/ios/Pods
~/Library/Caches/CocoaPods
~/.cocoapods
path: package/example/ios/Pods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
restore-keys: |
${{ runner.os }}-pods-
@ -90,7 +87,7 @@ jobs:
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
@ -116,12 +113,9 @@ jobs:
working-directory: package/example/ios
- name: Restore Pods cache
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: |
package/example/ios/Pods
~/Library/Caches/CocoaPods
~/.cocoapods
path: package/example/ios/Pods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
restore-keys: |
${{ runner.os }}-pods-

View File

@ -36,39 +36,39 @@ jobs:
run:
working-directory: ./package
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: Install reviewdog
uses: reviewdog/action-setup@v1
- name: Install reviewdog
uses: reviewdog/action-setup@v1
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install node_modules
run: yarn install --frozen-lockfile
- name: Install node_modules (example/)
run: yarn install --frozen-lockfile --cwd example
- name: Install node_modules
run: yarn install --frozen-lockfile
- name: Install node_modules (example/)
run: yarn install --frozen-lockfile --cwd example
- name: Run TypeScript # Reviewdog tsc errorformat: %f:%l:%c - error TS%n: %m
run: |
yarn typescript | reviewdog -name="tsc" -efm="%f(%l,%c): error TS%n: %m" -reporter="github-pr-review" -filter-mode="nofilter" -fail-on-error -tee
env:
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run TypeScript # Reviewdog tsc errorformat: %f:%l:%c - error TS%n: %m
run: |
yarn typescript | reviewdog -name="tsc" -efm="%f(%l,%c): error TS%n: %m" -reporter="github-pr-review" -filter-mode="nofilter" -fail-on-error -tee
env:
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run TypeScript in example/ # Reviewdog tsc errorformat: %f:%l:%c - error TS%n: %m
run: |
cd example && yarn typescript | reviewdog -name="tsc" -efm="%f(%l,%c): error TS%n: %m" -reporter="github-pr-review" -filter-mode="nofilter" -fail-on-error -tee && cd ..
env:
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run TypeScript in example/ # Reviewdog tsc errorformat: %f:%l:%c - error TS%n: %m
run: |
cd example && yarn typescript | reviewdog -name="tsc" -efm="%f(%l,%c): error TS%n: %m" -reporter="github-pr-review" -filter-mode="nofilter" -fail-on-error -tee && cd ..
env:
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
lint:
name: Lint JS (eslint, prettier)
@ -77,36 +77,36 @@ jobs:
run:
working-directory: ./package
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v4
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
- name: Restore node_modules from cache
uses: actions/cache@v4
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install node_modules
run: yarn install --frozen-lockfile
- name: Install node_modules (example/)
run: yarn install --frozen-lockfile --cwd example
- name: Install node_modules
run: yarn install --frozen-lockfile
- name: Install node_modules (example/)
run: yarn install --frozen-lockfile --cwd example
- name: Run ESLint
run: yarn lint -f @jamesacarr/github-actions
- name: Run ESLint
run: yarn lint -f @jamesacarr/github-actions
- name: Run ESLint with auto-fix
run: yarn lint --fix
- name: Run ESLint with auto-fix
run: yarn lint --fix
- name: Run ESLint in example/
run: cd example && yarn lint -f @jamesacarr/github-actions && cd ..
- name: Run ESLint in example/
run: cd example && yarn lint -f @jamesacarr/github-actions && cd ..
- name: Run ESLint in example/ with auto-fix
run: cd example && yarn lint --fix && cd ..
- name: Run ESLint in example/ with auto-fix
run: cd example && yarn lint --fix && cd ..
- name: Verify no files have changed after auto-fix
run: git diff --exit-code HEAD
- name: Verify no files have changed after auto-fix
run: git diff --exit-code HEAD

5  package/.envrc  Normal file
View File

@ -0,0 +1,5 @@
use flake . --impure
if [ -f .envrc.local ]; then
source .envrc.local
fi

View File

@ -15,6 +15,8 @@
#include <android/hardware_buffer.h>
#include <android/hardware_buffer_jni.h>
#include "FinalAction.h"
namespace vision {
using namespace facebook;
@ -92,11 +94,13 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
jsi::HostFunctionType toArrayBuffer = JSI_FUNC {
#if __ANDROID_API__ >= 26
AHardwareBuffer* hardwareBuffer = this->frame->getHardwareBuffer();
AHardwareBuffer_acquire(hardwareBuffer);
finally([&]() { AHardwareBuffer_release(hardwareBuffer); });
AHardwareBuffer_Desc bufferDescription;
AHardwareBuffer_describe(hardwareBuffer, &bufferDescription);
__android_log_print(ANDROID_LOG_INFO, "Frame", "Buffer %i x %i @ %i", bufferDescription.width, bufferDescription.height,
bufferDescription.stride);
__android_log_print(ANDROID_LOG_INFO, "Frame", "Converting %i x %i @ %i HardwareBuffer...", bufferDescription.width,
bufferDescription.height, bufferDescription.stride);
size_t size = bufferDescription.height * bufferDescription.stride;
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
@ -118,16 +122,21 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
// Get CPU access to the HardwareBuffer (&buffer is a virtual temporary address)
void* buffer;
AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_MASK, -1, nullptr, &buffer);
int result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_MASK, -1, nullptr, &buffer);
if (result != 0) {
throw jsi::JSError(runtime, "Failed to lock HardwareBuffer for reading!");
}
finally([&]() {
int result = AHardwareBuffer_unlock(hardwareBuffer, nullptr);
if (result != 0) {
throw jsi::JSError(runtime, "Failed to lock HardwareBuffer for reading!");
}
});
// directly write to C++ JSI ArrayBuffer
auto destinationBuffer = arrayBuffer.data(runtime);
memcpy(destinationBuffer, buffer, sizeof(uint8_t) * size);
// Release HardwareBuffer again
AHardwareBuffer_unlock(hardwareBuffer, nullptr);
AHardwareBuffer_release(hardwareBuffer);
return arrayBuffer;
#else
throw jsi::JSError(runtime, "Frame.toArrayBuffer() is only available if minSdkVersion is set to 26 or higher!");

View File

@ -0,0 +1,166 @@
package com.mrousavy.camera.core
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.util.Log
import android.util.Size
import android.view.Surface
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.nio.ByteBuffer
class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) :
MediaCodec.Callback() {
companion object {
private const val TAG = "ChunkedRecorder"
fun fromParams(
size: Size,
enableAudio: Boolean,
fps: Int? = null,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
iFrameInterval: Int = 3
): ChunkedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val orientationDegrees = cameraOrientation.toDegrees()
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
fps?.apply {
format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
}
// TODO: Pull this out into configuration
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.i(TAG, "Video Format: $format, orientation $cameraOrientation")
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval)
}
}
// In flight details
private var currentFrameNumber: Int = 0
private var chunkIndex = -1
private var encodedFormat: MediaFormat? = null
private var recording = false;
private val targetDurationUs = iFrameInterval * 1000000
val surface: Surface = encoder.createInputSurface()
init {
if (!this.outputDirectory.exists()) {
this.outputDirectory.mkdirs()
}
encoder.setCallback(this)
}
// Muxer specific
private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) {
val videoTrack: Int = muxer.addTrack(encodedFormat)
val startTimeUs: Long = startTimeUs
init {
muxer.start()
}
fun finish() {
muxer.stop()
muxer.release()
}
}
private var muxerContext: MuxerContext? = null
private fun createNextMuxer(bufferInfo: BufferInfo) {
muxerContext?.finish()
chunkIndex++
val newFileName = "$chunkIndex.mp4"
val newOutputFile = File(this.outputDirectory, newFileName)
Log.i(TAG, "Creating new muxer for file: $newFileName")
val muxer = MediaMuxer(
newOutputFile.absolutePath,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4
)
muxer.setOrientationHint(orientationHint)
muxerContext = MuxerContext(
muxer, bufferInfo.presentationTimeUs, this.encodedFormat!!
)
}
private fun atKeyframe(bufferInfo: BufferInfo): Boolean {
return (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
}
private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
}
fun start() {
encoder.start()
recording = true
}
fun finish() {
synchronized(this) {
muxerContext?.finish()
recording = false
muxerContext = null
encoder.stop()
}
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: MediaCodec.BufferInfo) {
synchronized(this) {
if (!recording) {
return
}
val encodedData: ByteBuffer = encoder.getOutputBuffer(index)
?: throw RuntimeException("getOutputBuffer was null")
if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
this.createNextMuxer(bufferInfo)
}
muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
// Implement error handling
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
encodedFormat = format
}
}
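
For orientation, a minimal usage sketch of the new ChunkedRecordingManager, mirroring how the RecordingSession diff further down wires it up. The concrete values (resolution, frame rate, bit rate, output directory) are placeholders, not taken from this diff:

```kotlin
import android.content.Context
import android.util.Size
import com.mrousavy.camera.core.ChunkedRecordingManager
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File

// Sketch only: parameter values are placeholders, the wiring mirrors RecordingSession below.
fun startChunkedRecording(context: Context, options: RecordVideoOptions): ChunkedRecordingManager {
  val recorder = ChunkedRecordingManager.fromParams(
    size = Size(1920, 1080),
    enableAudio = false,
    fps = 30,
    cameraOrientation = Orientation.PORTRAIT,
    bitRate = 8_000_000,
    options = options,
    outputDirectory = File(context.filesDir, "chunks") // each chunk is written here as <index>.mp4
  )
  recorder.start() // starts the MediaCodec encoder and begins accepting frames on recorder.surface
  return recorder  // call recorder.finish() to stop encoding and close the last chunk's muxer
}
```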

View File

@ -1,9 +1,6 @@
package com.mrousavy.camera.core
import android.content.Context
import android.media.MediaCodec
import android.media.MediaRecorder
import android.os.Build
import android.util.Log
import android.util.Size
import android.view.Surface
@ -13,7 +10,10 @@ import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import com.mrousavy.camera.utils.FileUtils
import java.io.File
import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
class RecordingSession(
context: Context,
val cameraId: String,
@ -21,7 +21,7 @@ class RecordingSession(
private val enableAudio: Boolean,
private val fps: Int? = null,
private val hdr: Boolean = false,
private val orientation: Orientation,
private val cameraOrientation: Orientation,
private val options: RecordVideoOptions,
private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit
@ -36,69 +36,34 @@ class RecordingSession(
data class Video(val path: String, val durationMs: Long, val size: Size)
private val bitRate = getBitRate()
private val recorder: MediaRecorder
private val outputFile: File
private var startTime: Long? = null
val surface: Surface = MediaCodec.createPersistentInputSurface()
// TODO: Implement HDR
init {
outputFile = FileUtils.createTempFile(context, options.fileType.toExtension())
Log.i(TAG, "Creating RecordingSession for ${outputFile.absolutePath}")
recorder = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) MediaRecorder(context) else MediaRecorder()
if (enableAudio) recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER)
recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE)
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
recorder.setOutputFile(outputFile.absolutePath)
recorder.setVideoEncodingBitRate(bitRate)
recorder.setVideoSize(size.height, size.width)
recorder.setMaxFileSize(getMaxFileSize())
if (fps != null) recorder.setVideoFrameRate(fps)
Log.i(TAG, "Using ${options.videoCodec} Video Codec at ${bitRate / 1_000_000.0} Mbps..")
recorder.setVideoEncoder(options.videoCodec.toVideoEncoder())
if (enableAudio) {
Log.i(TAG, "Adding Audio Channel..")
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC)
recorder.setAudioEncodingBitRate(AUDIO_BIT_RATE)
recorder.setAudioSamplingRate(AUDIO_SAMPLING_RATE)
recorder.setAudioChannels(AUDIO_CHANNELS)
}
recorder.setInputSurface(surface)
// recorder.setOrientationHint(orientation.toDegrees())
recorder.setOnErrorListener { _, what, extra ->
Log.e(TAG, "MediaRecorder Error: $what ($extra)")
stop()
val name = when (what) {
MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN -> "unknown"
MediaRecorder.MEDIA_ERROR_SERVER_DIED -> "server-died"
else -> "unknown"
}
onError(RecorderError(name, extra))
}
recorder.setOnInfoListener { _, what, extra ->
Log.i(TAG, "MediaRecorder Info: $what ($extra)")
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
onError(InsufficientStorageError())
}
}
Log.i(TAG, "Created $this!")
private val outputPath = run {
val videoDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES)
val sdf = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US)
val videoFileName = "VID_${sdf.format(Date())}"
File(videoDir!!, videoFileName)
}
private val bitRate = getBitRate()
private val recorder = ChunkedRecordingManager.fromParams(
size,
enableAudio,
fps,
cameraOrientation,
bitRate,
options,
outputPath
)
private var startTime: Long? = null
val surface: Surface
get() {
return recorder.surface
}
fun start() {
synchronized(this) {
Log.i(TAG, "Starting RecordingSession..")
recorder.prepare()
recorder.start()
startTime = System.currentTimeMillis()
recorder.start()
}
}
@ -106,29 +71,29 @@ class RecordingSession(
synchronized(this) {
Log.i(TAG, "Stopping RecordingSession..")
try {
recorder.stop()
recorder.release()
recorder.finish()
} catch (e: Error) {
Log.e(TAG, "Failed to stop MediaRecorder!", e)
}
val stopTime = System.currentTimeMillis()
val durationMs = stopTime - (startTime ?: stopTime)
callback(Video(outputFile.absolutePath, durationMs, size))
Log.i(TAG, "Finished recording video at $outputPath")
callback(Video(outputPath.absolutePath, durationMs, size))
}
}
fun pause() {
synchronized(this) {
Log.i(TAG, "Pausing Recording Session..")
recorder.pause()
// TODO: Implement pausing
}
}
fun resume() {
synchronized(this) {
Log.i(TAG, "Resuming Recording Session..")
recorder.resume()
// TODO: Implement pausing
}
}
@ -159,6 +124,9 @@ class RecordingSession(
override fun toString(): String {
val audio = if (enableAudio) "with audio" else "without audio"
return "${size.width} x ${size.height} @ $fps FPS ${options.videoCodec} ${options.fileType} " +
"$orientation ${bitRate / 1_000_000.0} Mbps RecordingSession ($audio)"
"$cameraOrientation ${bitRate / 1_000_000.0} Mbps RecordingSession ($audio)"
}
fun onFrame() {
}
}
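
With this change the Video callback reports a directory (outputPath) rather than a single file; ChunkedRecordingManager.createNextMuxer above names each chunk inside it by index, which is why the example app's MediaPage (further down in this compare) points the player at file://${path}/1.mp4. A small hypothetical helper, not part of this diff, for listing the chunks in order:

```kotlin
import java.io.File

// Chunks are written as 0.mp4, 1.mp4, ... inside the directory handed to the Video callback.
fun listChunks(outputDirectory: File): List<File> =
  outputDirectory.listFiles { file -> file.extension == "mp4" }
    .orEmpty()
    .sortedBy { it.nameWithoutExtension.toIntOrNull() ?: Int.MAX_VALUE }
```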

View File

@ -9,6 +9,7 @@ import android.os.Build
import android.util.Log
import android.view.Surface
import androidx.annotation.Keep
import androidx.annotation.RequiresApi
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.mrousavy.camera.frameprocessor.Frame
@ -31,7 +32,7 @@ class VideoPipeline(
val height: Int,
val format: PixelFormat = PixelFormat.NATIVE,
private val isMirrored: Boolean = false,
enableFrameProcessor: Boolean = false,
private val enableFrameProcessor: Boolean = false,
private val callback: CameraSession.Callback
) : SurfaceTexture.OnFrameAvailableListener,
Closeable {
@ -80,10 +81,9 @@ class VideoPipeline(
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
Log.i(TAG, "Using API 29 for GPU ImageReader...")
// If we are in PRIVATE, we just pass it to the GPU as efficiently as possible - so use GPU flag.
// If we are in YUV/RGB/..., we probably want to access Frame data - so use CPU flag.
val usage = if (format == ImageFormat.PRIVATE) HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE else HardwareBuffer.USAGE_CPU_READ_OFTEN
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usage)
val usageFlags = getRecommendedHardwareBufferFlags()
Log.i(TAG, "Using ImageReader flags: $usageFlags")
imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES, usageFlags)
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES, format)
} else {
Log.i(TAG, "Using legacy API for CPU ImageReader...")
@ -91,7 +91,7 @@ class VideoPipeline(
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES)
}
imageReader!!.setOnImageAvailableListener({ reader ->
Log.i(TAG, "ImageReader::onImageAvailable!")
// Log.i(TAG, "ImageReader::onImageAvailable!")
val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
// TODO: Get correct orientation and isMirrored
@ -103,7 +103,7 @@ class VideoPipeline(
if (hasOutputs) {
// If we have outputs (e.g. a RecordingSession), pass the frame along to the OpenGL pipeline
imageWriter!!.queueInputImage(image)
imageWriter?.queueInputImage(image)
}
} catch (e: Throwable) {
Log.e(TAG, "FrameProcessor/ImageReader pipeline threw an error!", e)
@ -152,6 +152,9 @@ class VideoPipeline(
// 5. Draw it with applied rotation/mirroring
onFrame(transformMatrix)
// 6. Notify the recording session.
recordingSession?.onFrame()
}
}
@ -182,6 +185,52 @@ class VideoPipeline(
}
}
/**
* Get the recommended HardwareBuffer flags for creating ImageReader instances with.
*
* Tries to use [HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE] if possible, [HardwareBuffer.USAGE_CPU_READ_OFTEN]
* or a combination of both flags if CPU access is needed ([enableFrameProcessor]), and [0] otherwise.
*/
@RequiresApi(Build.VERSION_CODES.Q)
@Suppress("LiftReturnOrAssignment")
private fun getRecommendedHardwareBufferFlags(): Long {
val cpuFlag = HardwareBuffer.USAGE_CPU_READ_OFTEN
val gpuFlag = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
val bothFlags = gpuFlag or cpuFlag
if (format == PixelFormat.NATIVE) {
// We don't need CPU access, so we can use GPU optimized buffers
if (supportsHardwareBufferFlags(gpuFlag)) {
// We support GPU Buffers directly and
Log.i(TAG, "GPU HardwareBuffers are supported!")
return gpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
} else {
// We are using YUV or RGB formats, so we need CPU access on the Frame
if (supportsHardwareBufferFlags(bothFlags)) {
// We support both CPU and GPU flags!
Log.i(TAG, "GPU + CPU HardwareBuffers are supported!")
return bothFlags
} else if (supportsHardwareBufferFlags(cpuFlag)) {
// We only support a CPU read flag, that's fine
Log.i(TAG, "CPU HardwareBuffers are supported!")
return cpuFlag
} else {
// no flags are supported - fall back to default
return 0
}
}
}
@RequiresApi(Build.VERSION_CODES.Q)
private fun supportsHardwareBufferFlags(flags: Long): Boolean {
val hardwareBufferFormat = format.toHardwareBufferFormat()
return HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, flags)
}
private external fun getInputTextureId(): Int
private external fun onBeforeFrame()
private external fun onFrame(transformMatrix: FloatArray)
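
Condensed, the flag-probing logic added above checks HardwareBuffer.isSupported() before handing usage flags to ImageReader.newInstance(): prefer GPU sampling, add CPU reads only when Frame data must be inspected, and fall back to no flags otherwise. A standalone sketch of that pattern (API 29+, names are illustrative):

```kotlin
import android.hardware.HardwareBuffer
import android.os.Build
import androidx.annotation.RequiresApi

// Condensed form of VideoPipeline.getRecommendedHardwareBufferFlags() above.
@RequiresApi(Build.VERSION_CODES.Q)
fun chooseUsageFlags(width: Int, height: Int, hardwareBufferFormat: Int, needsCpuAccess: Boolean): Long {
  val gpu = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE
  val cpu = HardwareBuffer.USAGE_CPU_READ_OFTEN
  val wanted = if (needsCpuAccess) gpu or cpu else gpu
  return when {
    HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, wanted) -> wanted
    needsCpuAccess && HardwareBuffer.isSupported(width, height, hardwareBufferFormat, 1, cpu) -> cpu
    else -> 0L // nothing usable; let ImageReader pick its defaults
  }
}
```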

View File

@ -8,6 +8,14 @@ enum class Orientation(override val unionValue: String) : JSUnionValue {
PORTRAIT_UPSIDE_DOWN("portrait-upside-down"),
LANDSCAPE_LEFT("landscape-left");
fun isLandscape(): Boolean =
when (this) {
PORTRAIT -> false
PORTRAIT_UPSIDE_DOWN -> false
LANDSCAPE_LEFT -> true
LANDSCAPE_RIGHT -> true
}
fun toDegrees(): Int =
when (this) {
PORTRAIT -> 0

View File

@ -4,6 +4,7 @@ import android.graphics.ImageFormat
import android.util.Log
import com.mrousavy.camera.core.InvalidTypeScriptUnionError
import com.mrousavy.camera.core.PixelFormatNotSupportedError
import com.mrousavy.camera.utils.HardwareBufferUtils
import com.mrousavy.camera.utils.ImageFormatUtils
enum class PixelFormat(override val unionValue: String) : JSUnionValue {
@ -19,6 +20,11 @@ enum class PixelFormat(override val unionValue: String) : JSUnionValue {
else -> throw PixelFormatNotSupportedError(this.unionValue)
}
fun toHardwareBufferFormat(): Int {
val imageFormat = toImageFormat()
return HardwareBufferUtils.getHardwareBufferFormat(imageFormat)
}
companion object : JSUnionValue.Companion<PixelFormat> {
private const val TAG = "PixelFormat"
fun fromImageFormat(imageFormat: Int): PixelFormat =

View File

@ -1,5 +1,6 @@
package com.mrousavy.camera.types
import android.media.MediaFormat
import android.media.MediaRecorder
enum class VideoCodec(override val unionValue: String) : JSUnionValue {
@ -12,6 +13,12 @@ enum class VideoCodec(override val unionValue: String) : JSUnionValue {
H265 -> MediaRecorder.VideoEncoder.HEVC
}
fun toMimeType(): String =
when (this) {
H264 -> MediaFormat.MIMETYPE_VIDEO_AVC
H265 -> MediaFormat.MIMETYPE_VIDEO_HEVC
}
companion object : JSUnionValue.Companion<VideoCodec> {
override fun fromUnionValue(unionValue: String?): VideoCodec =
when (unionValue) {
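
The new toMimeType() is what lets ChunkedRecordingManager.fromParams pass the user's codec choice straight to MediaFormat/MediaCodec; a tiny sketch (the helper name is illustrative):

```kotlin
import android.media.MediaCodec
import android.media.MediaFormat
import com.mrousavy.camera.types.VideoCodec

// H264 -> "video/avc" (MediaFormat.MIMETYPE_VIDEO_AVC), H265 -> "video/hevc" (MIMETYPE_VIDEO_HEVC).
fun createEncoderFor(codec: VideoCodec, width: Int, height: Int): Pair<MediaCodec, MediaFormat> {
  val mimeType = codec.toMimeType()
  val format = MediaFormat.createVideoFormat(mimeType, width, height)
  return MediaCodec.createEncoderByType(mimeType) to format
}
```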

View File

@ -0,0 +1,36 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.media.ImageReader
import android.os.Build
class HardwareBufferUtils {
companion object {
fun getHardwareBufferFormat(imageFormat: Int): Int {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
// Dynamically create an ImageReader with the target ImageFormat, and then
// get its HardwareBuffer format to see what it uses underneath.
val imageReader = ImageReader.newInstance(1, 1, imageFormat, 1, HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE)
val format = imageReader.hardwareBufferFormat
imageReader.close()
return format
}
if (imageFormat == ImageFormat.PRIVATE) {
// PRIVATE formats are opaque, their actual equivalent HardwareBuffer format is unknown.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
// We can assume that YUV 4:2:0 or RGB is used.
return HardwareBuffer.YCBCR_420_888
} else {
// Maybe assume we are on RGB if we're not on API R or above...
return HardwareBuffer.RGB_888
}
}
// According to PublicFormat.cpp in Android's codebase, the formats map 1:1 anyways..
// https://cs.android.com/android/platform/superproject/main/+/main:frameworks/native/libs/ui/PublicFormat.cpp
return imageFormat
}
}
}
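
PixelFormat.toHardwareBufferFormat() (earlier in this compare) funnels into this helper, and VideoPipeline.supportsHardwareBufferFlags() then feeds the result to HardwareBuffer.isSupported(); a compressed sketch of that chain (illustrative only):

```kotlin
import android.graphics.ImageFormat
import android.hardware.HardwareBuffer
import android.os.Build
import androidx.annotation.RequiresApi
import com.mrousavy.camera.utils.HardwareBufferUtils

// ImageFormat -> HardwareBuffer format -> capability probe, as wired up across this compare.
@RequiresApi(Build.VERSION_CODES.Q)
fun cpuReadableYuvSupported(width: Int, height: Int): Boolean {
  val bufferFormat = HardwareBufferUtils.getHardwareBufferFormat(ImageFormat.YUV_420_888)
  return HardwareBuffer.isSupported(width, height, bufferFormat, 1, HardwareBuffer.USAGE_CPU_READ_OFTEN)
}
```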

34  package/cpp/FinalAction.h  Normal file
View File

@ -0,0 +1,34 @@
//
// FinalAction.h
// VisionCamera
//
// Created by Marc Rousavy on 17.01.24.
// Copyright © 2024 mrousavy. All rights reserved.
//
#pragma once
namespace vision {
template <typename F> struct FinalAction {
public:
FinalAction(F f) : clean_{f} {}
~FinalAction() {
if (enabled_)
clean_();
}
void disable() {
enabled_ = false;
};
private:
F clean_;
bool enabled_ = true;
};
} // namespace vision
template <typename F> vision::FinalAction<F> finally(F f) {
return vision::FinalAction<F>(std::move(f));
}

View File

@ -10,7 +10,7 @@
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
# Default value: -Xmx10248m -XX:MaxPermSize=256m
org.gradle.jvmargs=-Xms512M -Xmx4g -XX:MaxPermSize=1024m -XX:MaxMetaspaceSize=1g -Dkotlin.daemon.jvm.options="-Xmx1g"
org.gradle.jvmargs=-Xms512M -Xmx4g -XX:MaxMetaspaceSize=1g -Dkotlin.daemon.jvm.options="-Xmx1g"
org.gradle.parallel=true
org.gradle.daemon=true
org.gradle.configureondemand=true
@ -43,3 +43,5 @@ hermesEnabled=true
#VisionCamera_disableFrameProcessors=true
# Can be set to true to include the full 2.4 MB MLKit dependency
VisionCamera_enableCodeScanner=true
android.aapt2FromMavenOverride=/nix/store/6nrdbhdcmrig3vr80sc7qf9lna5cs1gb-android-sdk-env/share/android-sdk/build-tools/33.0.0/aapt2

View File

@ -27,9 +27,9 @@ PODS:
- libwebp/sharpyuv (1.3.2)
- libwebp/webp (1.3.2):
- libwebp/sharpyuv
- MMKV (1.3.2):
- MMKVCore (~> 1.3.2)
- MMKVCore (1.3.2)
- MMKV (1.3.3):
- MMKVCore (~> 1.3.3)
- MMKVCore (1.3.3)
- RCT-Folly (2021.07.22.00):
- boost
- DoubleConversion
@ -337,7 +337,7 @@ PODS:
- react-native-mmkv (2.11.0):
- MMKV (>= 1.2.13)
- React-Core
- react-native-safe-area-context (4.8.0):
- react-native-safe-area-context (4.8.2):
- React-Core
- react-native-video (5.2.1):
- React-Core
@ -675,8 +675,8 @@ SPEC CHECKSUMS:
hermes-engine: 9180d43df05c1ed658a87cc733dc3044cf90c00a
libevent: 4049cae6c81cdb3654a443be001fb9bdceff7913
libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
MMKV: f21593c0af4b3f2a0ceb8f820f28bb639ea22bb7
MMKVCore: 31b4cb83f8266467eef20a35b6d78e409a11060d
MMKV: f902fb6719da13c2ab0965233d8963a59416f911
MMKVCore: d26e4d3edd5cb8588c2569222cbd8be4231374e9
RCT-Folly: 424b8c9a7a0b9ab2886ffe9c3b041ef628fd4fb1
RCTRequired: 83bca1c184feb4d2e51c72c8369b83d641443f95
RCTTypeSafety: 13c4a87a16d7db6cd66006ce9759f073402ef85b
@ -695,7 +695,7 @@ SPEC CHECKSUMS:
react-native-blur: cfdad7b3c01d725ab62a8a729f42ea463998afa2
react-native-cameraroll: 4701ae7c3dbcd3f5e9e150ca17f250a276154b35
react-native-mmkv: e97c0c79403fb94577e5d902ab1ebd42b0715b43
react-native-safe-area-context: d1c8161a1e9560f7066e8926a7d825eb57c5dab5
react-native-safe-area-context: 0ee144a6170530ccc37a0fd9388e28d06f516a89
react-native-video: c26780b224543c62d5e1b2a7244a5cd1b50e8253
react-native-worklets-core: a894d572639fcf37c6d284cc799882d25d00c93d
React-NativeModulesApple: b6868ee904013a7923128892ee4a032498a1024a
@ -729,4 +729,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 27f53791141a3303d814e09b55770336416ff4eb
COCOAPODS: 1.14.3
COCOAPODS: 1.11.3

View File

@ -160,9 +160,9 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
examplePlugin(frame)
exampleKotlinSwiftPlugin(frame)
// console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
// examplePlugin(frame)
// exampleKotlinSwiftPlugin(frame)
}, [])
return (

View File

@ -74,7 +74,7 @@ export function MediaPage({ navigation, route }: Props): React.ReactElement {
}
}, [path, type])
const source = useMemo(() => ({ uri: `file://${path}` }), [path])
const source = useMemo(() => ({ uri: `file://${path}/1.mp4` }), [path])
const screenStyle = useMemo(() => ({ opacity: hasMediaLoaded ? 1 : 0 }), [hasMediaLoaded])

189  package/flake.lock  Normal file
View File

@ -0,0 +1,189 @@
{
"nodes": {
"android-nixpkgs": {
"inputs": {
"devshell": "devshell",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1701980274,
"narHash": "sha256-uKJIFvsahbWw52TsIht7g2iosXBgJDRMSMoCE1fvEAk=",
"owner": "tadfisher",
"repo": "android-nixpkgs",
"rev": "bce9d437ed54ee1425b66442a12814fee4cdbd51",
"type": "github"
},
"original": {
"owner": "tadfisher",
"repo": "android-nixpkgs",
"type": "github"
}
},
"devshell": {
"inputs": {
"nixpkgs": [
"android-nixpkgs",
"nixpkgs"
],
"systems": "systems"
},
"locked": {
"lastModified": 1701787589,
"narHash": "sha256-ce+oQR4Zq9VOsLoh9bZT8Ip9PaMLcjjBUHVPzW5d7Cw=",
"owner": "numtide",
"repo": "devshell",
"rev": "44ddedcbcfc2d52a76b64fb6122f209881bd3e1e",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "devshell",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_3"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1694102001,
"narHash": "sha256-vky6VPK1n1od6vXbqzOXnekrQpTL4hbPAwUhT5J9c9E=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "9e21c80adf67ebcb077d75bd5e7d724d21eeafd6",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1701718080,
"narHash": "sha256-6ovz0pG76dE0P170pmmZex1wWcQoeiomUZGggfH9XPs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2c7f3c0fb7c08a0814627611d9d7d45ab6d75335",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1702312524,
"narHash": "sha256-gkZJRDBUCpTPBvQk25G0B7vfbpEYM5s5OZqghkjZsnE=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a9bf124c46ef298113270b1f84a164865987a91c",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"android-nixpkgs": "android-nixpkgs",
"flake-utils": "flake-utils_2",
"gitignore": "gitignore",
"nixpkgs": "nixpkgs_2"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

77  package/flake.nix  Normal file
View File

@ -0,0 +1,77 @@
{
description = "Sample Nix ts-node build";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
gitignore = {
url = "github:hercules-ci/gitignore.nix";
inputs.nixpkgs.follows = "nixpkgs";
};
android-nixpkgs = {
url = "github:tadfisher/android-nixpkgs";
};
};
outputs = {
self,
nixpkgs,
flake-utils,
gitignore,
android-nixpkgs,
...
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
nodejs = pkgs.nodejs-18_x;
# NOTE: this does not work
appBuild = pkgs.stdenv.mkDerivation {
name = "example-ts-node";
version = "0.1.0";
src = gitignore.lib.gitignoreSource ./.; # uses the gitignore in the repo to only copy files git would see
buildInputs = [nodejs];
# https://nixos.org/manual/nixpkgs/stable/#sec-stdenv-phases
buildPhase = ''
# each phase has pre/postHooks. When you make your own phase be sure to still call the hooks
runHook preBuild
npm ci
npm run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
cp -r node_modules $out/node_modules
cp package.json $out/package.json
cp -r dist $out/dist
runHook postInstall
'';
};
android-sdk = android-nixpkgs.sdk.${system} (sdkPkgs:
with sdkPkgs; [
cmdline-tools-latest
build-tools-30-0-3
build-tools-33-0-0
build-tools-33-0-1
build-tools-34-0-0
platform-tools
platforms-android-33
platforms-android-34
emulator
ndk-23-1-7779620
cmake-3-22-1
system-images-android-33-google-apis-x86-64
system-images-android-34-google-apis-x86-64
]);
in
with pkgs; {
defaultPackage = appBuild;
devShell = mkShell {
buildInputs = [nodejs yarn watchman gradle_7 alejandra nodePackages.prettier ktlint kotlin-language-server];
ANDROID_SDK_BIN = android-sdk;
shellHook = ''
export JAVA_HOME=${pkgs.jdk17.home}
source ${android-sdk.out}/nix-support/setup-hook
export PATH=${android-sdk}/bin:$PATH
ORG_GRADLE_PROJECT_ANDROID_HOME="$ANDROID_HOME"
'';
};
});
}