Simplify ChunkedRecorder

Ivan Malison 2024-01-27 19:55:20 -07:00
parent d95057fa47
commit 08f37070a4
7 changed files with 172 additions and 200 deletions
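
At a high level, this change drops the manual drainEncoder() polling loop and the coroutine-channel ChunkedRecorder wrapper in favor of MediaCodec's asynchronous callback API. A minimal, self-contained sketch of that callback pattern follows; the class, method bodies, and comments are illustrative, not the repo's actual code.

import android.media.MediaCodec
import android.media.MediaFormat

// Minimal sketch of MediaCodec's asynchronous (callback) mode, the pattern this
// commit adopts. All names here are illustrative.
class AsyncEncoderSketch(private val encoder: MediaCodec) : MediaCodec.Callback() {

    fun start() {
        encoder.setCallback(this) // register before start() so output arrives via callbacks
        encoder.start()
    }

    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
        // Not used when the encoder is fed through an input Surface.
    }

    override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, info: MediaCodec.BufferInfo) {
        val encoded = codec.getOutputBuffer(index) ?: return
        // ... hand `encoded` and `info` to a MediaMuxer here ...
        codec.releaseOutputBuffer(index, false)
    }

    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
        // Keep this format around: MediaMuxer.addTrack() needs it (it carries csd-0/csd-1).
    }

    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
        // Propagate or log the failure.
    }
}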

View File

@@ -1,68 +1,76 @@
package com.mrousavy.camera.core
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.Channel
import android.media.MediaCodec
import android.media.MediaCodec.BufferInfo
import android.media.MediaCodecInfo
import android.media.MediaFormat
import android.media.MediaMuxer
import android.util.Log
import android.util.Size
import android.view.Surface
import java.io.File
import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.nio.ByteBuffer
import kotlinx.coroutines.*
class ChunkedRecordingManager(
private val encoder: MediaCodec,
private val outputDirectory: File,
private val orientationHint: Int,
) {
class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) :
MediaCodec.Callback() {
companion object {
private const val TAG = "ChunkedRecorder"
private const val targetDurationUs = 10 * 1000000
fun fromParams(
size: Size,
enableAudio: Boolean,
fps: Int? = null,
orientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
iFrameInterval: Int = 3
): ChunkedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val format = MediaFormat.createVideoFormat(mimeType, size.width, size.height)
var width = size.width
var height = size.height
val orientationDegrees = orientation.toDegrees()
if (orientationDegrees == 90 || orientationDegrees == 270) {
width = size.height
height = size.width
}
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
fps?.apply {
format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
}
// TODO: Pull this out into configuration
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format")
Log.i(TAG, "Video Format: $format")
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return ChunkedRecordingManager(codec, outputDirectory, orientation.toDegrees())
return ChunkedRecordingManager(codec, outputDirectory, orientationDegrees, iFrameInterval)
}
}
// In flight details
private val bufferInfo = MediaCodec.BufferInfo()
private var currentFrameNumber: Int = 0
private var chunkIndex = 0
private var chunkIndex = -1
private var encodedFormat: MediaFormat? = null
private var recording = false
private val targetDurationUs = iFrameInterval * 1000000
val surface: Surface = encoder.createInputSurface()
@@ -70,15 +78,11 @@ class ChunkedRecordingManager(
if (!this.outputDirectory.exists()) {
this.outputDirectory.mkdirs()
}
encoder.setCallback(this)
}
// Muxer specific
private class MuxerContext(
muxer: MediaMuxer,
startTimeUs: Long,
encodedFormat: MediaFormat
) {
val muxer = muxer
private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) {
val videoTrack: Int = muxer.addTrack(encodedFormat)
val startTimeUs: Long = startTimeUs
@@ -86,135 +90,80 @@ class ChunkedRecordingManager(
muxer.start()
}
fun finish() {
muxer.stop()
muxer.release()
}
}
private lateinit var muxerContext: MuxerContext
private var muxerContext: MuxerContext? = null
private fun createNextMuxer() {
if (::muxerContext.isInitialized) {
muxerContext.finish()
chunkIndex++
}
private fun createNextMuxer(bufferInfo: BufferInfo) {
muxerContext?.finish()
chunkIndex++
val newFileName = "$chunkIndex.mp4"
val newOutputFile = File(this.outputDirectory, newFileName)
Log.d(TAG, "Creating new muxer for file: $newFileName")
Log.i(TAG, "Creating new muxer for file: $newFileName")
val muxer = MediaMuxer(
newOutputFile.absolutePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4
newOutputFile.absolutePath,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4
)
muxer.setOrientationHint(orientationHint)
muxerContext = MuxerContext(
muxer, bufferInfo.presentationTimeUs, this.encodedFormat!!
)
muxer.setOrientationHint(orientationHint)
}
private fun atKeyframe(): Boolean {
private fun atKeyframe(bufferInfo: BufferInfo): Boolean {
return (bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
}
private fun chunkLengthUs(): Long {
return bufferInfo.presentationTimeUs - muxerContext.startTimeUs
private fun chunkLengthUs(bufferInfo: BufferInfo): Long {
return bufferInfo.presentationTimeUs - muxerContext!!.startTimeUs
}
fun drainEncoder(): Boolean {
val timeout: Long = 0
var frameWasEncoded = false
while (true) {
var encoderStatus: Int = encoder.dequeueOutputBuffer(bufferInfo, timeout)
if (encoderStatus < 0) {
Log.w(
TAG, "Unexpected result from encoder.dequeueOutputBuffer: $encoderStatus"
)
}
when (encoderStatus) {
MediaCodec.INFO_TRY_AGAIN_LATER -> break;
MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
// Should happen before receiving buffers, and should only happen once. The MediaFormat
// contains the csd-0 and csd-1 keys, which we'll need for MediaMuxer. It's unclear what
// else MediaMuxer might want, so rather than extract the codec-specific data and
// reconstruct a new MediaFormat later, we just grab it here and keep it around.
encodedFormat = encoder.outputFormat
Log.d(TAG, "encoder output format changed: $encodedFormat")
}
else -> {
var encodedData: ByteBuffer = encoder.getOutputBuffer(encoderStatus)
?: throw RuntimeException("encoderOutputBuffer $encoderStatus was null")
if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out when we got the
// INFO_OUTPUT_FORMAT_CHANGED status. The MediaMuxer won't accept
// a single big blob -- it wants separate csd-0/csd-1 chunks --
// so simply saving this off won't work.
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG")
bufferInfo.size = 0
}
if (bufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(bufferInfo.offset)
encodedData.limit(bufferInfo.offset + bufferInfo.size)
if (!::muxerContext.isInitialized || (atKeyframe() && chunkLengthUs() >= targetDurationUs)) {
this.createNextMuxer()
}
// TODO: we should probably add the presentation time stamp
// mEncBuffer.add(encodedData, bufferInfo.flags, bufferInfo.presentationTimeUs)
muxerContext.muxer.writeSampleData(muxerContext.videoTrack, encodedData, bufferInfo)
frameWasEncoded = true
}
encoder.releaseOutputBuffer(encoderStatus, false)
if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.w(TAG, "reached end of stream unexpectedly")
break
}
}
}
}
return frameWasEncoded
fun start() {
encoder.start()
recording = true
}
fun finish() {
if (::muxerContext.isInitialized) {
muxerContext.finish()
synchronized(this) {
muxerContext?.finish()
recording = false
muxerContext = null
encoder.stop()
}
}
}
class ChunkedRecorder(private val manager: ChunkedRecordingManager) {
private val messageChannel = Channel<Message>()
init {
CoroutineScope(Dispatchers.Default).launch {
for (msg in messageChannel) {
when (msg) {
is Message.FrameAvailable -> manager.drainEncoder()
is Message.Shutdown -> manager.finish()
}
}
}
}
fun sendFrameAvailable() {
messageChannel.trySend(Message.FrameAvailable)
}
fun sendShutdown() {
messageChannel.trySend(Message.Shutdown)
}
sealed class Message {
object FrameAvailable : Message()
object Shutdown : Message()
}
// MediaCodec.Callback methods
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
}
override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: MediaCodec.BufferInfo) {
synchronized(this) {
if (!recording) {
return
}
val encodedData: ByteBuffer = encoder.getOutputBuffer(index)
?: throw RuntimeException("getOutputBuffer was null")
if (muxerContext == null || (atKeyframe(bufferInfo) && chunkLengthUs(bufferInfo) >= targetDurationUs)) {
this.createNextMuxer(bufferInfo)
}
muxerContext!!.muxer.writeSampleData(muxerContext!!.videoTrack, encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false)
}
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
// Implement error handling
Log.e(TAG, "Codec error: ${e.message}")
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
encodedFormat = format
}
}
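
The rotation rule in onOutputBufferAvailable above cuts a new file only when the current sample is a keyframe and the running chunk has reached the target duration (iFrameInterval seconds, matching KEY_I_FRAME_INTERVAL). A stand-alone restatement of that test, using illustrative names rather than the exact fields above:

import android.media.MediaCodec

// Sketch of the chunk-rotation test: a new chunk is only cut on a keyframe, so
// each numbered .mp4 should start with an I-frame and be decodable on its own.
// Names are illustrative.
fun shouldStartNewChunk(
    info: MediaCodec.BufferInfo,
    chunkStartTimeUs: Long?, // null until the first chunk exists
    targetDurationUs: Long
): Boolean {
    if (chunkStartTimeUs == null) return true // no muxer yet: open chunk 0
    val isKeyframe = (info.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
    val chunkLengthUs = info.presentationTimeUs - chunkStartTimeUs
    return isKeyframe && chunkLengthUs >= targetDurationUs
}

Because the encoder's I-frame interval and the target chunk duration are both derived from iFrameInterval, each chunk should come out roughly one GOP long.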

View File

@@ -10,7 +10,10 @@ import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import com.mrousavy.camera.utils.FileUtils
import java.io.File
import android.os.Environment
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.Date
class RecordingSession(
context: Context,
val cameraId: String,
@@ -33,23 +36,34 @@ class RecordingSession(
data class Video(val path: String, val durationMs: Long, val size: Size)
private val outputPath = File.createTempFile("mrousavy", options.fileType.toExtension(), context.cacheDir)
private val outputPath = run {
val videoDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES)
val sdf = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US)
val videoFileName = "VID_${sdf.format(Date())}"
File(videoDir!!, videoFileName)
}
private val bitRate = getBitRate()
private val recordingManager = ChunkedRecordingManager.fromParams(
size, enableAudio, fps, orientation, options, outputPath
private val recorder = ChunkedRecordingManager.fromParams(
size,
enableAudio,
fps,
orientation,
bitRate,
options,
outputPath
)
private val recorder: ChunkedRecorder = ChunkedRecorder(recordingManager)
private var startTime: Long? = null
val surface: Surface
get() {
return recordingManager.surface
return recorder.surface
}
fun start() {
synchronized(this) {
Log.i(TAG, "Starting RecordingSession..")
startTime = System.currentTimeMillis()
recorder.start()
}
}
@@ -57,14 +71,15 @@ class RecordingSession(
synchronized(this) {
Log.i(TAG, "Stopping RecordingSession..")
try {
recorder.sendShutdown()
recorder.finish()
} catch (e: Error) {
Log.e(TAG, "Failed to stop MediaRecorder!", e)
}
val stopTime = System.currentTimeMillis()
val durationMs = stopTime - (startTime ?: stopTime)
//callback(Video(outputFile.absolutePath, durationMs, size))
Log.i(TAG, "Finished recording video at $outputPath")
callback(Video(outputPath.absolutePath, durationMs, size))
}
}
@@ -113,6 +128,5 @@ class RecordingSession(
}
fun onFrame() {
recorder.sendFrameAvailable()
}
}
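
The output-path change above replaces the single temp file with one timestamped directory per recording under the public Movies folder; ChunkedRecordingManager then writes numbered chunks (0.mp4, 1.mp4, ...) inside it. A rough sketch of that layout, mirroring the diff rather than recommending it (getExternalStoragePublicDirectory is deprecated on recent Android versions and generally needs scoped-storage handling):

import android.os.Environment
import java.io.File
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale

// Illustrative sketch of the directory layout implied by the diff: one
// VID_<timestamp> directory per recording session, filled with 0.mp4, 1.mp4, ...
// by the chunked recorder. The diff itself creates the directory lazily in
// ChunkedRecordingManager's init block.
fun newRecordingDirectory(): File {
    val moviesDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES)
    val stamp = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US).format(Date())
    return File(moviesDir, "VID_$stamp")
}

fun chunkFile(recordingDir: File, chunkIndex: Int): File = File(recordingDir, "$chunkIndex.mp4")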

View File

@@ -91,7 +91,7 @@ class VideoPipeline(
imageWriter = ImageWriter.newInstance(glSurface, MAX_IMAGES)
}
imageReader!!.setOnImageAvailableListener({ reader ->
Log.i(TAG, "ImageReader::onImageAvailable!")
// Log.i(TAG, "ImageReader::onImageAvailable!")
val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
// TODO: Get correct orientation and isMirrored
@@ -153,7 +153,8 @@ class VideoPipeline(
// 5. Draw it with applied rotation/mirroring
onFrame(transformMatrix)
recording?.onFrame()
// 6. Notify the recording session.
recordingSession?.onFrame()
}
}
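
One consequence of the callback-driven encoder shows up here: VideoPipeline still notifies the recording session once per rendered frame, but RecordingSession.onFrame() no longer has to pump the encoder (the old sendFrameAvailable() call is gone), so the hook is now an empty extension point. A small illustrative sketch of the resulting call path, with made-up class names:

// Illustrative only: after this change the encoder drains itself through
// MediaCodec.Callback, so the per-frame notification carries no work.
class RecordingSessionSketch {
    fun onFrame() {
        // Intentionally empty; draining happens in onOutputBufferAvailable().
    }
}

class VideoPipelineSketch(private val recordingSession: RecordingSessionSketch?) {
    fun onFrameRendered() {
        // 5. draw the frame with rotation/mirroring applied, then
        // 6. notify the recording session.
        recordingSession?.onFrame()
    }
}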

View File

@@ -1,7 +1,7 @@
package com.mrousavy.camera.types
import android.media.MediaRecorder
import android.media.MediaFormat
import android.media.MediaRecorder
enum class VideoCodec(override val unionValue: String) : JSUnionValue {
H264("h264"),

View File

@@ -160,9 +160,9 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
examplePlugin(frame)
exampleKotlinSwiftPlugin(frame)
// console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
// examplePlugin(frame)
// exampleKotlinSwiftPlugin(frame)
}, [])
return (

View File

@@ -74,7 +74,7 @@ export function MediaPage({ navigation, route }: Props): React.ReactElement {
}
}, [path, type])
const source = useMemo(() => ({ uri: `file://${path}` }), [path])
const source = useMemo(() => ({ uri: `file://${path}/1.mp4` }), [path])
const screenStyle = useMemo(() => ({ opacity: hasMediaLoaded ? 1 : 0 }), [hasMediaLoaded])

View File

@@ -11,59 +11,67 @@
url = "github:tadfisher/android-nixpkgs";
};
};
outputs = { self, nixpkgs, flake-utils, gitignore, android-nixpkgs, ... }:
flake-utils.lib.eachDefaultSystem (system:
let
pkgs = import nixpkgs { inherit system; };
nodejs = pkgs.nodejs-18_x;
# NOTE: this does not work
appBuild = pkgs.stdenv.mkDerivation {
name = "example-ts-node";
version = "0.1.0";
src = gitignore.lib.gitignoreSource ./.; # uses the gitignore in the repo to only copy files git would see
buildInputs = [ nodejs ];
# https://nixos.org/manual/nixpkgs/stable/#sec-stdenv-phases
buildPhase = ''
# each phase has pre/postHooks. When you make your own phase be sure to still call the hooks
runHook preBuild
npm ci
npm run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
cp -r node_modules $out/node_modules
cp package.json $out/package.json
cp -r dist $out/dist
runHook postInstall
'';
};
android-sdk = android-nixpkgs.sdk.${system} (sdkPkgs: with sdkPkgs; [
cmdline-tools-latest
build-tools-30-0-3
build-tools-33-0-0
build-tools-33-0-1
build-tools-34-0-0
platform-tools
platforms-android-33
platforms-android-34
emulator
ndk-23-1-7779620
cmake-3-22-1
system-images-android-33-google-apis-x86-64
system-images-android-34-google-apis-x86-64
]);
in with pkgs; {
defaultPackage = appBuild;
devShell = mkShell {
buildInputs = [ nodejs yarn watchman gradle_7 alejandra nodePackages.prettier ];
ANDROID_SDK_BIN = android-sdk;
shellHook = ''
export JAVA_HOME=${pkgs.jdk17.home}
source ${android-sdk.out}/nix-support/setup-hook
export PATH=${android-sdk}/bin:$PATH
ORG_GRADLE_PROJECT_ANDROID_HOME="$ANDROID_HOME"
'';
};
});
outputs = {
self,
nixpkgs,
flake-utils,
gitignore,
android-nixpkgs,
...
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
nodejs = pkgs.nodejs-18_x;
# NOTE: this does not work
appBuild = pkgs.stdenv.mkDerivation {
name = "example-ts-node";
version = "0.1.0";
src = gitignore.lib.gitignoreSource ./.; # uses the gitignore in the repo to only copy files git would see
buildInputs = [nodejs];
# https://nixos.org/manual/nixpkgs/stable/#sec-stdenv-phases
buildPhase = ''
# each phase has pre/postHooks. When you make your own phase be sure to still call the hooks
runHook preBuild
npm ci
npm run build
runHook postBuild
'';
installPhase = ''
runHook preInstall
cp -r node_modules $out/node_modules
cp package.json $out/package.json
cp -r dist $out/dist
runHook postInstall
'';
};
android-sdk = android-nixpkgs.sdk.${system} (sdkPkgs:
with sdkPkgs; [
cmdline-tools-latest
build-tools-30-0-3
build-tools-33-0-0
build-tools-33-0-1
build-tools-34-0-0
platform-tools
platforms-android-33
platforms-android-34
emulator
ndk-23-1-7779620
cmake-3-22-1
system-images-android-33-google-apis-x86-64
system-images-android-34-google-apis-x86-64
]);
in
with pkgs; {
defaultPackage = appBuild;
devShell = mkShell {
buildInputs = [nodejs yarn watchman gradle_7 alejandra nodePackages.prettier ktlint kotlin-language-server];
ANDROID_SDK_BIN = android-sdk;
shellHook = ''
export JAVA_HOME=${pkgs.jdk17.home}
source ${android-sdk.out}/nix-support/setup-hook
export PATH=${android-sdk}/bin:$PATH
ORG_GRADLE_PROJECT_ANDROID_HOME="$ANDROID_HOME"
'';
};
});
}