Compare commits

..

29 Commits

Author SHA1 Message Date
8a6537c914 Merge pull request 'Rotate frag recording output instead of relying on muxer metadata' (#24) from loewy/android-frag-mp4-video-orientation into main
Reviewed-on: #24
2026-03-27 19:10:35 +00:00
fde8a47a2d rotate frag recording output instead of relying on muxer metadata 2026-03-26 15:54:20 -07:00
b5cde91062 Merge pull request 'Fix android crash when setting pixels' (#23) from loewy/fix-remove-set-get-Pixel-from-transform-bitmap into main
Reviewed-on: #23
2026-03-09 23:11:28 +00:00
0e665528fe remove setPixel/getPixel entirely from transformBitmap to address crash 2026-02-16 13:11:08 -08:00
45b16b5189 Merge pull request 'Fix IllegalStateException crash when camera device closes during focus' (#22) from loewy/fix-camera-device-closed-crash into main
Reviewed-on: #22
2026-01-30 23:24:09 +00:00
2d570c9af2 fix IllegalStateException crash when camera device closes during focus 2026-01-30 15:15:50 -08:00
31e7b8bd35 Merge pull request 'Catch focus timeout and negative value errs' (#21) from loewy/fix-android-focus-timeout-crash into main
Reviewed-on: #21
2026-01-27 00:50:04 +00:00
3bb72d5d94 catch negative values set by out of preview bound press 2026-01-21 12:58:13 -08:00
ac5dac127f catch focus timeout error on android 2026-01-21 12:58:13 -08:00
e3de8c018c Merge pull request 'WIP fix: Skip NAL header byte when reading SPS profile data in HlsMuxer' (#20) from fix/hlsmuxer-codec-string into main
Reviewed-on: #20
2026-01-21 20:57:30 +00:00
Dean
dd26812a9c fix: Add pasp box to declare square pixels (1:1) for web playback
The codec string fix caused videos to appear squished on web players
like Shaka. Adding an explicit pixel aspect ratio (pasp) box with
1:1 ratio tells the player not to apply any SAR scaling.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 12:22:43 -08:00
Dean
b716608379 fix: Skip NAL header byte when reading SPS profile data in HlsMuxer
The SPS NAL unit format is: [NAL header, profile_idc, constraint_flags, level_idc, ...]
The code was incorrectly reading from byte 0 (NAL header, typically 0x67)
instead of byte 1 (profile_idc).

This produced invalid codec strings like `avc1.676400` instead of valid
ones like `avc1.64001f`, causing Shaka Player on web to fail with error
4032 (unable to parse codec).

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 11:52:08 -08:00
0ecc3d8210 Merge pull request 'fix: Handle both file:// and file:/ URI prefixes' (#19) from dean/fix-file-prefix-fmp4 into main
Reviewed-on: #19
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2026-01-06 19:41:32 +00:00
309e1e9457 Merge branch 'main' into dean/fix-file-prefix-fmp4 2026-01-06 17:38:24 +00:00
71b08e6898 Merge pull request 'Android Fmp4' (#17) from loewy/android-fmp4-normalize-timestamp-fix-fps into main
Reviewed-on: #17
2026-01-06 17:21:29 +00:00
Dean
699481f6f8 fix: Handle both file:// and file:/ URI prefixes
The previous code only stripped file:// (double slash) but some paths
come with file:/ (single slash), causing FileNotFoundException.

Fixes RAILBIRD-FRONTEND-1JH

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-06 08:45:39 -08:00
11ce9ba8f6 ensure compatibility with rb chunked and fmp4, move orientation detection for rb chunked to chunked manager 2026-01-03 13:40:09 -08:00
dd9de38a7d use window manager to determine device rotation in android 2026-01-02 10:04:49 -08:00
3f5d0a2109 fix fps (alter tfhd and trun size, add logs) 2026-01-02 10:04:49 -08:00
6c2319608d normalize timestamps and fix framerate metadata in init file 2026-01-02 10:04:47 -08:00
27f127fe94 Fix orientation issues 2026-01-02 10:02:51 -08:00
92b29cbd78 Write our own muxer to make hls upload actually work 2026-01-02 10:02:51 -08:00
fb23c57a6c feat: Add fragmented MP4 (fMP4) support for Android
Implements HLS-compatible fragmented MP4 recording on Android using
AndroidX Media3 FragmentedMp4Muxer, matching the iOS implementation.

Changes:
- Add FragmentedRecordingManager for fMP4 segment output
- Add ChunkedRecorderInterface to abstract recorder implementations
- Add onInitSegmentReady callback for init segment (init.mp4)
- Update onVideoChunkReady to include segment duration
- RecordingSession now uses FragmentedRecordingManager by default

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-02 10:02:51 -08:00
8d06ab9e66 Merge pull request 'Strip file prefix (for expo-file-system)' (#15) from loewy/stripe-file-prefix into main
Reviewed-on: #15
Reviewed-by: Ivan Malison <ivanmalison@gmail.com>
2025-12-26 17:26:42 +00:00
f6b6cfb3d5 strip file prefix 2025-12-19 12:55:41 -08:00
3ac555a2b3 Merge pull request 'Stop recording on init write failure' (#14) from loewy/stop-recording-on-init-write-failure into main
Reviewed-on: #14
2025-12-19 20:53:06 +00:00
7e1e074e0f force recording to stop on init write failure and fix silent failure 2025-12-18 13:29:31 -08:00
b269e9c493 fix silent init file write failure 2025-12-18 13:09:34 -08:00
5fe7f35127 Merge pull request 'Fix orientation Android - get orientation directly from WindowManager' (#11) from loewy/fix-android-orientation-bugs into main
Reviewed-on: #11
2025-12-17 23:20:52 +00:00
19 changed files with 302 additions and 111 deletions

View File

@@ -17,15 +17,18 @@
namespace vision { namespace vision {
std::unique_ptr<OpenGLRenderer> OpenGLRenderer::CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) { std::unique_ptr<OpenGLRenderer> OpenGLRenderer::CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context,
return std::unique_ptr<OpenGLRenderer>(new OpenGLRenderer(std::move(context), surface)); ANativeWindow* surface,
int rotationDegrees) {
return std::unique_ptr<OpenGLRenderer>(new OpenGLRenderer(std::move(context), surface, rotationDegrees));
} }
OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) { OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface, int rotationDegrees) {
_context = std::move(context); _context = std::move(context);
_outputSurface = surface; _outputSurface = surface;
_width = ANativeWindow_getWidth(surface); _width = ANativeWindow_getWidth(surface);
_height = ANativeWindow_getHeight(surface); _height = ANativeWindow_getHeight(surface);
_rotationDegrees = rotationDegrees;
} }
OpenGLRenderer::~OpenGLRenderer() { OpenGLRenderer::~OpenGLRenderer() {
@@ -66,7 +69,7 @@ void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float*
glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// 4. Draw it using the pass-through shader which also applies transforms // 4. Draw it using the pass-through shader which also applies transforms
_passThroughShader.draw(texture, transformMatrix); _passThroughShader.draw(texture, transformMatrix, _rotationDegrees);
// 5. Swap buffers to pass it to the window surface // 5. Swap buffers to pass it to the window surface
eglSwapBuffers(_context->display, _surface); eglSwapBuffers(_context->display, _surface);

View File

@@ -24,7 +24,7 @@ public:
* Note: The `surface` is considered moved, and the OpenGL context will release it when it is * Note: The `surface` is considered moved, and the OpenGL context will release it when it is
* being deleted. * being deleted.
*/ */
static std::unique_ptr<OpenGLRenderer> CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface); static std::unique_ptr<OpenGLRenderer> CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface, int rotationDegrees);
/** /**
* Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called. * Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
*/ */
@@ -43,10 +43,11 @@ public:
void destroy(); void destroy();
private: private:
explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface); explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface, int rotationDegrees);
private: private:
int _width = 0, _height = 0; int _width = 0, _height = 0;
int _rotationDegrees = 0;
std::shared_ptr<OpenGLContext> _context; std::shared_ptr<OpenGLContext> _context;
ANativeWindow* _outputSurface; ANativeWindow* _outputSurface;
EGLSurface _surface = EGL_NO_SURFACE; EGLSurface _surface = EGL_NO_SURFACE;

View File

@@ -7,11 +7,76 @@
#include <EGL/egl.h> #include <EGL/egl.h>
#include <GLES2/gl2.h> #include <GLES2/gl2.h>
#include <GLES2/gl2ext.h> #include <GLES2/gl2ext.h>
#include <cmath>
#include <memory> #include <memory>
#include <string> #include <string>
namespace vision { namespace vision {
namespace {
void setIdentity(float* matrix) {
for (int i = 0; i < 16; i++) {
matrix[i] = 0.0f;
}
matrix[0] = 1.0f;
matrix[5] = 1.0f;
matrix[10] = 1.0f;
matrix[15] = 1.0f;
}
void multiply4x4(const float* left, const float* right, float* out) {
for (int column = 0; column < 4; column++) {
for (int row = 0; row < 4; row++) {
float sum = 0.0f;
for (int k = 0; k < 4; k++) {
sum += left[k * 4 + row] * right[column * 4 + k];
}
out[column * 4 + row] = sum;
}
}
}
void makeTranslation(float tx, float ty, float* matrix) {
setIdentity(matrix);
matrix[12] = tx;
matrix[13] = ty;
}
void makeRotation(float degrees, float* matrix) {
setIdentity(matrix);
const float radians = degrees * static_cast<float>(M_PI) / 180.0f;
const float cosine = std::cos(radians);
const float sine = std::sin(radians);
matrix[0] = cosine;
matrix[1] = sine;
matrix[4] = -sine;
matrix[5] = cosine;
}
void makeCenteredRotation(int rotationDegrees, float* matrix) {
const int normalized = ((rotationDegrees % 360) + 360) % 360;
if (normalized == 0) {
setIdentity(matrix);
return;
}
float translateToOrigin[16];
float rotation[16];
float translateBack[16];
float temp[16];
makeTranslation(-0.5f, -0.5f, translateToOrigin);
makeRotation(static_cast<float>(normalized), rotation);
makeTranslation(0.5f, 0.5f, translateBack);
multiply4x4(rotation, translateToOrigin, temp);
multiply4x4(translateBack, temp, matrix);
}
} // namespace
PassThroughShader::~PassThroughShader() { PassThroughShader::~PassThroughShader() {
if (_programId != NO_SHADER) { if (_programId != NO_SHADER) {
glDeleteProgram(_programId); glDeleteProgram(_programId);
@@ -24,7 +89,7 @@ PassThroughShader::~PassThroughShader() {
} }
} }
void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) { void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix, int rotationDegrees) {
// 1. Set up Shader Program // 1. Set up Shader Program
if (_programId == NO_SHADER) { if (_programId == NO_SHADER) {
_programId = createProgram(); _programId = createProgram();
@@ -57,7 +122,12 @@ void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatri
glVertexAttribPointer(_vertexParameters.aTexCoord, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), glVertexAttribPointer(_vertexParameters.aTexCoord, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, texCoord))); reinterpret_cast<void*>(offsetof(Vertex, texCoord)));
glUniformMatrix4fv(_vertexParameters.uTransformMatrix, 1, GL_FALSE, transformMatrix); float outputRotationMatrix[16];
float combinedTransformMatrix[16];
makeCenteredRotation(rotationDegrees, outputRotationMatrix);
multiply4x4(transformMatrix, outputRotationMatrix, combinedTransformMatrix);
glUniformMatrix4fv(_vertexParameters.uTransformMatrix, 1, GL_FALSE, combinedTransformMatrix);
// 4. Pass texture to fragment shader // 4. Pass texture to fragment shader
glActiveTexture(GL_TEXTURE0); glActiveTexture(GL_TEXTURE0);

View File

@@ -29,7 +29,7 @@ public:
* Draw the texture using this shader. * Draw the texture using this shader.
* Note: At the moment, only EXTERNAL textures are supported by the Shader. * Note: At the moment, only EXTERNAL textures are supported by the Shader.
*/ */
void draw(const OpenGLTexture& texture, float* transformMatrix); void draw(const OpenGLTexture& texture, float* transformMatrix, int rotationDegrees);
private: private:
// Loading // Loading

View File

@@ -47,13 +47,13 @@ void VideoPipeline::removeRecordingSessionOutputSurface() {
_recordingSessionOutput = nullptr; _recordingSessionOutput = nullptr;
} }
void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) { void VideoPipeline::setRecordingSessionOutputSurface(jobject surface, int rotationDegrees) {
// 1. Delete existing output surface // 1. Delete existing output surface
removeRecordingSessionOutputSurface(); removeRecordingSessionOutputSurface();
// 2. Set new output surface if it is not null // 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface); ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window); _recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window, rotationDegrees);
} }
int VideoPipeline::getInputTextureId() { int VideoPipeline::getInputTextureId() {
@@ -78,7 +78,6 @@ void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixPara
OpenGLTexture& texture = _inputTexture.value(); OpenGLTexture& texture = _inputTexture.value();
if (_recordingSessionOutput) { if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(texture, transformMatrix); _recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
} }
} }

View File

@@ -31,7 +31,7 @@ public:
int getInputTextureId(); int getInputTextureId();
// <- MediaRecorder output // <- MediaRecorder output
void setRecordingSessionOutputSurface(jobject surface); void setRecordingSessionOutputSurface(jobject surface, int rotationDegrees);
void removeRecordingSessionOutputSurface(); void removeRecordingSessionOutputSurface();
// Frame callbacks // Frame callbacks

View File

@@ -30,8 +30,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
iFrameInterval: Int = 5 iFrameInterval: Int = 5
): ChunkedRecordingManager { ): ChunkedRecordingManager {
val mimeType = options.videoCodec.toMimeType() val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees() // Use cameraOrientation (from WindowManager) for rotation metadata
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees(); // The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes
// Note: MediaMuxer.setOrientationHint() uses opposite convention from HlsMuxer's rotation matrix
// We need to invert the rotation: 90 <-> 270, while 0 and 180 stay the same
val orientationDegrees = when (cameraOrientation.toDegrees()) {
90 -> 270
270 -> 90
else -> cameraOrientation.toDegrees()
}
val (width, height) = if (cameraOrientation.isLandscape()) { val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width size.height to size.width
} else { } else {
@@ -55,12 +62,12 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval) format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate) format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees") Log.d(TAG, "Video Format: $format, orientation: $orientationDegrees")
// Create a MediaCodec encoder, and configure it with our format. Get a Surface // Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work. // we can use for input and wrap it with a class that handles the EGL work.
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE) codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return ChunkedRecordingManager( return ChunkedRecordingManager(
codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks codec, outputDirectory, orientationDegrees, iFrameInterval, callbacks
) )
} }
} }
@@ -91,12 +98,13 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
muxer.start() muxer.start()
} }
fun writeSample(buffer: java.nio.ByteBuffer, bufferInfo: BufferInfo) {
muxer.writeSampleData(videoTrack, buffer, bufferInfo)
}
fun finish() { fun finish() {
muxer.stop() muxer.stop()
muxer.release() muxer.release()
// Calculate duration from start time - this is approximate
// The new FragmentedRecordingManager provides accurate duration
callbacks.onVideoChunkReady(filepath, chunkIndex, null) callbacks.onVideoChunkReady(filepath, chunkIndex, null)
} }
} }
@@ -170,7 +178,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
encoder.releaseOutputBuffer(index, false) encoder.releaseOutputBuffer(index, false)
return return
} }
context.muxer.writeSampleData(context.videoTrack, encodedData, bufferInfo) context.writeSample(encodedData, bufferInfo)
encoder.releaseOutputBuffer(index, false) encoder.releaseOutputBuffer(index, false)
} }
} }

View File

@@ -39,18 +39,11 @@ class FragmentedRecordingManager(
segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS segmentDurationSeconds: Int = DEFAULT_SEGMENT_DURATION_SECONDS
): FragmentedRecordingManager { ): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType() val mimeType = options.videoCodec.toMimeType()
// Use cameraOrientation (from WindowManager) for rotation metadata val cameraOrientationDegrees = cameraOrientation.toDegrees()
// The options.orientation from JavaScript is unreliable on Android when rotating between landscape modes val orientationDegrees = 0
val orientationDegrees = cameraOrientation.toDegrees() val (width, height) = size.width to size.height
// Swap dimensions based on camera orientation, same as ChunkedRecordingManager Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees° (cameraOrientation=$cameraOrientationDegrees°)")
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
}
Log.d(TAG, "Recording: ${width}x${height}, orientation=$orientationDegrees°")
val format = MediaFormat.createVideoFormat(mimeType, width, height) val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType) val codec = MediaCodec.createEncoderByType(mimeType)

View File

@@ -753,17 +753,32 @@ class HlsMuxer(
dos.writeShort(-1) // pre-defined dos.writeShort(-1) // pre-defined
output.write(buildAvcCBox(sps, pps)) output.write(buildAvcCBox(sps, pps))
output.write(buildPaspBox())
return wrapBox("avc1", output.toByteArray()) return wrapBox("avc1", output.toByteArray())
} }
/**
* Builds pixel aspect ratio box to explicitly declare square pixels (1:1).
* This helps players correctly interpret video dimensions without SAR scaling.
*/
private fun buildPaspBox(): ByteArray {
val output = ByteArrayOutputStream()
val dos = DataOutputStream(output)
dos.writeInt(1) // hSpacing (horizontal)
dos.writeInt(1) // vSpacing (vertical)
return wrapBox("pasp", output.toByteArray())
}
private fun buildAvcCBox(sps: ByteArray, pps: ByteArray): ByteArray { private fun buildAvcCBox(sps: ByteArray, pps: ByteArray): ByteArray {
val output = ByteArrayOutputStream() val output = ByteArrayOutputStream()
val dos = DataOutputStream(output) val dos = DataOutputStream(output)
val profileIdc = if (sps.isNotEmpty()) sps[0].toInt() and 0xFF else 0x42 // SPS NAL unit format: [NAL header, profile_idc, constraint_flags, level_idc, ...]
val profileCompat = if (sps.size > 1) sps[1].toInt() and 0xFF else 0x00 // Skip byte 0 (NAL header, typically 0x67) to get the actual profile data
val levelIdc = if (sps.size > 2) sps[2].toInt() and 0xFF else 0x1F val profileIdc = if (sps.size > 1) sps[1].toInt() and 0xFF else 0x42
val profileCompat = if (sps.size > 2) sps[2].toInt() and 0xFF else 0x00
val levelIdc = if (sps.size > 3) sps[3].toInt() and 0xFF else 0x1F
dos.writeByte(1) // configuration version dos.writeByte(1) // configuration version
dos.writeByte(profileIdc) // AVC profile dos.writeByte(profileIdc) // AVC profile

View File

@@ -235,10 +235,19 @@ class PersistentCameraCaptureSession(private val cameraManager: CameraManager, p
// 1. Run a precapture sequence for AF, AE and AWB. // 1. Run a precapture sequence for AF, AE and AWB.
focusJob = coroutineScope.launch { focusJob = coroutineScope.launch {
try {
val request = repeatingRequest.createCaptureRequest(device, deviceDetails, outputs) val request = repeatingRequest.createCaptureRequest(device, deviceDetails, outputs)
val options = val options =
PrecaptureOptions(listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE), Flash.OFF, listOf(point), false, FOCUS_RESET_TIMEOUT) PrecaptureOptions(listOf(PrecaptureTrigger.AF, PrecaptureTrigger.AE), Flash.OFF, listOf(point), false, FOCUS_RESET_TIMEOUT)
session.precapture(request, deviceDetails, options) session.precapture(request, deviceDetails, options)
} catch (e: CaptureTimedOutError) {
// Focus timed out - this is non-fatal, just log and continue
Log.w(TAG, "Focus timed out at point $point, continuing without focus lock")
} catch (e: IllegalStateException) {
Log.w(TAG, "Focus failed, camera device was already closed: ${e.message}")
} catch (e: CameraAccessException) {
Log.w(TAG, "Focus failed, camera not accessible: ${e.message}")
}
} }
focusJob?.join() focusJob?.join()
@@ -254,9 +263,15 @@ class PersistentCameraCaptureSession(private val cameraManager: CameraManager, p
return@launch return@launch
} }
Log.i(TAG, "Resetting focus to auto-focus...") Log.i(TAG, "Resetting focus to auto-focus...")
try {
repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request -> repeatingRequest.createCaptureRequest(device, deviceDetails, outputs).also { request ->
session.setRepeatingRequest(request.build(), null, null) session.setRepeatingRequest(request.build(), null, null)
} }
} catch (e: IllegalStateException) {
Log.w(TAG, "Failed to reset focus, camera device was already closed: ${e.message}")
} catch (e: CameraAccessException) {
Log.w(TAG, "Failed to reset focus, camera not accessible: ${e.message}")
}
} }
} }
} }

View File

@@ -5,7 +5,7 @@ import android.content.Context
import android.content.res.Configuration import android.content.res.Configuration
import android.graphics.Point import android.graphics.Point
import android.os.Handler import android.os.Handler
import android.os.Looper import android.os.HandlerThread
import android.util.Log import android.util.Log
import android.util.Size import android.util.Size
import android.view.PixelCopy import android.view.PixelCopy
@@ -25,58 +25,72 @@ import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.suspendCancellableCoroutine import kotlinx.coroutines.suspendCancellableCoroutine
import kotlinx.coroutines.withContext import kotlinx.coroutines.withContext
import android.graphics.Bitmap import android.graphics.Bitmap
import android.graphics.Matrix
fun rotateBitmap90CounterClockwise(source: Bitmap): Bitmap {
val width = source.width
val height = source.height
// Create a new Bitmap with swapped width and height
val rotatedBitmap = Bitmap.createBitmap(height, width, source.config ?: Bitmap.Config.ARGB_8888)
for (y in 0 until height) {
for (x in 0 until width) {
// Set the pixel in the new position
rotatedBitmap.setPixel(y, width - 1 - x, source.getPixel(x, y))
}
}
return rotatedBitmap
}
fun Bitmap.transformBitmap(orientation: Orientation): Bitmap { fun Bitmap.transformBitmap(orientation: Orientation): Bitmap {
return when (orientation) { return when (orientation) {
Orientation.PORTRAIT -> this // No transformation needed Orientation.PORTRAIT -> this // No transformation needed
Orientation.LANDSCAPE_LEFT -> { Orientation.LANDSCAPE_LEFT -> {
// Transpose (swap width and height) val srcWidth = width
val transposedBitmap = Bitmap.createBitmap(height, width, config ?: Bitmap.Config.ARGB_8888) val srcHeight = height
for (y in 0 until height) { val sourcePixels = IntArray(srcWidth * srcHeight)
for (x in 0 until width) { getPixels(sourcePixels, 0, srcWidth, 0, 0, srcWidth, srcHeight)
transposedBitmap.setPixel(y, width - 1 - x, getPixel(x, y))
val dstWidth = srcHeight
val dstHeight = srcWidth
val destinationPixels = IntArray(dstWidth * dstHeight)
for (y in 0 until srcHeight) {
for (x in 0 until srcWidth) {
val dstX = y
val dstY = srcWidth - 1 - x
destinationPixels[dstY * dstWidth + dstX] = sourcePixels[y * srcWidth + x]
} }
} }
transposedBitmap
val transformedBitmap = Bitmap.createBitmap(dstWidth, dstHeight, config ?: Bitmap.Config.ARGB_8888)
transformedBitmap.setPixels(destinationPixels, 0, dstWidth, 0, 0, dstWidth, dstHeight)
transformedBitmap
} }
Orientation.PORTRAIT_UPSIDE_DOWN -> { Orientation.PORTRAIT_UPSIDE_DOWN -> {
// Invert vertically and horizontally (180-degree rotation) val srcWidth = width
val invertedBitmap = Bitmap.createBitmap(width, height, config ?: Bitmap.Config.ARGB_8888) val srcHeight = height
for (y in 0 until height) { val sourcePixels = IntArray(srcWidth * srcHeight)
for (x in 0 until width) { getPixels(sourcePixels, 0, srcWidth, 0, 0, srcWidth, srcHeight)
invertedBitmap.setPixel(width - 1 - x, height - 1 - y, getPixel(x, y))
val dstWidth = srcWidth
val dstHeight = srcHeight
val destinationPixels = IntArray(dstWidth * dstHeight)
for (y in 0 until srcHeight) {
for (x in 0 until srcWidth) {
val dstX = srcWidth - 1 - x
val dstY = srcHeight - 1 - y
destinationPixels[dstY * dstWidth + dstX] = sourcePixels[y * srcWidth + x]
} }
} }
invertedBitmap
val transformedBitmap = Bitmap.createBitmap(dstWidth, dstHeight, config ?: Bitmap.Config.ARGB_8888)
transformedBitmap.setPixels(destinationPixels, 0, dstWidth, 0, 0, dstWidth, dstHeight)
transformedBitmap
} }
Orientation.LANDSCAPE_RIGHT -> { Orientation.LANDSCAPE_RIGHT -> {
// Transpose (swap width and height) and invert vertically val srcWidth = width
val transposedBitmap = Bitmap.createBitmap(height, width, config ?: Bitmap.Config.ARGB_8888) val srcHeight = height
for (y in 0 until height) { val sourcePixels = IntArray(srcWidth * srcHeight)
for (x in 0 until width) { getPixels(sourcePixels, 0, srcWidth, 0, 0, srcWidth, srcHeight)
transposedBitmap.setPixel(height - 1 - y, x, getPixel(x, y))
val dstWidth = srcHeight
val dstHeight = srcWidth
val destinationPixels = IntArray(dstWidth * dstHeight)
for (y in 0 until srcHeight) {
for (x in 0 until srcWidth) {
val dstX = srcHeight - 1 - y
val dstY = x
destinationPixels[dstY * dstWidth + dstX] = sourcePixels[y * srcWidth + x]
} }
} }
transposedBitmap
val transformedBitmap = Bitmap.createBitmap(dstWidth, dstHeight, config ?: Bitmap.Config.ARGB_8888)
transformedBitmap.setPixels(destinationPixels, 0, dstWidth, 0, 0, dstWidth, dstHeight)
transformedBitmap
} }
} }
} }
@@ -186,7 +200,7 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
) )
} }
}, },
Handler(Looper.getMainLooper()) pixelCopyHandler
) )
} }
} }
@@ -198,8 +212,10 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
val viewOrientation = Orientation.PORTRAIT val viewOrientation = Orientation.PORTRAIT
val rotated = point.rotatedBy(viewSize, cameraSize, viewOrientation, sensorOrientation) val rotated = point.rotatedBy(viewSize, cameraSize, viewOrientation, sensorOrientation)
Log.i(TAG, "Converted layer point $point to camera point $rotated! ($sensorOrientation, $cameraSize -> $viewSize)") // Clamp to valid camera coordinates (must be non-negative for MeteringRectangle)
return rotated val clamped = Point(maxOf(0, rotated.x), maxOf(0, rotated.y))
Log.i(TAG, "Converted layer point $point to camera point $clamped! ($sensorOrientation, $cameraSize -> $viewSize)")
return clamped
} }
private fun updateLayout() { private fun updateLayout() {
@@ -254,5 +270,10 @@ class PreviewView(context: Context, callback: SurfaceHolder.Callback) :
companion object { companion object {
private const val TAG = "PreviewView" private const val TAG = "PreviewView"
private val pixelCopyHandler: Handler by lazy {
val handlerThread = HandlerThread("VisionCamera.PixelCopy")
handlerThread.start()
Handler(handlerThread.looper)
}
} }
} }

View File

@@ -8,6 +8,7 @@ import com.facebook.common.statfs.StatFsHelper
import com.mrousavy.camera.extensions.getRecommendedBitRate import com.mrousavy.camera.extensions.getRecommendedBitRate
import com.mrousavy.camera.types.Orientation import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions import com.mrousavy.camera.types.RecordVideoOptions
import com.mrousavy.camera.types.StreamSegmentType
import com.mrousavy.camera.utils.FileUtils import com.mrousavy.camera.utils.FileUtils
import java.io.File import java.io.File
import android.os.Environment import android.os.Environment
@@ -27,9 +28,7 @@ class RecordingSession(
private val filePath: String, private val filePath: String,
private val callback: (video: Video) -> Unit, private val callback: (video: Video) -> Unit,
private val onError: (error: CameraError) -> Unit, private val onError: (error: CameraError) -> Unit,
private val allCallbacks: CameraSession.Callback, private val allCallbacks: CameraSession.Callback
// Use FragmentedRecordingManager for HLS-compatible fMP4 output
private val useFragmentedMp4: Boolean = true
) { ) {
companion object { companion object {
private const val TAG = "RecordingSession" private const val TAG = "RecordingSession"
@@ -44,20 +43,26 @@ class RecordingSession(
data class Video(val path: String, val durationMs: Long, val size: Size) data class Video(val path: String, val durationMs: Long, val size: Size)
// Strip file:// prefix if present (expo-file-system returns URIs with this prefix) val outputRotationDegrees: Int =
private val outputPath: File = File( if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
if (filePath.startsWith("file://")) { when (cameraOrientation.toDegrees()) {
filePath.removePrefix("file://") 90 -> 270
} else { 270 -> 90
filePath else -> cameraOrientation.toDegrees()
} }
) } else {
0
}
// Normalize path - expo-file-system passes file:// URIs but File expects raw paths
// Handle both file:// and file:/ variants
private val outputPath: File = File(filePath.replace(Regex("^file:/+"), "/"))
private val bitRate = getBitRate() private val bitRate = getBitRate()
// Use FragmentedRecordingManager for HLS-compatible fMP4 output, // Use FragmentedRecordingManager for HLS-compatible fMP4 output,
// or fall back to ChunkedRecordingManager for regular MP4 chunks // or fall back to ChunkedRecordingManager for regular MP4 chunks
private val recorder: ChunkedRecorderInterface = if (useFragmentedMp4) { private val recorder: ChunkedRecorderInterface = if (options.streamSegmentType == StreamSegmentType.FRAGMENTED_MP4) {
FragmentedRecordingManager.fromParams( FragmentedRecordingManager.fromParams(
allCallbacks, allCallbacks,
size, size,
@@ -89,7 +94,7 @@ class RecordingSession(
fun start() { fun start() {
synchronized(this) { synchronized(this) {
Log.i(TAG, "Starting RecordingSession..") Log.i(TAG, "Starting RecordingSession with ${options.streamSegmentType} recorder..")
startTime = System.currentTimeMillis() startTime = System.currentTimeMillis()
recorder.start() recorder.start()
} }

View File

@@ -186,7 +186,7 @@ class VideoPipeline(
if (recordingSession != null) { if (recordingSession != null) {
// Configure OpenGL pipeline to stream Frames into the Recording Session's surface // Configure OpenGL pipeline to stream Frames into the Recording Session's surface
Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...") Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...")
setRecordingSessionOutputSurface(recordingSession.surface) setRecordingSessionOutputSurface(recordingSession.surface, recordingSession.outputRotationDegrees)
this.recordingSession = recordingSession this.recordingSession = recordingSession
} else { } else {
// Configure OpenGL pipeline to stop streaming Frames into the Recording Session's surface // Configure OpenGL pipeline to stop streaming Frames into the Recording Session's surface
@@ -250,7 +250,7 @@ class VideoPipeline(
private external fun getInputTextureId(): Int private external fun getInputTextureId(): Int
private external fun onBeforeFrame() private external fun onBeforeFrame()
private external fun onFrame(transformMatrix: FloatArray) private external fun onFrame(transformMatrix: FloatArray)
private external fun setRecordingSessionOutputSurface(surface: Any) private external fun setRecordingSessionOutputSurface(surface: Any, rotationDegrees: Int)
private external fun removeRecordingSessionOutputSurface() private external fun removeRecordingSessionOutputSurface()
private external fun initHybrid(width: Int, height: Int): HybridData private external fun initHybrid(width: Int, height: Int): HybridData
} }

View File

@@ -9,6 +9,7 @@ class RecordVideoOptions(map: ReadableMap) {
var videoBitRateOverride: Double? = null var videoBitRateOverride: Double? = null
var videoBitRateMultiplier: Double? = null var videoBitRateMultiplier: Double? = null
var orientation: Orientation? = null var orientation: Orientation? = null
var streamSegmentType: StreamSegmentType = StreamSegmentType.FRAGMENTED_MP4
init { init {
if (map.hasKey("fileType")) { if (map.hasKey("fileType")) {
@@ -29,5 +30,8 @@ class RecordVideoOptions(map: ReadableMap) {
if (map.hasKey("orientation")) { if (map.hasKey("orientation")) {
orientation = Orientation.fromUnionValue(map.getString("orientation")) orientation = Orientation.fromUnionValue(map.getString("orientation"))
} }
if (map.hasKey("streamSegmentType")) {
streamSegmentType = StreamSegmentType.fromUnionValue(map.getString("streamSegmentType"))
}
} }
} }

View File

@@ -0,0 +1,15 @@
package com.mrousavy.camera.types
enum class StreamSegmentType(override val unionValue: String) : JSUnionValue {
FRAGMENTED_MP4("FRAGMENTED_MP4"),
RB_CHUNKED_MP4("RB_CHUNKED_MP4");
companion object : JSUnionValue.Companion<StreamSegmentType> {
override fun fromUnionValue(unionValue: String?): StreamSegmentType =
when (unionValue) {
"FRAGMENTED_MP4" -> FRAGMENTED_MP4
"RB_CHUNKED_MP4" -> RB_CHUNKED_MP4
else -> FRAGMENTED_MP4 // Default to fMP4
}
}
}

View File

@@ -20,6 +20,9 @@ extension CameraSession {
onError: @escaping (_ error: CameraError) -> Void) { onError: @escaping (_ error: CameraError) -> Void) {
// Run on Camera Queue // Run on Camera Queue
CameraQueues.cameraQueue.async { CameraQueues.cameraQueue.async {
// Normalize path - expo-file-system passes file:// URIs but FileManager expects raw paths
let normalizedPath = filePath.hasPrefix("file://") ? String(filePath.dropFirst(7)) : filePath
let start = DispatchTime.now() let start = DispatchTime.now()
ReactLogger.log(level: .info, message: "Starting Video recording...") ReactLogger.log(level: .info, message: "Starting Video recording...")
@@ -38,11 +41,27 @@ extension CameraSession {
// Callback for when new chunks are ready // Callback for when new chunks are ready
let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
guard let delegate = self.delegate else { guard let delegate = self.delegate else {
ReactLogger.log(level: .warning, message: "Chunk ready but delegate is nil, dropping chunk: \(chunk)")
return return
} }
delegate.onVideoChunkReady(chunk: chunk) delegate.onVideoChunkReady(chunk: chunk)
} }
// Callback for when a chunk write fails (e.g. init file write failure)
let onChunkError: (Error) -> Void = { error in
ReactLogger.log(level: .error, message: "Chunk write error, stopping recording: \(error.localizedDescription)")
// Stop recording immediately
if let session = self.recordingSession {
session.stop(clock: self.captureSession.clock)
}
// Surface error to RN
if let cameraError = error as? CameraError {
onError(cameraError)
} else {
onError(.capture(.fileError))
}
}
// Callback for when the recording ends // Callback for when the recording ends
let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
defer { defer {
@@ -82,22 +101,23 @@ extension CameraSession {
} }
} }
if !FileManager.default.fileExists(atPath: filePath) { if !FileManager.default.fileExists(atPath: normalizedPath) {
do { do {
try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true) try FileManager.default.createDirectory(atPath: normalizedPath, withIntermediateDirectories: true)
} catch { } catch {
onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription))) onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription)))
return return
} }
} }
ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)") ReactLogger.log(level: .info, message: "Will record to temporary file: \(normalizedPath)")
do { do {
// Create RecordingSession for the temp file // Create RecordingSession for the temp file
let recordingSession = try RecordingSession(outputDiretory: filePath, let recordingSession = try RecordingSession(outputDiretory: normalizedPath,
fileType: options.fileType, fileType: options.fileType,
onChunkReady: onChunkReady, onChunkReady: onChunkReady,
onChunkError: onChunkError,
completion: onFinish) completion: onFinish)
// Init Audio + Activate Audio Session (optional) // Init Audio + Activate Audio Session (optional)

View File

@@ -24,12 +24,14 @@ class ChunkedRecorder: NSObject {
let outputURL: URL let outputURL: URL
let onChunkReady: ((Chunk) -> Void) let onChunkReady: ((Chunk) -> Void)
let onError: ((Error) -> Void)?
private var chunkIndex: UInt64 = 0 private var chunkIndex: UInt64 = 0
init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws { init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void), onError: ((Error) -> Void)? = nil) throws {
self.outputURL = outputURL self.outputURL = outputURL
self.onChunkReady = onChunkReady self.onChunkReady = onChunkReady
self.onError = onError
guard FileManager.default.fileExists(atPath: outputURL.path) else { guard FileManager.default.fileExists(atPath: outputURL.path) else {
throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil) throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
} }
@@ -56,14 +58,19 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
private func saveInitSegment(_ data: Data) { private func saveInitSegment(_ data: Data) {
let url = outputURL.appendingPathComponent("init.mp4") let url = outputURL.appendingPathComponent("init.mp4")
save(data: data, url: url) do {
try data.write(to: url)
onChunkReady(url: url, type: .initialization) onChunkReady(url: url, type: .initialization)
} catch {
ReactLogger.log(level: .error, message: "Failed to write init file \(url): \(error.localizedDescription)")
onError?(CameraError.capture(.fileError))
}
} }
private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) { private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
let name = "\(chunkIndex).mp4" let name = "\(chunkIndex).mp4"
let url = outputURL.appendingPathComponent(name) let url = outputURL.appendingPathComponent(name)
save(data: data, url: url) if save(data: data, url: url) {
let duration = report? let duration = report?
.trackReports .trackReports
.filter { $0.mediaType == .video } .filter { $0.mediaType == .video }
@@ -72,12 +79,15 @@ extension ChunkedRecorder: AVAssetWriterDelegate {
onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration)) onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
chunkIndex += 1 chunkIndex += 1
} }
}
private func save(data: Data, url: URL) { private func save(data: Data, url: URL) -> Bool {
do { do {
try data.write(to: url) try data.write(to: url)
return true
} catch { } catch {
ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)") ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
return false
} }
} }

View File

@@ -74,12 +74,13 @@ class RecordingSession {
init(outputDiretory: String, init(outputDiretory: String,
fileType: AVFileType, fileType: AVFileType,
onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void), onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void),
onChunkError: ((Error) -> Void)? = nil,
completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws { completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
completionHandler = completion completionHandler = completion
do { do {
let outputURL = URL(fileURLWithPath: outputDiretory) let outputURL = URL(fileURLWithPath: outputDiretory)
recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady) recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady, onError: onChunkError)
assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!) assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!)
assetWriter.shouldOptimizeForNetworkUse = false assetWriter.shouldOptimizeForNetworkUse = false
assetWriter.outputFileTypeProfile = .mpeg4AppleHLS assetWriter.outputFileTypeProfile = .mpeg4AppleHLS

View File

@@ -41,6 +41,17 @@ export interface RecordVideoOptions {
* @default 'normal' * @default 'normal'
*/ */
videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number videoBitRate?: 'extra-low' | 'low' | 'normal' | 'high' | 'extra-high' | number
/**
* The stream segment type for recording on Android.
* - `FRAGMENTED_MP4`: HLS-compatible segments (init.mp4 + numbered segments)
* - `RB_CHUNKED_MP4`: Legacy chunked MP4 format
*
* iOS always uses FRAGMENTED_MP4 regardless of this setting.
*
* @platform android
* @default 'FRAGMENTED_MP4'
*/
streamSegmentType?: 'FRAGMENTED_MP4' | 'RB_CHUNKED_MP4'
} }
/** /**