Fix fMP4 video orientation by using raw sensor frames with Y-flip transform

2025-12-22 18:48:12 -05:00
parent a2d218580c
commit 49fba9ed60
4 changed files with 33 additions and 16 deletions

@@ -26,6 +26,7 @@ OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWi
   _outputSurface = surface;
   _width = ANativeWindow_getWidth(surface);
   _height = ANativeWindow_getHeight(surface);
+  __android_log_print(ANDROID_LOG_INFO, TAG, "ROTATION_DEBUG OpenGLRenderer created with output surface dimensions: %dx%d", _width, _height);
 }
 OpenGLRenderer::~OpenGLRenderer() {

@@ -79,7 +79,17 @@ void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixPara
   if (_recordingSessionOutput) {
     __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
-    _recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
+    // For recording, use a simple Y-flip matrix instead of the display transform.
+    // The display transform includes rotations for the preview, which we don't want in recordings.
+    // We'll rely on rotation metadata in the MP4 container for playback orientation.
+    // This matrix flips Y (the OpenGL origin is bottom-left, video expects top-left).
+    float recordingMatrix[16] = {
+        1.0f,  0.0f, 0.0f, 0.0f, // column 0 (OpenGL reads float[16] matrices column-major)
+        0.0f, -1.0f, 0.0f, 0.0f, // column 1 (Y flip)
+        0.0f,  0.0f, 1.0f, 0.0f, // column 2
+        0.0f,  1.0f, 0.0f, 1.0f  // column 3 (translate Y by +1 after the flip)
+    };
+    _recordingSessionOutput->renderTextureToSurface(texture, recordingMatrix);
   }
 }
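
Not part of the commit itself, but a useful sanity check on the matrix above: the sketch below multiplies the column-major recordingMatrix by the four corner texture coordinates and confirms it performs a pure Y flip (v maps to 1 - v) with no rotation, matching how OpenGL interprets a float[16] uniform with transpose disabled. The helper and its names are illustrative only and do not exist in the codebase.

// Standalone sanity check (illustrative only, not part of VideoPipeline):
// applies the column-major Y-flip matrix to the four corner texture
// coordinates and prints the mapped coordinates.
#include <cstdio>

// out = m * v, with m stored column-major (the OpenGL convention):
// element (row, col) lives at m[col * 4 + row].
static void applyColumnMajor(const float m[16], const float v[4], float out[4]) {
  for (int row = 0; row < 4; row++) {
    out[row] = 0.0f;
    for (int col = 0; col < 4; col++) {
      out[row] += m[col * 4 + row] * v[col];
    }
  }
}

int main() {
  const float recordingMatrix[16] = {
      1.0f,  0.0f, 0.0f, 0.0f, // column 0
      0.0f, -1.0f, 0.0f, 0.0f, // column 1 (Y flip)
      0.0f,  0.0f, 1.0f, 0.0f, // column 2
      0.0f,  1.0f, 0.0f, 1.0f  // column 3 (translate Y by +1 after the flip)
  };
  const float corners[4][4] = {
      {0.0f, 0.0f, 0.0f, 1.0f}, // bottom-left
      {1.0f, 0.0f, 0.0f, 1.0f}, // bottom-right
      {0.0f, 1.0f, 0.0f, 1.0f}, // top-left
      {1.0f, 1.0f, 0.0f, 1.0f}  // top-right
  };
  for (const auto& corner : corners) {
    float out[4];
    applyColumnMajor(recordingMatrix, corner, out);
    // Expected: (u, v) -> (u, 1 - v), e.g. (0, 0) -> (0, 1).
    std::printf("(%.0f, %.0f) -> (%.0f, %.0f)\n", corner[0], corner[1], out[0], out[1]);
  }
  return 0;
}

The playback rotation itself is expected to come from metadata written into the MP4 container, as the comment in the diff notes. That step is not visible in this diff and depends on how the RecordingSession muxes the stream, for example MediaMuxer.setOrientationHint() on the Java side or AMediaMuxer_setOrientationHint() when using the NDK muxer.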