From 6b0a3cbb985295b7e35c35a8c87a42ee2474bf10 Mon Sep 17 00:00:00 2001
From: Loewy
Date: Mon, 22 Dec 2025 18:55:08 -0500
Subject: [PATCH] fix clockwise rotation error

---
 .../android/src/main/cpp/VideoPipeline.cpp    | 37 ++++++++++++++-----
 package/android/src/main/cpp/VideoPipeline.h  |  2 +
 .../com/mrousavy/camera/core/CameraSession.kt |  3 +-
 .../mrousavy/camera/core/RecordingSession.kt  |  2 +-
 .../com/mrousavy/camera/core/VideoPipeline.kt | 17 ++++++++-
 5 files changed, 47 insertions(+), 14 deletions(-)

diff --git a/package/android/src/main/cpp/VideoPipeline.cpp b/package/android/src/main/cpp/VideoPipeline.cpp
index 1aa746f..8d9b36d 100644
--- a/package/android/src/main/cpp/VideoPipeline.cpp
+++ b/package/android/src/main/cpp/VideoPipeline.cpp
@@ -56,6 +56,11 @@ void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
   _recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
 }
 
+void VideoPipeline::setRecordingOrientation(int orientation) {
+  _recordingOrientation = orientation;
+  __android_log_print(ANDROID_LOG_INFO, TAG, "Recording orientation set to: %d", orientation);
+}
+
 int VideoPipeline::getInputTextureId() {
   if (_inputTexture == std::nullopt) {
     _inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
@@ -78,17 +83,28 @@ void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixPara
   OpenGLTexture& texture = _inputTexture.value();
 
   if (_recordingSessionOutput) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
-    // For recording, use a simple Y-flip matrix instead of the display transform.
+    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession.. orientation=%d", _recordingOrientation);
+
+    // For recording, use a simple transform matrix instead of the display transform.
     // The display transform includes rotations for preview which we don't want in recordings.
-    // We'll rely on rotation metadata in the MP4 container for playback orientation.
-    // This matrix flips Y (OpenGL origin is bottom-left, video expects top-left).
-    float recordingMatrix[16] = {
-        1.0f, 0.0f, 0.0f, 0.0f, // row 0
-        0.0f, -1.0f, 0.0f, 0.0f, // row 1 (Y flip)
-        0.0f, 0.0f, 1.0f, 0.0f, // row 2
-        0.0f, 1.0f, 0.0f, 1.0f // row 3 (translate Y by 1 after flip)
-    };
+    float recordingMatrix[16];
+
+    if (_recordingOrientation == 1) {
+      // LANDSCAPE_RIGHT (CW): Y-flip composed with a 180° rotation; the two Y-flips cancel,
+      // leaving a horizontal flip: negate X and translate X by 1, Y passes through unchanged.
+      recordingMatrix[0] = -1.0f; recordingMatrix[1] = 0.0f; recordingMatrix[2] = 0.0f; recordingMatrix[3] = 0.0f;
+      recordingMatrix[4] = 0.0f; recordingMatrix[5] = 1.0f; recordingMatrix[6] = 0.0f; recordingMatrix[7] = 0.0f;
+      recordingMatrix[8] = 0.0f; recordingMatrix[9] = 0.0f; recordingMatrix[10] = 1.0f; recordingMatrix[11] = 0.0f;
+      recordingMatrix[12] = 1.0f; recordingMatrix[13] = 0.0f; recordingMatrix[14] = 0.0f; recordingMatrix[15] = 1.0f;
+    } else {
+      // LANDSCAPE_LEFT (CCW): Simple Y-flip
+      // OpenGL origin is bottom-left, video expects top-left
+      recordingMatrix[0] = 1.0f; recordingMatrix[1] = 0.0f; recordingMatrix[2] = 0.0f; recordingMatrix[3] = 0.0f;
+      recordingMatrix[4] = 0.0f; recordingMatrix[5] = -1.0f; recordingMatrix[6] = 0.0f; recordingMatrix[7] = 0.0f;
+      recordingMatrix[8] = 0.0f; recordingMatrix[9] = 0.0f; recordingMatrix[10] = 1.0f; recordingMatrix[11] = 0.0f;
+      recordingMatrix[12] = 0.0f; recordingMatrix[13] = 1.0f; recordingMatrix[14] = 0.0f; recordingMatrix[15] = 1.0f;
+    }
+
     _recordingSessionOutput->renderTextureToSurface(texture, recordingMatrix);
   }
 }
@@ -98,6 +114,7 @@
       makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
       makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
       makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
+      makeNativeMethod("setRecordingOrientation", VideoPipeline::setRecordingOrientation),
       makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
       makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
       makeNativeMethod("onFrame", VideoPipeline::onFrame),
diff --git a/package/android/src/main/cpp/VideoPipeline.h b/package/android/src/main/cpp/VideoPipeline.h
index 67f0725..42cdb8e 100644
--- a/package/android/src/main/cpp/VideoPipeline.h
+++ b/package/android/src/main/cpp/VideoPipeline.h
@@ -33,6 +33,7 @@ public:
   // <- MediaRecorder output
   void setRecordingSessionOutputSurface(jobject surface);
   void removeRecordingSessionOutputSurface();
+  void setRecordingOrientation(int orientation);
 
   // Frame callbacks
   void onBeforeFrame();
@@ -47,6 +48,7 @@ private:
   std::optional<OpenGLTexture> _inputTexture = std::nullopt;
   int _width = 0;
   int _height = 0;
+  int _recordingOrientation = 0; // 0=LANDSCAPE_LEFT, 1=LANDSCAPE_RIGHT
 
   // Output Contexts
   std::shared_ptr<OpenGLContext> _context = nullptr;
diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt
index cda1ff9..d4694c1 100644
--- a/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt
+++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraSession.kt
@@ -409,7 +409,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
   private fun updateVideoOutputs() {
     val videoOutput = videoOutput ?: return
     Log.i(TAG, "Updating Video Outputs...")
-    videoOutput.videoPipeline.setRecordingSessionOutput(recording)
+    val orientation = recording?.cameraOrientation ?: Orientation.LANDSCAPE_LEFT
+    videoOutput.videoPipeline.setRecordingSessionOutput(recording, orientation)
   }
 
   suspend fun startRecording(
diff --git a/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt b/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt
index 4f3331e..29dc24b 100644
--- a/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt
+++ b/package/android/src/main/java/com/mrousavy/camera/core/RecordingSession.kt
@@ -24,7 +24,7 @@ class RecordingSession(
   private val enableAudio: Boolean,
   private val fps: Int? = null,
   private val hdr: Boolean = false,
-  private val cameraOrientation: Orientation,
+  val cameraOrientation: Orientation,
   private val options: RecordVideoOptions,
   private val filePath: String,
   private val callback: (video: Video) -> Unit,
diff --git a/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt b/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt
index 395d396..b84d48c 100644
--- a/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt
+++ b/package/android/src/main/java/com/mrousavy/camera/core/VideoPipeline.kt
@@ -162,6 +162,14 @@ class VideoPipeline(
       // 4. Get the transform matrix from the SurfaceTexture (rotations/scales applied by Camera)
       surfaceTexture.getTransformMatrix(transformMatrix)
 
+      // Log transform matrix for debugging rotation issues (only when recording)
+      if (recordingSession != null) {
+        Log.i(TAG, "ROTATION_DEBUG TransformMatrix: [${transformMatrix[0]}, ${transformMatrix[1]}, ${transformMatrix[2]}, ${transformMatrix[3]}], " +
+          "[${transformMatrix[4]}, ${transformMatrix[5]}, ${transformMatrix[6]}, ${transformMatrix[7]}], " +
+          "[${transformMatrix[8]}, ${transformMatrix[9]}, ${transformMatrix[10]}, ${transformMatrix[11]}], " +
+          "[${transformMatrix[12]}, ${transformMatrix[13]}, ${transformMatrix[14]}, ${transformMatrix[15]}]")
+      }
+
       // 5. Draw it with applied rotation/mirroring
       onFrame(transformMatrix)
 
@@ -181,11 +189,15 @@ class VideoPipeline(
   /**
   * Configures the Pipeline to also write Frames to a Surface from a `MediaRecorder` (or null)
   */
-  fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
+  fun setRecordingSessionOutput(recordingSession: RecordingSession?, orientation: Orientation = Orientation.LANDSCAPE_LEFT) {
    synchronized(this) {
      if (recordingSession != null) {
        // Configure OpenGL pipeline to stream Frames into the Recording Session's surface
-        Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...")
+        Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output with orientation=$orientation...")
+        // Set the recording orientation for the native layer
+        // 0 = LANDSCAPE_LEFT (CCW), 1 = LANDSCAPE_RIGHT (CW)
+        val orientationValue = if (orientation == Orientation.LANDSCAPE_RIGHT) 1 else 0
+        setRecordingOrientation(orientationValue)
        setRecordingSessionOutputSurface(recordingSession.surface)
        this.recordingSession = recordingSession
      } else {
@@ -252,5 +264,6 @@ class VideoPipeline(
  private external fun onFrame(transformMatrix: FloatArray)
  private external fun setRecordingSessionOutputSurface(surface: Any)
  private external fun removeRecordingSessionOutputSurface()
+  private external fun setRecordingOrientation(orientation: Int)
  private external fun initHybrid(width: Int, height: Int): HybridData
 }
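
A note on the matrix math (not part of the patch): the recording matrices in VideoPipeline.cpp are column-major, the same convention used by SurfaceTexture transform matrices, and they act on texture coordinates in [0, 1]. The sketch below is a standalone sanity check, assuming the renderer applies the matrix to each texture coordinate as M * vec4(s, t, 0, 1); the Mat4 alias, the apply() helper, and the variable names left/right are illustrative scaffolding, and only the matrix values are taken from the patch. It maps the corners of the unit square through both matrices to show that the LANDSCAPE_LEFT matrix sends (s, t) to (s, 1 - t), a Y-flip, while the LANDSCAPE_RIGHT matrix sends (s, t) to (1 - s, t), the X-flip that results from composing the Y-flip with a 180° rotation.

// Standalone sanity check for the two recording matrices (not part of the patch).
// Assumes the renderer transforms texture coordinates as M * vec4(s, t, 0, 1);
// only the matrix values are copied from the patch above.
#include <array>
#include <cstdio>

using Mat4 = std::array<float, 16>; // column-major, like SurfaceTexture/OpenGL matrices

// Transform a 2D texture coordinate (s, t) by a column-major 4x4 matrix.
static std::array<float, 2> apply(const Mat4& m, float s, float t) {
  // x' = m[0]*s + m[4]*t + m[12],  y' = m[1]*s + m[5]*t + m[13]  (z = 0, w = 1)
  return {m[0] * s + m[4] * t + m[12], m[1] * s + m[5] * t + m[13]};
}

int main() {
  // LANDSCAPE_LEFT (orientation == 0): plain Y-flip, (s, t) -> (s, 1 - t)
  const Mat4 left = {1, 0, 0, 0,  0, -1, 0, 0,  0, 0, 1, 0,  0, 1, 0, 1};
  // LANDSCAPE_RIGHT (orientation == 1): Y-flip composed with a 180° rotation,
  // which collapses to an X-flip, (s, t) -> (1 - s, t)
  const Mat4 right = {-1, 0, 0, 0,  0, 1, 0, 0,  0, 0, 1, 0,  1, 0, 0, 1};

  const float corners[4][2] = {{0, 0}, {1, 0}, {0, 1}, {1, 1}};
  for (const auto& c : corners) {
    const auto l = apply(left, c[0], c[1]);
    const auto r = apply(right, c[0], c[1]);
    std::printf("(%.0f, %.0f) -> LEFT (%.0f, %.0f), RIGHT (%.0f, %.0f)\n",
                c[0], c[1], l[0], l[1], r[0], r[1]);
  }
  return 0;
}

Running this prints, for example, (0, 0) -> LEFT (0, 1), RIGHT (1, 0) and (1, 1) -> LEFT (1, 0), RIGHT (0, 1), which matches the intent of the comments in the patch: the CCW path only flips Y, while the CW path only flips X.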