fix clockwise rotation error

2025-12-22 18:55:08 -05:00
parent 49fba9ed60
commit 6b0a3cbb98
5 changed files with 47 additions and 14 deletions

View File

@@ -56,6 +56,11 @@ void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
   _recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
 }

+void VideoPipeline::setRecordingOrientation(int orientation) {
+  _recordingOrientation = orientation;
+  __android_log_print(ANDROID_LOG_INFO, TAG, "Recording orientation set to: %d", orientation);
+}
+
 int VideoPipeline::getInputTextureId() {
   if (_inputTexture == std::nullopt) {
     _inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
@@ -78,17 +83,28 @@ void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixPara
   OpenGLTexture& texture = _inputTexture.value();

   if (_recordingSessionOutput) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
-    // For recording, use a simple Y-flip matrix instead of the display transform.
-    // The display transform includes rotations for preview which we don't want in recordings.
-    // We'll rely on rotation metadata in the MP4 container for playback orientation.
-    // This matrix flips Y (OpenGL origin is bottom-left, video expects top-left).
-    float recordingMatrix[16] = {
-        1.0f, 0.0f, 0.0f, 0.0f,  // row 0
-        0.0f, -1.0f, 0.0f, 0.0f, // row 1 (Y flip)
-        0.0f, 0.0f, 1.0f, 0.0f,  // row 2
-        0.0f, 1.0f, 0.0f, 1.0f   // row 3 (translate Y by 1 after flip)
-    };
+    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession.. orientation=%d", _recordingOrientation);
+    // For recording, use a simple transform matrix instead of the display transform.
+    // The display transform includes rotations for preview which we don't want in recordings.
+    float recordingMatrix[16];
+    if (_recordingOrientation == 1) {
+      // LANDSCAPE_RIGHT (CW): Y-flip composed with a 180° rotation, which reduces to an X-flip:
+      // negate X and translate X by 1; Y is left untouched.
+      recordingMatrix[0] = -1.0f; recordingMatrix[1] = 0.0f; recordingMatrix[2] = 0.0f;  recordingMatrix[3] = 0.0f;
+      recordingMatrix[4] = 0.0f;  recordingMatrix[5] = 1.0f; recordingMatrix[6] = 0.0f;  recordingMatrix[7] = 0.0f;
+      recordingMatrix[8] = 0.0f;  recordingMatrix[9] = 0.0f; recordingMatrix[10] = 1.0f; recordingMatrix[11] = 0.0f;
+      recordingMatrix[12] = 1.0f; recordingMatrix[13] = 0.0f; recordingMatrix[14] = 0.0f; recordingMatrix[15] = 1.0f;
+    } else {
+      // LANDSCAPE_LEFT (CCW): simple Y-flip
+      // (OpenGL's origin is bottom-left, video expects top-left).
+      recordingMatrix[0] = 1.0f;  recordingMatrix[1] = 0.0f;  recordingMatrix[2] = 0.0f;  recordingMatrix[3] = 0.0f;
+      recordingMatrix[4] = 0.0f;  recordingMatrix[5] = -1.0f; recordingMatrix[6] = 0.0f;  recordingMatrix[7] = 0.0f;
+      recordingMatrix[8] = 0.0f;  recordingMatrix[9] = 0.0f;  recordingMatrix[10] = 1.0f; recordingMatrix[11] = 0.0f;
+      recordingMatrix[12] = 0.0f; recordingMatrix[13] = 1.0f; recordingMatrix[14] = 0.0f; recordingMatrix[15] = 1.0f;
+    }
     _recordingSessionOutput->renderTextureToSurface(texture, recordingMatrix);
   }
 }
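Note on the two branches above: applied to homogeneous texture coordinates (u, v, 0, 1) in the column-major layout that SurfaceTexture.getTransformMatrix() uses, the LANDSCAPE_LEFT matrix maps (u, v) to (u, 1 - v) (Y-flip) and the LANDSCAPE_RIGHT matrix maps (u, v) to (1 - u, v) (X-flip, i.e. Y-flip composed with a 180° rotation). The sketch below is a standalone check of that claim only; it is not code from this commit and the helper name is made up.

```kotlin
// Standalone sanity check for the two recording matrices, assuming a column-major
// float[16] applied to homogeneous texture coordinates (u, v, 0, 1).
fun applyTexMatrix(m: FloatArray, u: Float, v: Float): Pair<Float, Float> {
    val input = floatArrayOf(u, v, 0f, 1f)
    val out = FloatArray(4)
    for (row in 0 until 4) {
        var acc = 0f
        for (col in 0 until 4) acc += m[col * 4 + row] * input[col]
        out[row] = acc
    }
    return out[0] to out[1]
}

fun main() {
    // LANDSCAPE_LEFT branch: plain Y-flip.
    val left = floatArrayOf(
        1f, 0f, 0f, 0f,
        0f, -1f, 0f, 0f,
        0f, 0f, 1f, 0f,
        0f, 1f, 0f, 1f
    )
    // LANDSCAPE_RIGHT branch: X-flip (equivalent to Y-flip composed with 180°).
    val right = floatArrayOf(
        -1f, 0f, 0f, 0f,
        0f, 1f, 0f, 0f,
        0f, 0f, 1f, 0f,
        1f, 0f, 0f, 1f
    )
    println(applyTexMatrix(left, 0.25f, 0.25f))  // (0.25, 0.75): Y flipped
    println(applyTexMatrix(right, 0.25f, 0.25f)) // (0.75, 0.25): X flipped
}
```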
@@ -98,6 +114,7 @@ void VideoPipeline::registerNatives() {
makeNativeMethod("initHybrid", VideoPipeline::initHybrid), makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface), makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface), makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
makeNativeMethod("setRecordingOrientation", VideoPipeline::setRecordingOrientation),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId), makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame), makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
makeNativeMethod("onFrame", VideoPipeline::onFrame), makeNativeMethod("onFrame", VideoPipeline::onFrame),

View File

@@ -33,6 +33,7 @@ public:
   // <- MediaRecorder output
   void setRecordingSessionOutputSurface(jobject surface);
   void removeRecordingSessionOutputSurface();
+  void setRecordingOrientation(int orientation);

   // Frame callbacks
   void onBeforeFrame();
@@ -47,6 +48,7 @@ private:
   std::optional<OpenGLTexture> _inputTexture = std::nullopt;
   int _width = 0;
   int _height = 0;
+  int _recordingOrientation = 0; // 0 = LANDSCAPE_LEFT, 1 = LANDSCAPE_RIGHT

   // Output Contexts
   std::shared_ptr<OpenGLContext> _context = nullptr;

View File

@@ -409,7 +409,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
   private fun updateVideoOutputs() {
     val videoOutput = videoOutput ?: return
     Log.i(TAG, "Updating Video Outputs...")
-    videoOutput.videoPipeline.setRecordingSessionOutput(recording)
+    val orientation = recording?.cameraOrientation ?: Orientation.LANDSCAPE_LEFT
+    videoOutput.videoPipeline.setRecordingSessionOutput(recording, orientation)
   }

   suspend fun startRecording(

View File

@@ -24,7 +24,7 @@ class RecordingSession(
   private val enableAudio: Boolean,
   private val fps: Int? = null,
   private val hdr: Boolean = false,
-  private val cameraOrientation: Orientation,
+  val cameraOrientation: Orientation,
   private val options: RecordVideoOptions,
   private val filePath: String,
   private val callback: (video: Video) -> Unit,

View File

@@ -162,6 +162,14 @@ class VideoPipeline(
       // 4. Get the transform matrix from the SurfaceTexture (rotations/scales applied by Camera)
       surfaceTexture.getTransformMatrix(transformMatrix)

+      // Log transform matrix for debugging rotation issues (only when recording)
+      if (recordingSession != null) {
+        Log.i(TAG, "ROTATION_DEBUG TransformMatrix: [${transformMatrix[0]}, ${transformMatrix[1]}, ${transformMatrix[2]}, ${transformMatrix[3]}], " +
+          "[${transformMatrix[4]}, ${transformMatrix[5]}, ${transformMatrix[6]}, ${transformMatrix[7]}], " +
+          "[${transformMatrix[8]}, ${transformMatrix[9]}, ${transformMatrix[10]}, ${transformMatrix[11]}], " +
+          "[${transformMatrix[12]}, ${transformMatrix[13]}, ${transformMatrix[14]}, ${transformMatrix[15]}]")
+      }
+
       // 5. Draw it with applied rotation/mirroring
       onFrame(transformMatrix)
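If the ROTATION_DEBUG dump is hard to eyeball, a small standalone helper like the sketch below can reduce the 16 floats to their affine form. This is not part of the commit; describeTexMatrix is a made-up name, and it assumes the column-major layout documented for SurfaceTexture.getTransformMatrix().

```kotlin
// Turns a logged 16-float texture matrix into "u' = ..., v' = ..." form,
// assuming column-major layout applied to (u, v, 0, 1).
fun describeTexMatrix(m: FloatArray): String {
    require(m.size == 16)
    val uFromU = m[0]; val uFromV = m[4]; val uOffset = m[12]
    val vFromU = m[1]; val vFromV = m[5]; val vOffset = m[13]
    return "u' = %.2f*u + %.2f*v + %.2f, v' = %.2f*u + %.2f*v + %.2f"
        .format(uFromU, uFromV, uOffset, vFromU, vFromV, vOffset)
}

fun main() {
    // A plain Y-flip (the matrix the recording path used before this commit):
    val yFlip = floatArrayOf(
        1f, 0f, 0f, 0f,
        0f, -1f, 0f, 0f,
        0f, 0f, 1f, 0f,
        0f, 1f, 0f, 1f
    )
    println(describeTexMatrix(yFlip)) // u' = 1.00*u + 0.00*v + 0.00, v' = 0.00*u + -1.00*v + 1.00
}
```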
@@ -181,11 +189,15 @@ class VideoPipeline(
   /**
    * Configures the Pipeline to also write Frames to a Surface from a `MediaRecorder` (or null)
    */
-  fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
+  fun setRecordingSessionOutput(recordingSession: RecordingSession?, orientation: Orientation = Orientation.LANDSCAPE_LEFT) {
     synchronized(this) {
       if (recordingSession != null) {
         // Configure OpenGL pipeline to stream Frames into the Recording Session's surface
-        Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...")
+        Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output with orientation=$orientation...")
+        // Set the recording orientation for the native layer
+        // (0 = LANDSCAPE_LEFT / CCW, 1 = LANDSCAPE_RIGHT / CW)
+        val orientationValue = if (orientation == Orientation.LANDSCAPE_RIGHT) 1 else 0
+        setRecordingOrientation(orientationValue)
         setRecordingSessionOutputSurface(recordingSession.surface)
         this.recordingSession = recordingSession
       } else {
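One design note on the mapping above: every orientation other than LANDSCAPE_RIGHT is collapsed to 0, so any non-landscape case (if the Orientation type has portrait members; they are not shown in this diff) silently takes the LANDSCAPE_LEFT Y-flip path. The sketch below is illustrative only and uses a stand-in enum, since the real Orientation members aren't visible here; an exhaustive mapping would make that fallback explicit at the call site.

```kotlin
// Stand-in enum for illustration; the project's real Orientation type may differ.
enum class Orientation { PORTRAIT, LANDSCAPE_RIGHT, PORTRAIT_UPSIDE_DOWN, LANDSCAPE_LEFT }

// Maps an orientation to the int the native setRecordingOrientation() expects
// (0 = LANDSCAPE_LEFT / Y-flip, 1 = LANDSCAPE_RIGHT / X-flip), making the
// "everything else falls back to 0" behaviour explicit.
fun recordingOrientationValue(orientation: Orientation): Int = when (orientation) {
    Orientation.LANDSCAPE_RIGHT -> 1
    else -> 0 // LANDSCAPE_LEFT and any portrait case reuse the Y-flip path
}

fun main() {
    println(recordingOrientationValue(Orientation.LANDSCAPE_RIGHT)) // 1
    println(recordingOrientationValue(Orientation.PORTRAIT))        // 0 (same path as LANDSCAPE_LEFT)
}
```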
@@ -252,5 +264,6 @@ class VideoPipeline(
   private external fun onFrame(transformMatrix: FloatArray)
   private external fun setRecordingSessionOutputSurface(surface: Any)
   private external fun removeRecordingSessionOutputSurface()
+  private external fun setRecordingOrientation(orientation: Int)
   private external fun initHybrid(width: Int, height: Int): HybridData
 }