Compare commits

...

2 Commits

7 changed files with 71 additions and 21 deletions

View File

@@ -26,6 +26,7 @@ OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWi
_outputSurface = surface;
_width = ANativeWindow_getWidth(surface);
_height = ANativeWindow_getHeight(surface);
__android_log_print(ANDROID_LOG_INFO, TAG, "ROTATION_DEBUG OpenGLRenderer created with output surface dimensions: %dx%d", _width, _height);
}
OpenGLRenderer::~OpenGLRenderer() {

View File

@@ -56,6 +56,11 @@ void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
_recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
// Stores the recording orientation selected by the Kotlin layer.
// Encoding (see VideoPipeline.kt setRecordingSessionOutput and the
// _recordingOrientation field): 0 = LANDSCAPE_LEFT (CCW), 1 = LANDSCAPE_RIGHT (CW).
// Read in onFrame() to choose the recording transform matrix.
void VideoPipeline::setRecordingOrientation(int orientation) {
_recordingOrientation = orientation;
__android_log_print(ANDROID_LOG_INFO, TAG, "Recording orientation set to: %d", orientation);
}
int VideoPipeline::getInputTextureId() {
if (_inputTexture == std::nullopt) {
_inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
@@ -78,8 +83,29 @@ void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixPara
OpenGLTexture& texture = _inputTexture.value();
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession.. orientation=%d", _recordingOrientation);
// For recording, use a simple transform matrix instead of the display transform.
// The display transform includes rotations for preview which we don't want in recordings.
float recordingMatrix[16];
if (_recordingOrientation == 1) {
// LANDSCAPE_RIGHT (CW): Y-flip composed with a 180° rotation — the Y
// negations cancel, so the net effect is an X-only flip (negate X, then translate X by 1)
recordingMatrix[0] = -1.0f; recordingMatrix[1] = 0.0f; recordingMatrix[2] = 0.0f; recordingMatrix[3] = 0.0f;
recordingMatrix[4] = 0.0f; recordingMatrix[5] = 1.0f; recordingMatrix[6] = 0.0f; recordingMatrix[7] = 0.0f;
recordingMatrix[8] = 0.0f; recordingMatrix[9] = 0.0f; recordingMatrix[10] = 1.0f; recordingMatrix[11] = 0.0f;
recordingMatrix[12] = 1.0f; recordingMatrix[13] = 0.0f; recordingMatrix[14] = 0.0f; recordingMatrix[15] = 1.0f;
} else {
// LANDSCAPE_LEFT (CCW): Simple Y-flip
// OpenGL origin is bottom-left, video expects top-left
recordingMatrix[0] = 1.0f; recordingMatrix[1] = 0.0f; recordingMatrix[2] = 0.0f; recordingMatrix[3] = 0.0f;
recordingMatrix[4] = 0.0f; recordingMatrix[5] = -1.0f; recordingMatrix[6] = 0.0f; recordingMatrix[7] = 0.0f;
recordingMatrix[8] = 0.0f; recordingMatrix[9] = 0.0f; recordingMatrix[10] = 1.0f; recordingMatrix[11] = 0.0f;
recordingMatrix[12] = 0.0f; recordingMatrix[13] = 1.0f; recordingMatrix[14] = 0.0f; recordingMatrix[15] = 1.0f;
}
_recordingSessionOutput->renderTextureToSurface(texture, recordingMatrix);
}
}
@@ -88,6 +114,7 @@ void VideoPipeline::registerNatives() {
makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
makeNativeMethod("setRecordingOrientation", VideoPipeline::setRecordingOrientation),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
makeNativeMethod("onFrame", VideoPipeline::onFrame),

View File

@@ -33,6 +33,7 @@ public:
// <- MediaRecorder output
void setRecordingSessionOutputSurface(jobject surface);
void removeRecordingSessionOutputSurface();
void setRecordingOrientation(int orientation);
// Frame callbacks
void onBeforeFrame();
@@ -47,6 +48,7 @@ private:
std::optional<OpenGLTexture> _inputTexture = std::nullopt;
int _width = 0;
int _height = 0;
int _recordingOrientation = 0; // 0=LANDSCAPE_LEFT, 1=LANDSCAPE_RIGHT
// Output Contexts
std::shared_ptr<OpenGLContext> _context = nullptr;

View File

@@ -409,7 +409,8 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
private fun updateVideoOutputs() {
val videoOutput = videoOutput ?: return
Log.i(TAG, "Updating Video Outputs...")
videoOutput.videoPipeline.setRecordingSessionOutput(recording)
val orientation = recording?.cameraOrientation ?: Orientation.LANDSCAPE_LEFT
videoOutput.videoPipeline.setRecordingSessionOutput(recording, orientation)
}
suspend fun startRecording(
@@ -428,18 +429,16 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
// Get actual device rotation from WindowManager since the React Native orientation hook
// doesn't update when rotating between landscape-left and landscape-right on Android.
// Map device rotation to the recording Orientation:
// - Counter-clockwise (ROTATION_90) → LANDSCAPE_LEFT
// - Clockwise (ROTATION_270) → LANDSCAPE_RIGHT
val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val deviceRotation = windowManager.defaultDisplay.rotation
val recordingOrientation = when (deviceRotation) {
Surface.ROTATION_0 -> Orientation.PORTRAIT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_RIGHT
Surface.ROTATION_90 -> Orientation.LANDSCAPE_LEFT // CCW rotation, top to left
Surface.ROTATION_180 -> Orientation.PORTRAIT_UPSIDE_DOWN
Surface.ROTATION_270 -> Orientation.LANDSCAPE_LEFT
Surface.ROTATION_270 -> Orientation.LANDSCAPE_RIGHT // CW rotation, top to right
else -> Orientation.PORTRAIT
}
Log.i(TAG, "ROTATION_DEBUG: deviceRotation=$deviceRotation, recordingOrientation=$recordingOrientation, options.orientation=${options.orientation}")
val recording = RecordingSession(
context,
@@ -448,7 +447,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
enableAudio,
fps,
videoOutput.enableHdr,
orientation,
recordingOrientation,
options,
filePath,
callback,

View File

@@ -51,14 +51,21 @@ class FragmentedRecordingManager(
segmentDurationSeconds: Int = 6
): FragmentedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
val cameraOrientationDegrees = cameraOrientation.toDegrees()
val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
size.width to size.height
// For fragmented MP4: DON'T swap dimensions, use camera's native dimensions.
// The C++ VideoPipeline now uses a simple Y-flip matrix (not the display transform).
// This gives us raw sensor frames, and we rely on rotation metadata for playback.
val cameraOrientationDegrees = when (cameraOrientation) {
Orientation.LANDSCAPE_LEFT -> 0 // CCW landscape - works!
Orientation.LANDSCAPE_RIGHT -> 0 // CW landscape - same as CCW (Y-flip handles it)
Orientation.PORTRAIT -> 90 // Portrait typically needs 90° on Android
Orientation.PORTRAIT_UPSIDE_DOWN -> 270
}
Log.i(TAG, "ROTATION_DEBUG FragmentedRecordingManager: cameraOrientation=$cameraOrientation, cameraOrientationDegrees=$cameraOrientationDegrees, inputSize=${size.width}x${size.height}")
// Keep original dimensions - don't swap. Let rotation metadata handle orientation.
val width = size.width
val height = size.height
Log.i(TAG, "ROTATION_DEBUG FragmentedRecordingManager: outputDimensions=${width}x${height} (no swap)")
val format = MediaFormat.createVideoFormat(mimeType, width, height)
val codec = MediaCodec.createEncoderByType(mimeType)
@@ -74,14 +81,14 @@ class FragmentedRecordingManager(
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, segmentDurationSeconds)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees")
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
return FragmentedRecordingManager(
codec,
outputDirectory,
recordingOrientationDegrees,
cameraOrientationDegrees,
segmentDurationSeconds * 1_000_000L,
callbacks
)
@@ -317,6 +324,7 @@ class FragmentedRecordingManager(
initData.add(bytes)
}
Log.i(TAG, "ROTATION_DEBUG convertToMedia3Format: orientationDegrees=$orientationDegrees, width=$width, height=$height")
return Format.Builder()
.setSampleMimeType(mimeType)
.setWidth(width)

View File

@@ -24,7 +24,7 @@ class RecordingSession(
private val enableAudio: Boolean,
private val fps: Int? = null,
private val hdr: Boolean = false,
private val cameraOrientation: Orientation,
val cameraOrientation: Orientation,
private val options: RecordVideoOptions,
private val filePath: String,
private val callback: (video: Video) -> Unit,

View File

@@ -162,6 +162,14 @@ class VideoPipeline(
// 4. Get the transform matrix from the SurfaceTexture (rotations/scales applied by Camera)
surfaceTexture.getTransformMatrix(transformMatrix)
// Log transform matrix for debugging rotation issues (only when recording)
if (recordingSession != null) {
Log.i(TAG, "ROTATION_DEBUG TransformMatrix: [${transformMatrix[0]}, ${transformMatrix[1]}, ${transformMatrix[2]}, ${transformMatrix[3]}], " +
"[${transformMatrix[4]}, ${transformMatrix[5]}, ${transformMatrix[6]}, ${transformMatrix[7]}], " +
"[${transformMatrix[8]}, ${transformMatrix[9]}, ${transformMatrix[10]}, ${transformMatrix[11]}], " +
"[${transformMatrix[12]}, ${transformMatrix[13]}, ${transformMatrix[14]}, ${transformMatrix[15]}]")
}
// 5. Draw it with applied rotation/mirroring
onFrame(transformMatrix)
@@ -181,11 +189,15 @@ class VideoPipeline(
/**
* Configures the Pipeline to also write Frames to a Surface from a `MediaRecorder` (or null)
*/
fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
fun setRecordingSessionOutput(recordingSession: RecordingSession?, orientation: Orientation = Orientation.LANDSCAPE_LEFT) {
synchronized(this) {
if (recordingSession != null) {
// Configure OpenGL pipeline to stream Frames into the Recording Session's surface
Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output...")
Log.i(TAG, "Setting ${recordingSession.size} RecordingSession Output with orientation=$orientation...")
// Set the recording orientation for the native layer
// 0 = LANDSCAPE_LEFT (CCW), 1 = LANDSCAPE_RIGHT (CW)
val orientationValue = if (orientation == Orientation.LANDSCAPE_RIGHT) 1 else 0
setRecordingOrientation(orientationValue)
setRecordingSessionOutputSurface(recordingSession.surface)
this.recordingSession = recordingSession
} else {
@@ -252,5 +264,6 @@ class VideoPipeline(
private external fun onFrame(transformMatrix: FloatArray)
private external fun setRecordingSessionOutputSurface(surface: Any)
private external fun removeRecordingSessionOutputSurface()
private external fun setRecordingOrientation(orientation: Int)
private external fun initHybrid(width: Int, height: Int): HybridData
}