feat: Create C++/OpenGL-based Video Pipeline for more efficient Recording and Frame Processing (#1721)

* Create `VideoPipeline` c++

* Remove folly C++ dependency

* Create `VideoPipeline` HybridClass

* Set up OpenGL

* Add outputs

* Update VideoPipeline.kt

* Bump `minSdkVersion` to `26`

* Create `VideoPipelineOutput`

* Create output funcs

* Set output pipelines

* Add FP/Recording on Output change

* Update VideoPipeline.cpp

* Create `PassThroughShader`

* Try to draw? I have honestly no idea

* fix: Fix `setFrameProcessor` name clash

* fix: Fix `high-res-sizes` being null

* Add preview output

* Create `OpenGLContext.cpp`

* Make screen red

* This _should_ work (MESSY)

* FINALLY RENDER TEXTURE

* Rotate

* Mirror

* Clean up a bit

* Add `getWidth()`/`getHeight()`

* Cleanup

* fix: Use uniforms instead of attributes

* Draw with passed rotation/mirror mode

* feat: Use SurfaceTexture's transformMatrix in OpenGL pipeline (#1727)

* feat: Use Transform Matrix from SurfaceTexture

* Rename

* feat: Fix OpenGL Shader

* Update VideoPipeline.kt

* Measure elapsed time

* fix: Fix low resolution

* Render to offscreen

* Render to every context

* Release `SurfaceTexture` on close

* Use one OpenGL context to render to multiple EGLSurfaces

* Clean up a bit

* fix: Fix recording pipeline not triggering

* fix: Synchronize close to prevent nulls

* Update OpenGLRenderer.cpp

* fix: Hardcode Android recorder size
Marc Rousavy 2023-08-29 17:52:03 +02:00 committed by GitHub
parent dfb86e174b
commit ea3686cb9a
30 changed files with 1024 additions and 157 deletions


@@ -7,10 +7,6 @@ set(BUILD_DIR ${CMAKE_SOURCE_DIR}/build)
 set(CMAKE_VERBOSE_MAKEFILE ON)
 set(CMAKE_CXX_STANDARD 17)
-# Folly
-include("${NODE_MODULES_DIR}/react-native/ReactAndroid/cmake-utils/folly-flags.cmake")
-add_compile_options(${folly_FLAGS})
 # Third party libraries (Prefabs)
 find_package(ReactAndroid REQUIRED CONFIG)
 find_package(fbjni REQUIRED CONFIG)
@@ -25,6 +21,10 @@ add_library(
   SHARED
   ../cpp/JSITypedArray.cpp
   src/main/cpp/VisionCamera.cpp
+  src/main/cpp/VideoPipeline.cpp
+  src/main/cpp/PassThroughShader.cpp
+  src/main/cpp/OpenGLContext.cpp
+  src/main/cpp/OpenGLRenderer.cpp
   # Frame Processor
   src/main/cpp/frameprocessor/FrameHostObject.cpp
   src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp
@@ -60,7 +60,6 @@ target_link_libraries(
   android                       # <-- Android JNI core
   ReactAndroid::jsi             # <-- RN: JSI
   ReactAndroid::reactnativejni  # <-- RN: React Native JNI bindings
-  ReactAndroid::folly_runtime   # <-- RN: For casting JSI <> Java objects
   fbjni::fbjni                  # <-- fbjni
 )


@@ -93,7 +93,7 @@ android {
   }
   defaultConfig {
-    minSdkVersion safeExtGet('minSdkVersion', 21)
+    minSdkVersion safeExtGet('minSdkVersion', 26)
     compileSdkVersion safeExtGet('compileSdkVersion', 33)
     targetSdkVersion safeExtGet('targetSdkVersion', 33)
     versionCode 1


@@ -14,10 +14,6 @@ org.gradle.configureondemand=true
 # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
 # org.gradle.parallel=true
 #Fri Feb 19 20:46:14 CET 2021
-VisionCamera_buildToolsVersion=30.0.0
-VisionCamera_compileSdkVersion=31
 VisionCamera_kotlinVersion=1.7.20
-VisionCamera_targetSdkVersion=31
-VisionCamera_ndkVersion=21.4.7075529
 android.enableJetifier=true
 android.useAndroidX=true


@@ -0,0 +1,133 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#include "OpenGLContext.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/native_window.h>
#include <android/log.h>
#include "OpenGLError.h"
namespace vision {
std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface(int width, int height) {
return std::unique_ptr<OpenGLContext>(new OpenGLContext(width, height));
}
OpenGLContext::OpenGLContext(int width, int height) {
_width = width;
_height = height;
}
OpenGLContext::~OpenGLContext() {
destroy();
}
void OpenGLContext::destroy() {
if (display != EGL_NO_DISPLAY) {
eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
if (offscreenSurface != EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
eglDestroySurface(display, offscreenSurface);
offscreenSurface = EGL_NO_SURFACE;
}
if (context != EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Context...");
eglDestroyContext(display, context);
context = EGL_NO_CONTEXT;
}
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Display...");
eglTerminate(display);
display = EGL_NO_DISPLAY;
config = nullptr;
}
}
void OpenGLContext::ensureOpenGL() {
bool successful;
// EGLDisplay
if (display == EGL_NO_DISPLAY) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLDisplay..");
display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (display == EGL_NO_DISPLAY) throw OpenGLError("Failed to get default OpenGL Display!");
EGLint major;
EGLint minor;
successful = eglInitialize(display, &major, &minor);
if (!successful) throw OpenGLError("Failed to initialize OpenGL!");
}
// EGLConfig
if (config == nullptr) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
EGLint attributes[] = {EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_ALPHA_SIZE, 8,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_DEPTH_SIZE, 0,
EGL_STENCIL_SIZE, 0,
EGL_NONE};
EGLint numConfigs;
successful = eglChooseConfig(display, attributes, &config, 1, &numConfigs);
if (!successful || numConfigs == 0) throw OpenGLError("Failed to choose OpenGL config!");
}
// EGLContext
if (context == EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLContext..");
EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
context = eglCreateContext(display, config, nullptr, contextAttributes);
if (context == EGL_NO_CONTEXT) throw OpenGLError("Failed to create OpenGL context!");
}
// EGLSurface
if (offscreenSurface == EGL_NO_SURFACE) {
// If we don't have a surface at all
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing %i x %i offscreen pbuffer EGLSurface..", _width, _height);
EGLint attributes[] = {EGL_WIDTH, _width,
EGL_HEIGHT, _height,
EGL_NONE};
offscreenSurface = eglCreatePbufferSurface(display, config, attributes);
if (offscreenSurface == EGL_NO_SURFACE) throw OpenGLError("Failed to create OpenGL Surface!");
}
}
void OpenGLContext::use() {
this->use(offscreenSurface);
}
void OpenGLContext::use(EGLSurface surface) {
if (surface == EGL_NO_SURFACE) throw OpenGLError("Cannot render to a null Surface!");
// 1. Make sure the OpenGL context is initialized
this->ensureOpenGL();
// 2. Make the OpenGL context current
bool successful = eglMakeCurrent(display, surface, surface, context);
if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to use current OpenGL context!");
// 3. Caller can now render to this surface
}
GLuint OpenGLContext::createTexture() {
// 1. Make sure the OpenGL context is initialized
this->ensureOpenGL();
// 2. Make the OpenGL context current
bool successful = eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to use current OpenGL context!");
GLuint textureId;
glGenTextures(1, &textureId);
return textureId;
}
} // namespace vision


@@ -0,0 +1,68 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <memory>
#include <functional>
#include "PassThroughShader.h"
namespace vision {
/**
* An OpenGL Context that can be used to render to different surfaces.
* By default, it creates an off-screen PixelBuffer surface.
*/
class OpenGLContext {
public:
/**
* Create a new instance of the OpenGLContext that draws to an off-screen PixelBuffer surface.
* This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
*/
static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface(int width, int height);
/**
* Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
*/
~OpenGLContext();
/**
* Use this OpenGL Context to render to the given EGLSurface.
* The given Surface is made current on the calling thread, so the caller can issue OpenGL draw calls targeting it afterwards.
*/
void use(EGLSurface surface);
/**
* Use this OpenGL Context to render to the offscreen PixelBuffer surface.
*/
void use();
/**
* Create a new texture on this context
*/
GLuint createTexture();
public:
EGLDisplay display = EGL_NO_DISPLAY;
EGLContext context = EGL_NO_CONTEXT;
EGLSurface offscreenSurface = EGL_NO_SURFACE;
EGLConfig config = nullptr;
private:
int _width = 0, _height = 0;
explicit OpenGLContext(int width, int height);
void destroy();
void ensureOpenGL();
private:
PassThroughShader _passThroughShader;
private:
static constexpr auto TAG = "OpenGLContext";
};
} // namespace vision
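
As a quick orientation to the new `OpenGLContext` API declared above, here is a minimal usage sketch. It is not part of this commit; the 1280x720 size and the `exampleOffscreenSetup` function are illustrative only, and the camera/looper wiring is omitted.

```cpp
// Hedged usage sketch (not part of this commit): drive OpenGLContext as declared above.
#include "OpenGLContext.h"
#include <GLES2/gl2.h>

namespace vision {

void exampleOffscreenSetup() {
  // Lazily initializes EGLDisplay/EGLConfig/EGLContext plus a pbuffer surface on first use.
  auto context = OpenGLContext::CreateWithOffscreenSurface(1280, 720);

  // Create a texture ID; a SurfaceTexture can later be attached to it as the camera input.
  GLuint inputTexture = context->createTexture();

  // Make the off-screen pbuffer surface current; GL draw calls can follow on this thread.
  context->use();

  (void) inputTexture;
}

} // namespace vision
```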


@@ -0,0 +1,74 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#include "OpenGLRenderer.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/native_window.h>
#include <android/log.h>
#include <utility>
#include "OpenGLError.h"
namespace vision {
std::unique_ptr<OpenGLRenderer> OpenGLRenderer::CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
return std::unique_ptr<OpenGLRenderer>(new OpenGLRenderer(std::move(context), surface));
}
OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
_context = std::move(context);
_outputSurface = surface;
_width = ANativeWindow_getWidth(surface);
_height = ANativeWindow_getHeight(surface);
}
OpenGLRenderer::~OpenGLRenderer() {
if (_outputSurface != nullptr) {
ANativeWindow_release(_outputSurface);
}
destroy();
}
void OpenGLRenderer::destroy() {
if (_context != nullptr && _surface != EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
eglDestroySurface(_context->display, _surface);
_surface = EGL_NO_SURFACE;
}
}
void OpenGLRenderer::renderTextureToSurface(GLuint textureId, float* transformMatrix) {
if (_surface == EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
_context->use();
_surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
}
// 1. Activate the OpenGL context for this surface
_context->use(_surface);
// 2. Set the viewport for rendering
glViewport(0, 0, _width, _height);
glDisable(GL_BLEND);
// 3. Bind the input texture
glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// 4. Draw it using the pass-through shader which also applies transforms
_passThroughShader.draw(textureId, transformMatrix);
// 5. Swap buffers to pass it to the window surface
eglSwapBuffers(_context->display, _surface);
}
} // namespace vision


@@ -0,0 +1,58 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <android/native_window.h>
#include <memory>
#include "PassThroughShader.h"
#include "OpenGLContext.h"
namespace vision {
class OpenGLRenderer {
public:
/**
* Create a new instance of the OpenGLRenderer that draws to an on-screen window surface.
* This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
*
* Note: The `surface` is considered moved, and the OpenGLRenderer will release it when it is deleted.
*/
static std::unique_ptr<OpenGLRenderer> CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
/**
* Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
*/
~OpenGLRenderer();
/**
* Renders the given Texture ID to the Surface
*/
void renderTextureToSurface(GLuint textureId, float* transformMatrix);
/**
* Destroys the OpenGL context. This needs to be called on the same thread that `use()` was called.
* After calling `destroy()`, it is legal to call `use()` again, which will re-construct everything.
*/
void destroy();
private:
explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
private:
int _width = 0, _height = 0;
std::shared_ptr<OpenGLContext> _context;
ANativeWindow* _outputSurface;
EGLSurface _surface = EGL_NO_SURFACE;
private:
PassThroughShader _passThroughShader;
private:
static constexpr auto TAG = "OpenGLRenderer";
};
} // namespace vision
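
To show how `OpenGLRenderer` and `OpenGLContext` are meant to be combined, here is a hedged sketch (not part of the commit). The `window` would normally come from `ANativeWindow_fromSurface()`, and `cameraTexture`/`transformMatrix` are assumed to be supplied by the caller, roughly as `VideoPipeline` does further below.

```cpp
// Hedged sketch (not part of this commit): one shared context, one renderer per output surface.
#include "OpenGLContext.h"
#include "OpenGLRenderer.h"
#include <android/native_window.h>

namespace vision {

void exampleRenderOneFrame(ANativeWindow* window, GLuint cameraTexture, float* transformMatrix) {
  // The context owns the EGL state; the renderer owns the window surface it draws into.
  auto context = OpenGLContext::CreateWithOffscreenSurface(1920, 1080);
  auto renderer = OpenGLRenderer::CreateWithWindowSurface(context, window);

  // Draws the external (OES) texture through the pass-through shader and swaps buffers.
  renderer->renderTextureToSurface(cameraTexture, transformMatrix);
}

} // namespace vision
```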


@@ -0,0 +1,119 @@
//
// Created by Marc Rousavy on 28.08.23.
//
#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <memory>
#include "OpenGLError.h"
#include <string>
namespace vision {
PassThroughShader::~PassThroughShader() {
if (_programId != NO_SHADER) {
glDeleteProgram(_programId);
_programId = NO_SHADER;
}
if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
}
void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {
// 1. Set up Shader Program
if (_programId == NO_SHADER) {
_programId = createProgram();
}
glUseProgram(_programId);
if (_vertexParameters.aPosition == NO_POSITION) {
_vertexParameters = {
.aPosition = glGetAttribLocation(_programId, "aPosition"),
.aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
.uTransformMatrix = glGetUniformLocation(_programId, "uTransformMatrix"),
};
_fragmentParameters = {
.uTexture = glGetUniformLocation(_programId, "uTexture"),
};
}
// 2. Set up Vertices Buffer
if (_vertexBuffer == NO_BUFFER) {
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
}
// 3. Pass all uniforms/attributes for vertex shader
glEnableVertexAttribArray(_vertexParameters.aPosition);
glVertexAttribPointer(_vertexParameters.aPosition,
2,
GL_FLOAT,
GL_FALSE,
sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, position)));
glEnableVertexAttribArray(_vertexParameters.aTexCoord);
glVertexAttribPointer(_vertexParameters.aTexCoord,
2,
GL_FLOAT,
GL_FALSE,
sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, texCoord)));
glUniformMatrix4fv(_vertexParameters.uTransformMatrix, 1, GL_FALSE, transformMatrix);
// 4. Pass texture to fragment shader
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
glUniform1i(_fragmentParameters.uTexture, 0);
// 5. Draw!
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode) {
GLuint shader = glCreateShader(shaderType);
if (shader == 0) throw OpenGLError("Failed to load shader!");
glShaderSource(shader, 1, &shaderCode, nullptr);
glCompileShader(shader);
GLint compileStatus = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
if (compileStatus == GL_FALSE) {
glDeleteShader(shader);
throw OpenGLError("Failed to compile shader!");
}
return shader;
}
GLuint PassThroughShader::createProgram() {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
GLuint program = glCreateProgram();
if (program == 0) throw OpenGLError("Failed to create pass-through program!");
glAttachShader(program, vertexShader);
if (glGetError() != GL_NO_ERROR) throw OpenGLError("Failed to attach Vertex Shader!");
glAttachShader(program, fragmentShader);
if (glGetError() != GL_NO_ERROR) throw OpenGLError("Failed to attach Fragment Shader!");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (!linkStatus) {
glDeleteProgram(program);
throw OpenGLError("Failed to load pass-through program!");
}
return program;
}
} // namespace vision


@@ -0,0 +1,81 @@
//
// Created by Marc Rousavy on 28.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
namespace vision {
#define NO_SHADER 0
#define NO_POSITION 0
#define NO_BUFFER 0
struct Vertex {
GLfloat position[2];
GLfloat texCoord[2];
};
class PassThroughShader {
public:
PassThroughShader() = default;
~PassThroughShader();
/**
* Draw the texture using this shader.
*/
void draw(GLuint textureId, float* transformMatrix);
private:
// Loading
static GLuint loadShader(GLenum shaderType, const char* shaderCode);
static GLuint createProgram();
private:
// Parameters
GLuint _programId = NO_SHADER;
GLuint _vertexBuffer = NO_BUFFER;
struct VertexParameters {
GLint aPosition = NO_POSITION;
GLint aTexCoord = NO_POSITION;
GLint uTransformMatrix = NO_POSITION;
} _vertexParameters;
struct FragmentParameters {
GLint uTexture = NO_POSITION;
} _fragmentParameters;
private:
// Statics
static constexpr Vertex VERTICES[] = {
{{-1.0f, -1.0f}, {0.0f, 0.0f}}, // bottom-left
{{1.0f, -1.0f}, {1.0f, 0.0f}}, // bottom-right
{{-1.0f, 1.0f}, {0.0f, 1.0f}}, // top-left
{{1.0f, 1.0f}, {1.0f, 1.0f}} // top-right
};
static constexpr char VERTEX_SHADER[] = R"(
attribute vec4 aPosition;
attribute vec2 aTexCoord;
uniform mat4 uTransformMatrix;
varying vec2 vTexCoord;
void main() {
gl_Position = aPosition;
vTexCoord = (uTransformMatrix * vec4(aTexCoord, 0.0, 1.0)).xy;
}
)";
static constexpr char FRAGMENT_SHADER[] = R"(
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTexCoord;
uniform samplerExternalOES uTexture;
void main() {
gl_FragColor = texture2D(uTexture, vTexCoord);
}
)";
};
} // namespace vision


@@ -0,0 +1,133 @@
//
// Created by Marc Rousavy on 25.08.23.
//
#include "VideoPipeline.h"
#include "OpenGLError.h"
#include <android/native_window_jni.h>
#include <android/log.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <EGL/egl.h>
namespace vision {
jni::local_ref<VideoPipeline::jhybriddata> VideoPipeline::initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height) {
return makeCxxInstance(jThis, width, height);
}
VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height): _javaPart(jni::make_global(jThis)) {
_width = width;
_height = height;
_context = OpenGLContext::CreateWithOffscreenSurface(width, height);
}
VideoPipeline::~VideoPipeline() {
// 1. Remove output surfaces
removeFrameProcessorOutputSurface();
removeRecordingSessionOutputSurface();
removePreviewOutputSurface();
// 2. Delete the input textures
if (_inputTextureId != NO_TEXTURE) {
glDeleteTextures(1, &_inputTextureId);
_inputTextureId = NO_TEXTURE;
}
// 3. Destroy all output surfaces
_previewOutput = nullptr;
_frameProcessorOutput = nullptr;
_recordingSessionOutput = nullptr;
// 4. Destroy the OpenGL context
_context = nullptr;
}
void VideoPipeline::removeFrameProcessorOutputSurface() {
if (_frameProcessorOutput) _frameProcessorOutput->destroy();
_frameProcessorOutput = nullptr;
}
void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeFrameProcessorOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
void VideoPipeline::removeRecordingSessionOutputSurface() {
if (_recordingSessionOutput) _recordingSessionOutput->destroy();
_recordingSessionOutput = nullptr;
}
void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeRecordingSessionOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
void VideoPipeline::removePreviewOutputSurface() {
if (_previewOutput) _previewOutput->destroy();
_previewOutput = nullptr;
}
void VideoPipeline::setPreviewOutputSurface(jobject surface) {
// 1. Delete existing output surface
removePreviewOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_previewOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
int VideoPipeline::getInputTextureId() {
if (_inputTextureId != NO_TEXTURE) return static_cast<int>(_inputTextureId);
_inputTextureId = _context->createTexture();
return static_cast<int>(_inputTextureId);
}
void VideoPipeline::onBeforeFrame() {
_context->use();
glBindTexture(GL_TEXTURE_EXTERNAL_OES, _inputTextureId);
}
void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
// Get the OpenGL transform Matrix (transforms, scales, rotations)
float transformMatrix[16];
transformMatrixParam->getRegion(0, 16, transformMatrix);
if (_previewOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
_previewOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
}
if (_frameProcessorOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
_frameProcessorOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
}
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
}
}
void VideoPipeline::registerNatives() {
registerHybrid({
makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
makeNativeMethod("setPreviewOutputSurface", VideoPipeline::setPreviewOutputSurface),
makeNativeMethod("removePreviewOutputSurface", VideoPipeline::removePreviewOutputSurface),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
makeNativeMethod("onFrame", VideoPipeline::onFrame),
});
}
} // namespace vision


@@ -0,0 +1,72 @@
//
// Created by Marc Rousavy on 25.08.23.
//
#pragma once
#include <jni.h>
#include <fbjni/fbjni.h>
#include <EGL/egl.h>
#include <android/native_window.h>
#include "PassThroughShader.h"
#include "OpenGLRenderer.h"
#include "OpenGLContext.h"
#include <memory>
namespace vision {
#define NO_TEXTURE 0
using namespace facebook;
class VideoPipeline: public jni::HybridClass<VideoPipeline> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/utils/VideoPipeline;";
static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height);
static void registerNatives();
public:
~VideoPipeline();
// -> SurfaceTexture input
int getInputTextureId();
// <- Frame Processor output
void setFrameProcessorOutputSurface(jobject surface);
void removeFrameProcessorOutputSurface();
// <- MediaRecorder output
void setRecordingSessionOutputSurface(jobject surface);
void removeRecordingSessionOutputSurface();
// <- Preview output
void setPreviewOutputSurface(jobject surface);
void removePreviewOutputSurface();
// Frame callbacks
void onBeforeFrame();
void onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrix);
private:
// Private constructor. Use `create(..)` to create new instances.
explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
private:
// Input Surface Texture
GLuint _inputTextureId = NO_TEXTURE;
int _width = 0;
int _height = 0;
// Output Contexts
std::shared_ptr<OpenGLContext> _context = nullptr;
std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
std::unique_ptr<OpenGLRenderer> _previewOutput = nullptr;
private:
friend HybridBase;
jni::global_ref<javaobject> _javaPart;
static constexpr auto TAG = "VideoPipeline";
};
} // namespace vision
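
The Kotlin `VideoPipeline` class further below drives these frame callbacks over JNI; the following hedged C++ sketch (not part of the commit) just spells out the per-frame call order against the class declared above. `examplePerFrame`, `pipeline` and `matrix` are illustrative names only.

```cpp
// Hedged sketch (not part of this commit): the per-frame call order driven from Kotlin over JNI.
#include "VideoPipeline.h"

void examplePerFrame(vision::VideoPipeline* pipeline,
                     facebook::jni::alias_ref<facebook::jni::JArrayFloat> matrix) {
  // 1. Lazily create the OES input texture the SurfaceTexture attaches to.
  int textureId = pipeline->getInputTextureId();

  // 2. Make the OpenGL context current and bind the input texture.
  pipeline->onBeforeFrame();

  // (On the Java side, SurfaceTexture.updateTexImage() and getTransformMatrix() run here.)

  // 3. Render the texture into every attached output (Preview, FrameProcessor, RecordingSession).
  pipeline->onFrame(matrix);

  (void) textureId;
}
```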


@@ -5,12 +5,14 @@
 #include "JVisionCameraProxy.h"
 #include "VisionCameraProxy.h"
 #include "SkiaRenderer.h"
+#include "VideoPipeline.h"

 JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
   return facebook::jni::initialize(vm, [] {
     vision::VisionCameraInstaller::registerNatives();
     vision::JVisionCameraProxy::registerNatives();
     vision::JVisionCameraScheduler::registerNatives();
+    vision::VideoPipeline::registerNatives();
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
     vision::JFrameProcessor::registerNatives();
 #endif


@@ -4,7 +4,6 @@
 #include "FrameHostObject.h"
-#include <android/log.h>
 #include <fbjni/fbjni.h>
 #include <jni.h>


@@ -27,9 +27,6 @@ class JSI_EXPORT FrameHostObject : public jsi::HostObject {
 public:
   jni::global_ref<JFrame> frame;
-
- private:
-  static auto constexpr TAG = "VisionCamera";
 };
 } // namespace vision


@@ -68,7 +68,6 @@ jni::local_ref<jni::JMap<jstring, jobject>> JSIJNIConversion::convertJSIObjectTo
       auto map = convertJSIObjectToJNIMap(runtime, valueAsObject);
       hashMap->put(key, map);
     }
   } else {
@@ -139,8 +138,7 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
       result.setProperty(runtime, key.c_str(), jsiValue);
     }
     return result;
-  } else if (object->isInstanceOf(JFrame::javaClassStatic())) {
+  }
+  if (object->isInstanceOf(JFrame::javaClassStatic())) {
     // Frame
     auto frame = static_ref_cast<JFrame>(object);


@@ -22,8 +22,8 @@ void JVisionCameraScheduler::dispatchAsync(const std::function<void()>& job) {
 void JVisionCameraScheduler::scheduleTrigger() {
   // 2. schedule `triggerUI` to be called on the java thread
-  static auto method = javaPart_->getClass()->getMethod<void()>("scheduleTrigger");
-  method(javaPart_.get());
+  static auto method = _javaPart->getClass()->getMethod<void()>("scheduleTrigger");
+  method(_javaPart.get());
 }

 void JVisionCameraScheduler::trigger() {


@@ -33,12 +33,12 @@ class JVisionCameraScheduler : public jni::HybridClass<JVisionCameraScheduler> {
 private:
   friend HybridBase;
-  jni::global_ref<JVisionCameraScheduler::javaobject> javaPart_;
+  jni::global_ref<JVisionCameraScheduler::javaobject> _javaPart;
   std::queue<std::function<void()>> _jobs;
   std::mutex _mutex;

   explicit JVisionCameraScheduler(jni::alias_ref<JVisionCameraScheduler::jhybridobject> jThis):
-      javaPart_(jni::make_global(jThis)) {}
+      _javaPart(jni::make_global(jThis)) {}

   // Schedules a call to `trigger` on the VisionCamera FP Thread
   void scheduleTrigger();


@@ -23,7 +23,6 @@ import com.mrousavy.camera.extensions.createPhotoCaptureRequest
 import com.mrousavy.camera.extensions.openCamera
 import com.mrousavy.camera.extensions.tryClose
 import com.mrousavy.camera.extensions.zoomed
-import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
 import com.mrousavy.camera.parsers.Flash
 import com.mrousavy.camera.parsers.Orientation
@@ -88,8 +87,17 @@ class CameraSession(private val context: Context,
   private val mutex = Mutex()
   private var isRunning = false
   private var enableTorch = false
+
+  // Video Outputs
   private var recording: RecordingSession? = null
-  private var frameProcessor: FrameProcessor? = null
+    set(value) {
+      field = value
+      updateVideoOutputs()
+    }
+  var frameProcessor: FrameProcessor? = null
+    set(value) {
+      field = value
+      updateVideoOutputs()
+    }

   override val coroutineContext: CoroutineContext = CameraQueues.cameraQueue.coroutineDispatcher
@@ -130,8 +138,14 @@
       Log.i(TAG, "Nothing changed in configuration, canceling..")
     }

-    this.cameraId = cameraId
+    // 1. Close previous outputs
+    this.outputs?.close()
+    // 2. Assign new outputs
     this.outputs = outputs
+    // 3. Update with existing render targets (surfaces)
+    updateVideoOutputs()
+
+    this.cameraId = cameraId

     launch {
       startRunning()
     }
@@ -183,8 +197,12 @@
     }
   }

-  fun setFrameProcessor(frameProcessor: FrameProcessor?) {
-    this.frameProcessor = frameProcessor
+  private fun updateVideoOutputs() {
+    val videoPipeline = outputs?.videoOutput?.videoPipeline ?: return
+    val previewOutput = outputs?.previewOutput
+    videoPipeline.setRecordingSessionOutput(this.recording)
+    videoPipeline.setFrameProcessorOutput(this.frameProcessor)
+    videoPipeline.setPreviewOutput(previewOutput?.surface)
   }

   suspend fun takePhoto(qualityPrioritization: QualityPrioritization,
@@ -229,20 +247,6 @@
     photoOutputSynchronizer.set(image.timestamp, image)
   }

-  override fun onVideoFrameCaptured(image: Image) {
-    // TODO: Correctly get orientation and everything
-    val frame = Frame(image, System.currentTimeMillis(), Orientation.PORTRAIT, false)
-    frame.incrementRefCount()
-
-    // Call (Skia-) Frame Processor
-    frameProcessor?.call(frame)
-
-    // Write Image to the Recording
-    recording?.appendImage(image)
-
-    frame.decrementRefCount()
-  }
-
   suspend fun startRecording(enableAudio: Boolean,
                              codec: VideoCodec,
                              fileType: VideoFileType,
@@ -253,7 +257,7 @@
     val outputs = outputs ?: throw CameraNotReadyError()
     val videoOutput = outputs.videoOutput ?: throw VideoNotEnabledError()

-    val recording = RecordingSession(context, enableAudio, videoOutput.size, fps, codec, orientation, fileType, callback, onError)
+    val recording = RecordingSession(context, videoOutput.size, enableAudio, fps, codec, orientation, fileType, callback, onError)
     recording.start()
     this.recording = recording
   }
@@ -497,7 +501,8 @@
     val captureRequest = camera.createCaptureRequest(template)
     outputs.previewOutput?.let { output ->
       Log.i(TAG, "Adding output surface ${output.outputType}..")
-      captureRequest.addTarget(output.surface)
+      // TODO: Add here again?
+      // captureRequest.addTarget(output.surface)
     }
     outputs.videoOutput?.let { output ->
       Log.i(TAG, "Adding output surface ${output.outputType}..")


@@ -96,7 +96,7 @@ class CameraView(context: Context) : FrameLayout(context) {
   internal var frameProcessor: FrameProcessor? = null
     set(value) {
       field = value
-      cameraSession.setFrameProcessor(frameProcessor)
+      cameraSession.frameProcessor = frameProcessor
     }

   private val inputOrientation: Orientation


@@ -180,11 +180,6 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
   @ReactMethod
   fun requestCameraPermission(promise: Promise) {
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
-      // API 21 and below always grants permission on app install
-      return promise.resolve(PermissionStatus.GRANTED.unionValue)
-    }
-
     val activity = reactApplicationContext.currentActivity
     if (activity is PermissionAwareActivity) {
       val currentRequestCode = RequestCode++
@@ -205,11 +200,6 @@ class CameraViewModule(reactContext: ReactApplicationContext): ReactContextBaseJ
   @ReactMethod
   fun requestMicrophonePermission(promise: Promise) {
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
-      // API 21 and below always grants permission on app install
-      return promise.resolve(PermissionStatus.GRANTED.unionValue)
-    }
-
     val activity = reactApplicationContext.currentActivity
     if (activity is PermissionAwareActivity) {
       val currentRequestCode = RequestCode++


@@ -63,10 +63,6 @@ fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List<Siz
 fun CameraCharacteristics.getPhotoSizes(format: Int): List<Size> {
   val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
   val sizes = config.getOutputSizes(format) ?: emptyArray()
-  val highResSizes = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
-    config.getHighResolutionOutputSizes(format)
-  } else {
-    null
-  } ?: emptyArray()
+  val highResSizes = config.getHighResolutionOutputSizes(format) ?: emptyArray()
   return sizes.plus(highResSizes).toList()
 }


@@ -8,7 +8,6 @@ import android.hardware.camera2.params.OutputConfiguration
 import android.hardware.camera2.params.SessionConfiguration
 import android.os.Build
 import android.util.Log
-import android.view.Surface
 import androidx.annotation.RequiresApi
 import com.mrousavy.camera.CameraQueues
 import com.mrousavy.camera.CameraSessionCannotBeConfiguredError
@@ -63,47 +62,35 @@ suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
     }
   }

-  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
-    // API >= 24
-    val outputConfigurations = arrayListOf<OutputConfiguration>()
-    outputs.previewOutput?.let { output ->
-      outputConfigurations.add(output.toOutputConfiguration(characteristics))
-    }
-    outputs.photoOutput?.let { output ->
-      outputConfigurations.add(output.toOutputConfiguration(characteristics))
-    }
-    outputs.videoOutput?.let { output ->
-      outputConfigurations.add(output.toOutputConfiguration(characteristics))
-    }
-    if (outputs.enableHdr == true && Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
-      val supportedProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
-      val hdrProfile = supportedProfiles?.bestProfile ?: supportedProfiles?.supportedProfiles?.firstOrNull()
-      if (hdrProfile != null) {
-        Log.i(TAG, "Camera $id: Using HDR Profile $hdrProfile...")
-        outputConfigurations.forEach { it.dynamicRangeProfile = hdrProfile }
-      } else {
-        Log.w(TAG, "Camera $id: HDR was enabled, but the device does not support any matching HDR profile!")
-      }
-    }
-    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
-      // API >=28
-      Log.i(TAG, "Using new API (>=28)")
-      val config = SessionConfiguration(sessionType.toSessionType(), outputConfigurations, queue.executor, callback)
-      this.createCaptureSession(config)
-    } else {
-      // API >=24
-      Log.i(TAG, "Using legacy API (<28)")
-      this.createCaptureSessionByOutputConfigurations(outputConfigurations, callback, queue.handler)
-    }
-  } else {
-    // API <24
-    Log.i(TAG, "Using legacy API (<24)")
-    val surfaces = arrayListOf<Surface>()
-    outputs.previewOutput?.let { surfaces.add(it.surface) }
-    outputs.photoOutput?.let { surfaces.add(it.surface) }
-    outputs.videoOutput?.let { surfaces.add(it.surface) }
-    this.createCaptureSession(surfaces, callback, queue.handler)
+  val outputConfigurations = arrayListOf<OutputConfiguration>()
+  outputs.previewOutput?.let { output ->
+    // TODO: add here again?
+    // outputConfigurations.add(output.toOutputConfiguration(characteristics))
+  }
+  outputs.photoOutput?.let { output ->
+    outputConfigurations.add(output.toOutputConfiguration(characteristics))
+  }
+  outputs.videoOutput?.let { output ->
+    outputConfigurations.add(output.toOutputConfiguration(characteristics))
+  }
+  if (outputs.enableHdr == true && Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
+    val supportedProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
+    val hdrProfile = supportedProfiles?.bestProfile ?: supportedProfiles?.supportedProfiles?.firstOrNull()
+    if (hdrProfile != null) {
+      Log.i(TAG, "Camera $id: Using HDR Profile $hdrProfile...")
+      outputConfigurations.forEach { it.dynamicRangeProfile = hdrProfile }
+    } else {
+      Log.w(TAG, "Camera $id: HDR was enabled, but the device does not support any matching HDR profile!")
+    }
+  }
+
+  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
+    Log.i(TAG, "Using new API (>=28)")
+    val config = SessionConfiguration(sessionType.toSessionType(), outputConfigurations, queue.executor, callback)
+    this.createCaptureSession(config)
+  } else {
+    Log.i(TAG, "Using legacy API (<28)")
+    this.createCaptureSessionByOutputConfigurations(outputConfigurations, callback, queue.handler)
   }
 }
}


@@ -1,7 +1,6 @@
 package com.mrousavy.camera.utils

 import android.content.Context
-import android.media.Image
 import android.media.ImageWriter
 import android.media.MediaCodec
 import android.media.MediaRecorder
@@ -13,12 +12,11 @@ import com.mrousavy.camera.RecorderError
 import com.mrousavy.camera.parsers.Orientation
 import com.mrousavy.camera.parsers.VideoCodec
 import com.mrousavy.camera.parsers.VideoFileType
-import com.mrousavy.camera.utils.outputs.CameraOutputs
 import java.io.File

 class RecordingSession(context: Context,
+                       val size: Size,
                        private val enableAudio: Boolean,
-                       private val videoSize: Size,
                        private val fps: Int? = null,
                        private val codec: VideoCodec = VideoCodec.H264,
                        private val orientation: Orientation,
@@ -40,14 +38,9 @@ class RecordingSession(context: Context,
   private val outputFile: File
   private var startTime: Long? = null
   private var imageWriter: ImageWriter? = null
-  val surface: Surface
+  val surface: Surface = MediaCodec.createPersistentInputSurface()

   init {
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
-      throw Error("Video Recording is only supported on Devices running Android version 23 (M) or newer.")
-    }
-    surface = MediaCodec.createPersistentInputSurface()
-
     outputFile = File.createTempFile("mrousavy", fileType.toExtension(), context.cacheDir)
@@ -61,7 +54,7 @@ class RecordingSession(context: Context,
     recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
     recorder.setOutputFile(outputFile.absolutePath)
     recorder.setVideoEncodingBitRate(VIDEO_BIT_RATE)
-    recorder.setVideoSize(videoSize.width, videoSize.height)
+    recorder.setVideoSize(size.height, size.width)
     if (fps != null) recorder.setVideoFrameRate(fps)

     Log.i(TAG, "Using $codec Video Codec..")
@@ -74,7 +67,7 @@ class RecordingSession(context: Context,
       recorder.setAudioChannels(AUDIO_CHANNELS)
     }
     recorder.setInputSurface(surface)
-    recorder.setOrientationHint(orientation.toDegrees())
+    //recorder.setOrientationHint(orientation.toDegrees())

     recorder.setOnErrorListener { _, what, extra ->
       Log.e(TAG, "MediaRecorder Error: $what ($extra)")
@@ -109,10 +102,8 @@ class RecordingSession(context: Context,
       recorder.stop()
       recorder.release()

-      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
-        imageWriter?.close()
-        imageWriter = null
-      }
+      imageWriter?.close()
+      imageWriter = null
     } catch (e: Error) {
       Log.e(TAG, "Failed to stop MediaRecorder!", e)
     }
@@ -125,9 +116,6 @@ class RecordingSession(context: Context,
   fun pause() {
     synchronized(this) {
-      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
-        throw Error("Pausing a recording is only supported on Devices running Android version 24 (N) or newer.")
-      }
       Log.i(TAG, "Pausing Recording Session..")
       recorder.pause()
     }
@@ -135,32 +123,13 @@ class RecordingSession(context: Context,
   fun resume() {
     synchronized(this) {
-      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
-        throw Error("Resuming a recording is only supported on Devices running Android version 24 (N) or newer.")
-      }
       Log.i(TAG, "Resuming Recording Session..")
       recorder.resume()
     }
   }

-  fun appendImage(image: Image) {
-    synchronized(this) {
-      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
-        throw Error("Video Recording is only supported on Devices running Android version 23 (M) or newer.")
-      }
-
-      // TODO: Correctly mirror/flip Image in OpenGL pipeline, otherwise flipping camera while recording results in inverted frames
-      if (imageWriter == null) {
-        imageWriter = ImageWriter.newInstance(surface, CameraOutputs.VIDEO_OUTPUT_BUFFER_SIZE)
-      }
-      image.timestamp = System.nanoTime()
-      imageWriter!!.queueInputImage(image)
-    }
-  }
-
   override fun toString(): String {
     val audio = if (enableAudio) "with audio" else "without audio"
-    return "${videoSize.width} x ${videoSize.height} @ $fps FPS $codec $fileType $orientation RecordingSession ($audio)"
+    return "${size.width} x ${size.height} @ $fps FPS $codec $fileType $orientation RecordingSession ($audio)"
   }
 }


@@ -0,0 +1,185 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.graphics.SurfaceTexture
import android.media.ImageReader
import android.media.ImageWriter
import android.media.MediaRecorder
import android.util.Log
import android.view.Surface
import com.facebook.jni.HybridData
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.parsers.Orientation
import java.io.Closeable
/**
* An OpenGL pipeline for streaming Camera Frames to one or more outputs.
* Currently, [VideoPipeline] can stream to a [FrameProcessor] and a [MediaRecorder].
*
* @param [width] The width of the Frames to stream (> 0)
* @param [height] The height of the Frames to stream (> 0)
* @param [format] The format of the Frames to stream. ([ImageFormat.PRIVATE], [ImageFormat.YUV_420_888] or [ImageFormat.JPEG])
*/
@Suppress("KotlinJniMissingFunction")
class VideoPipeline(val width: Int,
val height: Int,
val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
companion object {
private const val MAX_IMAGES = 5
private const val TAG = "VideoPipeline"
}
private val mHybridData: HybridData
private var openGLTextureId: Int? = null
private var transformMatrix = FloatArray(16)
private var isActive = true
// Output 1
private var frameProcessor: FrameProcessor? = null
private var imageReader: ImageReader? = null
// Output 2
private var recordingSession: RecordingSession? = null
// Output 3
private var previewSurface: Surface? = null
// Input
private val surfaceTexture: SurfaceTexture
val surface: Surface
init {
mHybridData = initHybrid(width, height)
surfaceTexture = SurfaceTexture(false)
surfaceTexture.setDefaultBufferSize(width, height)
surfaceTexture.setOnFrameAvailableListener(this)
surface = Surface(surfaceTexture)
}
override fun close() {
synchronized(this) {
isActive = false
imageReader?.close()
imageReader = null
frameProcessor = null
recordingSession = null
surfaceTexture.release()
mHybridData.resetNative()
}
}
override fun onFrameAvailable(surfaceTexture: SurfaceTexture) {
synchronized(this) {
if (!isActive) return@synchronized
// 1. Attach Surface to OpenGL context
if (openGLTextureId == null) {
openGLTextureId = getInputTextureId()
surfaceTexture.attachToGLContext(openGLTextureId!!)
Log.i(TAG, "Attached Texture to Context $openGLTextureId")
}
// 2. Prepare the OpenGL context (eglMakeCurrent)
onBeforeFrame()
// 3. Update the OpenGL texture
surfaceTexture.updateTexImage()
// 4. Get the transform matrix from the SurfaceTexture (rotations/scales applied by Camera)
surfaceTexture.getTransformMatrix(transformMatrix)
// 5. Draw it with applied rotation/mirroring
onFrame(transformMatrix)
}
}
private fun getImageReader(): ImageReader {
val imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
imageReader.setOnImageAvailableListener({ reader ->
Log.i("VideoPipeline", "ImageReader::onImageAvailable!")
val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
// TODO: Get correct orientation and isMirrored
val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
frame.incrementRefCount()
frameProcessor?.call(frame)
frame.decrementRefCount()
}, null)
return imageReader
}
/**
* Configures the Pipeline to also call the given [FrameProcessor].
* * If the [frameProcessor] is `null`, this output channel will be removed.
* * If the [frameProcessor] is not `null`, the [VideoPipeline] will create Frames
* using an [ImageReader] and call the [FrameProcessor] with those Frames.
*/
fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
synchronized(this) {
Log.i(TAG, "Setting $width x $height FrameProcessor Output...")
this.frameProcessor = frameProcessor
if (frameProcessor != null) {
if (this.imageReader == null) {
// 1. Create new ImageReader that just calls the Frame Processor
this.imageReader = getImageReader()
}
// 2. Configure OpenGL pipeline to stream Frames into the ImageReader's surface
setFrameProcessorOutputSurface(imageReader!!.surface)
} else {
// 1. Configure OpenGL pipeline to stop streaming Frames into the ImageReader's surface
removeFrameProcessorOutputSurface()
// 2. Close the ImageReader
this.imageReader?.close()
this.imageReader = null
}
}
}
/**
* Configures the Pipeline to also write Frames to a Surface from a [MediaRecorder].
* * If the [recordingSession] is `null`, this output channel will be removed.
* * If the [recordingSession] is not `null`, the [VideoPipeline] will write Frames to its Surface.
*/
fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
synchronized(this) {
Log.i(TAG, "Setting $width x $height RecordingSession Output...")
if (recordingSession != null) {
// Configure OpenGL pipeline to stream Frames into the Recording Session's surface
setRecordingSessionOutputSurface(recordingSession.surface)
this.recordingSession = recordingSession
} else {
// Configure OpenGL pipeline to stop streaming Frames into the Recording Session's surface
removeRecordingSessionOutputSurface()
this.recordingSession = null
}
}
}
fun setPreviewOutput(surface: Surface?) {
synchronized(this) {
Log.i(TAG, "Setting Preview Output...")
if (surface != null) {
setPreviewOutputSurface(surface)
this.previewSurface = surface
} else {
removePreviewOutputSurface()
this.previewSurface = null
}
}
}
private external fun getInputTextureId(): Int
private external fun onBeforeFrame()
private external fun onFrame(transformMatrix: FloatArray)
private external fun setFrameProcessorOutputSurface(surface: Any)
private external fun removeFrameProcessorOutputSurface()
private external fun setRecordingSessionOutputSurface(surface: Any)
private external fun removeRecordingSessionOutputSurface()
private external fun setPreviewOutputSurface(surface: Any)
private external fun removePreviewOutputSurface()
private external fun initHybrid(width: Int, height: Int): HybridData
}


@@ -1,11 +1,9 @@
 package com.mrousavy.camera.utils.outputs

 import android.graphics.ImageFormat
-import android.hardware.HardwareBuffer
 import android.hardware.camera2.CameraManager
 import android.media.Image
 import android.media.ImageReader
-import android.os.Build
 import android.util.Log
 import android.util.Size
 import android.view.Surface
@@ -14,6 +12,7 @@ import com.mrousavy.camera.extensions.closestToOrMax
 import com.mrousavy.camera.extensions.getPhotoSizes
 import com.mrousavy.camera.extensions.getPreviewSize
 import com.mrousavy.camera.extensions.getVideoSizes
+import com.mrousavy.camera.utils.VideoPipeline
 import java.io.Closeable

 class CameraOutputs(val cameraId: String,
@@ -25,7 +24,6 @@ class CameraOutputs(val cameraId: String,
                     val callback: Callback): Closeable {
   companion object {
     private const val TAG = "CameraOutputs"
-    const val VIDEO_OUTPUT_BUFFER_SIZE = 3
     const val PHOTO_OUTPUT_BUFFER_SIZE = 3
   }
@@ -39,14 +37,13 @@ class CameraOutputs(val cameraId: String,
   interface Callback {
     fun onPhotoCaptured(image: Image)
-    fun onVideoFrameCaptured(image: Image)
   }

   var previewOutput: SurfaceOutput? = null
     private set
   var photoOutput: ImageReaderOutput? = null
     private set
-  var videoOutput: SurfaceOutput? = null
+  var videoOutput: VideoPipelineOutput? = null
     private set

   val size: Int
@@ -118,23 +115,11 @@ class CameraOutputs(val cameraId: String,
     // Video output: High resolution repeating images (startRecording() or useFrameProcessor())
     if (video != null) {
-      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) throw Error("Video Recordings and/or Frame Processors are only available on API 29 and above!")
       val size = characteristics.getVideoSizes(cameraId, video.format).closestToOrMax(video.targetSize)
-      val flags = HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE or HardwareBuffer.USAGE_VIDEO_ENCODE
-      val imageReader = ImageReader.newInstance(size.width, size.height, video.format, VIDEO_OUTPUT_BUFFER_SIZE, flags)
-      imageReader.setOnImageAvailableListener({ reader ->
-        try {
-          val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
-          callback.onVideoFrameCaptured(image)
-        } catch (e: IllegalStateException) {
-          Log.e(TAG, "Failed to acquire a new Image, dropping a Frame.. The Frame Processor cannot keep up with the Camera's FPS!", e)
-        }
-      }, CameraQueues.videoQueue.handler)
+      val videoPipeline = VideoPipeline(size.width, size.height, video.format)

       Log.i(TAG, "Adding ${size.width}x${size.height} video output. (Format: ${video.format})")
-      videoOutput = ImageReaderOutput(imageReader, SurfaceOutput.OutputType.VIDEO)
+      videoOutput = VideoPipelineOutput(videoPipeline, SurfaceOutput.OutputType.VIDEO)
     }

     Log.i(TAG, "Prepared $size Outputs for Camera $cameraId!")


@@ -35,7 +35,6 @@ open class SurfaceOutput(val surface: Surface,
     }
   }

-  @RequiresApi(Build.VERSION_CODES.N)
   fun toOutputConfiguration(characteristics: CameraCharacteristics): OutputConfiguration {
     val result = OutputConfiguration(surface)
     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {


@@ -0,0 +1,22 @@
package com.mrousavy.camera.utils.outputs
import android.util.Log
import android.util.Size
import com.mrousavy.camera.utils.VideoPipeline
import java.io.Closeable
/**
* A [SurfaceOutput] that uses a [VideoPipeline] as its surface.
*/
class VideoPipelineOutput(val videoPipeline: VideoPipeline,
outputType: OutputType,
dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(videoPipeline.surface, Size(videoPipeline.width, videoPipeline.height), outputType, dynamicRangeProfile) {
override fun close() {
Log.i(TAG, "Closing ${videoPipeline.width}x${videoPipeline.height} Video Pipeline..")
videoPipeline.close()
}
override fun toString(): String {
return "$outputType (${videoPipeline.width} x ${videoPipeline.height} in format #${videoPipeline.format})"
}
}


@@ -75,7 +75,7 @@ Before opening an issue, make sure you try the following:
 2. Set `buildToolsVersion` to `33.0.0` or higher
 3. Set `compileSdkVersion` to `33` or higher
 4. Set `targetSdkVersion` to `33` or higher
-5. Set `minSdkVersion` to `21` or higher
+5. Set `minSdkVersion` to `26` or higher
 6. Set `ndkVersion` to `"23.1.7779620"` or higher
 7. Update the Gradle Build-Tools version to `7.3.1` or higher:
    ```


@@ -3,7 +3,7 @@
 buildscript {
   ext {
     buildToolsVersion = "33.0.0"
-    minSdkVersion = 21
+    minSdkVersion = 26
     compileSdkVersion = 33
     targetSdkVersion = 33
     ndkVersion = "23.1.7779620"