perf: Use ImageWriter instead of OpenGL Pipeline for faster processing (#1789)

* perf: Use `ImageWriter` instead of OpenGL Pipeline for faster processing

* chore: Remove C++ part of OpenGL pipeline

* Clean up

* Update README.md
Author: Marc Rousavy
Date: 2023-09-11 13:58:58 +02:00
Committed by: GitHub
parent 648c3638e8
commit 4e96eb77e0
15 changed files with 39 additions and 946 deletions
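
For context, the per-frame GPU hop removed here worked roughly as sketched below, condensed from the deleted VideoPipeline / OpenGLContext / OpenGLRenderer sources that follow (the window, matrix and 1920x1080 size are illustrative placeholders). Per the commit title, camera frames are instead forwarded with android.media.ImageWriter on the Java/Kotlin side, presumably by queueing camera images straight into the output Surfaces, which skips this extra EGL/GLES pass.

// Condensed sketch of the removed native path (illustrative names; not the new ImageWriter code).
#include <android/native_window.h>
#include "OpenGLContext.h"
#include "OpenGLRenderer.h"

void sketchRemovedOpenGLPath(ANativeWindow* recordingWindow, float* transformMatrix) {
  // One shared EGL context backed by a 1x1 off-screen pbuffer surface
  auto context = vision::OpenGLContext::CreateWithOffscreenSurface();
  // An external (SurfaceTexture-backed) texture the camera writes into
  OpenGLTexture camera = context->createTexture(OpenGLTexture::Type::ExternalOES, 1920, 1080);
  // A renderer bound to the MediaRecorder / FrameProcessor window surface
  auto output = vision::OpenGLRenderer::CreateWithWindowSurface(context, recordingWindow);
  // Pass-through shader draw + eglSwapBuffers into that surface
  output->renderTextureToSurface(camera, transformMatrix);
}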

View File

@@ -1,163 +0,0 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#include "OpenGLContext.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/log.h>
#include <android/native_window.h>
#include "OpenGLError.h"
namespace vision {
std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface() {
return std::unique_ptr<OpenGLContext>(new OpenGLContext());
}
OpenGLContext::~OpenGLContext() {
destroy();
}
void OpenGLContext::destroy() {
if (display != EGL_NO_DISPLAY) {
eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
if (offscreenSurface != EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
eglDestroySurface(display, offscreenSurface);
offscreenSurface = EGL_NO_SURFACE;
}
if (context != EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Context...");
eglDestroyContext(display, context);
context = EGL_NO_CONTEXT;
}
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Display...");
eglTerminate(display);
display = EGL_NO_DISPLAY;
config = nullptr;
}
}
void OpenGLContext::ensureOpenGL() {
bool successful;
// EGLDisplay
if (display == EGL_NO_DISPLAY) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLDisplay..");
display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (display == EGL_NO_DISPLAY)
throw OpenGLError("Failed to get default OpenGL Display!");
EGLint major;
EGLint minor;
successful = eglInitialize(display, &major, &minor);
if (!successful)
throw OpenGLError("Failed to initialize OpenGL!");
}
// EGLConfig
if (config == nullptr) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
EGLint attributes[] = {EGL_RENDERABLE_TYPE,
EGL_OPENGL_ES2_BIT,
EGL_SURFACE_TYPE,
EGL_WINDOW_BIT,
EGL_RED_SIZE,
8,
EGL_GREEN_SIZE,
8,
EGL_BLUE_SIZE,
8,
EGL_ALPHA_SIZE,
8,
EGL_DEPTH_SIZE,
0,
EGL_STENCIL_SIZE,
0,
EGL_NONE};
EGLint numConfigs;
successful = eglChooseConfig(display, attributes, &config, 1, &numConfigs);
if (!successful || numConfigs == 0)
throw OpenGLError("Failed to choose OpenGL config!");
}
// EGLContext
if (context == EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLContext..");
EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
context = eglCreateContext(display, config, nullptr, contextAttributes);
if (context == EGL_NO_CONTEXT)
throw OpenGLError("Failed to create OpenGL context!");
}
// EGLSurface
if (offscreenSurface == EGL_NO_SURFACE) {
// If we don't have a surface at all
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing 1x1 offscreen pbuffer EGLSurface..");
EGLint attributes[] = {EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE};
offscreenSurface = eglCreatePbufferSurface(display, config, attributes);
if (offscreenSurface == EGL_NO_SURFACE)
throw OpenGLError("Failed to create OpenGL Surface!");
}
}
void OpenGLContext::use() {
this->use(offscreenSurface);
}
void OpenGLContext::use(EGLSurface surface) {
if (surface == EGL_NO_SURFACE)
throw OpenGLError("Cannot render to a null Surface!");
// 1. Make sure the OpenGL context is initialized
this->ensureOpenGL();
// 2. Make the OpenGL context current
bool successful = eglMakeCurrent(display, surface, surface, context);
if (!successful || eglGetError() != EGL_SUCCESS)
throw OpenGLError("Failed to use current OpenGL context!");
// 3. Caller can now render to this surface
}
void OpenGLContext::flush() const {
bool successful = eglSwapBuffers(display, eglGetCurrentSurface(EGL_DRAW));
if (!successful || eglGetError() != EGL_SUCCESS)
throw OpenGLError("Failed to swap OpenGL buffers!");
}
OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width, int height) {
// 1. Make sure the OpenGL context is initialized
this->ensureOpenGL();
// 2. Make the OpenGL context current
bool successful = eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
if (!successful || eglGetError() != EGL_SUCCESS)
throw OpenGLError("Failed to use current OpenGL context!");
GLuint textureId;
glGenTextures(1, &textureId);
GLenum target;
switch (type) {
case OpenGLTexture::Type::ExternalOES:
target = GL_TEXTURE_EXTERNAL_OES;
break;
case OpenGLTexture::Type::Texture2D:
target = GL_TEXTURE_2D;
break;
default:
throw std::runtime_error("Invalid OpenGL Texture Type!");
}
glBindTexture(target, textureId);
glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
return {.id = textureId, .target = target, .width = width, .height = height};
}
} // namespace vision

View File

@@ -1,73 +0,0 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <functional>
#include <memory>
#include "OpenGLTexture.h"
#include "PassThroughShader.h"
namespace vision {
/**
* An OpenGL Context that can be used to render to different surfaces.
* By default, it creates an off-screen PixelBuffer surface.
*/
class OpenGLContext {
public:
/**
* Create a new instance of the OpenGLContext that draws to an off-screen PixelBuffer surface.
* This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
*/
static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface();
/**
 * Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called on.
*/
~OpenGLContext();
/**
* Use this OpenGL Context to render to the given EGLSurface.
 * This makes the Context and the given Surface current, so the caller can issue OpenGL draw calls against it.
*/
void use(EGLSurface surface);
/**
* Use this OpenGL Context to render to the offscreen PixelBuffer surface.
*/
void use();
/**
* Flushes all drawing operations by swapping the buffers and submitting the Frame to the GPU
*/
void flush() const;
/**
* Create a new texture on this context
*/
OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);
public:
EGLDisplay display = EGL_NO_DISPLAY;
EGLContext context = EGL_NO_CONTEXT;
EGLSurface offscreenSurface = EGL_NO_SURFACE;
EGLConfig config = nullptr;
private:
OpenGLContext() = default;
void destroy();
void ensureOpenGL();
private:
PassThroughShader _passThroughShader;
private:
static constexpr auto TAG = "OpenGLContext";
};
} // namespace vision
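
A minimal usage sketch of this class, following how the deleted VideoPipeline drives it (the texture size is an illustrative placeholder):

// Sketch: lazy EGL setup, off-screen pbuffer surface, texture creation, draw, swap.
auto context = vision::OpenGLContext::CreateWithOffscreenSurface();                  // no GL/EGL work happens yet
context->use();                                                                       // initializes display/config/context, makes the 1x1 pbuffer current
auto texture = context->createTexture(OpenGLTexture::Type::ExternalOES, 1920, 1080); // also binds the new texture
// ... issue GL draw calls against the current surface ...
context->flush();                                                                     // eglSwapBuffers on the current draw surface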

View File

@@ -1,34 +0,0 @@
//
// Created by Marc Rousavy on 09.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <stdexcept>
#include <string>
namespace vision {
inline std::string getEglErrorIfAny() {
EGLint error = glGetError();
if (error != GL_NO_ERROR)
return " Error: " + std::to_string(error);
error = eglGetError();
if (error != EGL_SUCCESS)
return " Error: " + std::to_string(error);
return "";
}
class OpenGLError : public std::runtime_error {
public:
explicit OpenGLError(const std::string&& message) : std::runtime_error(message + getEglErrorIfAny()) {}
static inline void checkIfError(const std::string&& message) {
auto error = getEglErrorIfAny();
if (error.length() > 0)
throw std::runtime_error(message + error);
}
};
} // namespace vision
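
Typical usage in the deleted pipeline: construct with a message (the current GL/EGL error code is appended automatically), or assert right after a call; `vertexShader` below is an illustrative placeholder:

// Sketch: throw with the current error appended, or check for one after a GL call.
GLuint program = glCreateProgram();
if (program == 0)
  throw vision::OpenGLError("Failed to create pass-through program!");
glAttachShader(program, vertexShader);
vision::OpenGLError::checkIfError("Failed to attach Vertex Shader!");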

View File

@@ -1,74 +0,0 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#include "OpenGLRenderer.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/log.h>
#include <android/native_window.h>
#include <utility>
#include "OpenGLError.h"
namespace vision {
std::unique_ptr<OpenGLRenderer> OpenGLRenderer::CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
return std::unique_ptr<OpenGLRenderer>(new OpenGLRenderer(std::move(context), surface));
}
OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
_context = std::move(context);
_outputSurface = surface;
_width = ANativeWindow_getWidth(surface);
_height = ANativeWindow_getHeight(surface);
}
OpenGLRenderer::~OpenGLRenderer() {
if (_outputSurface != nullptr) {
ANativeWindow_release(_outputSurface);
}
destroy();
}
void OpenGLRenderer::destroy() {
if (_context != nullptr && _surface != EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
eglDestroySurface(_context->display, _surface);
_surface = EGL_NO_SURFACE;
}
}
void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
if (_surface == EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
_context->use();
_surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
}
// 1. Activate the OpenGL context for this surface
_context->use(_surface);
// 2. Set the viewport for rendering
glViewport(0, 0, _width, _height);
glDisable(GL_BLEND);
// 3. Bind the input texture
glBindTexture(texture.target, texture.id);
glTexParameteri(texture.target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(texture.target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(texture.target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// 4. Draw it using the pass-through shader which also applies transforms
_passThroughShader.draw(texture, transformMatrix);
// 5. Swap buffers to pass it to the window surface
eglSwapBuffers(_context->display, _surface);
}
} // namespace vision

View File

@@ -1,61 +0,0 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#pragma once
#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <android/native_window.h>
#include <memory>
#include "OpenGLContext.h"
#include "OpenGLTexture.h"
namespace vision {
class OpenGLRenderer {
public:
/**
* Create a new instance of the OpenGLRenderer that draws to an on-screen window surface.
* This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
*
 * Note: The `surface` is considered moved; the OpenGLRenderer takes ownership and releases it
 * when it is destroyed.
*/
static std::unique_ptr<OpenGLRenderer> CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
/**
 * Destroy the OpenGL Renderer and release its window Surface. This needs to be called on the same thread that `renderTextureToSurface()` was called on.
*/
~OpenGLRenderer();
/**
* Renders the given Texture to the Surface
*/
void renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix);
/**
 * Destroys the EGL window Surface. This needs to be called on the same thread that
 * `renderTextureToSurface()` was called on. After calling `destroy()`, it is legal to call
 * `renderTextureToSurface()` again, which will re-create the Surface.
*/
void destroy();
private:
explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
private:
int _width = 0, _height = 0;
std::shared_ptr<OpenGLContext> _context;
ANativeWindow* _outputSurface;
EGLSurface _surface = EGL_NO_SURFACE;
private:
PassThroughShader _passThroughShader;
private:
static constexpr auto TAG = "OpenGLRenderer";
};
} // namespace vision
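
A sketch of how the deleted VideoPipeline wires a renderer to a Java Surface (`env`, `javaSurface`, `context`, `texture` and `matrix` are illustrative JNI/local placeholders):

// Sketch: wrap a Java Surface in an ANativeWindow and render an external texture into it.
ANativeWindow* window = ANativeWindow_fromSurface(env, javaSurface);              // e.g. the MediaRecorder Surface
auto renderer = vision::OpenGLRenderer::CreateWithWindowSurface(context, window); // renderer takes ownership of `window`
renderer->renderTextureToSurface(texture, matrix);                                // lazily creates the EGL window surface, draws, swaps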

View File

@@ -1,22 +0,0 @@
//
// Created by Marc Rousavy on 30.08.23.
//
#pragma once
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdexcept>
struct OpenGLTexture {
enum Type { Texture2D, ExternalOES };
// The ID of the texture as returned in glGenTextures(..)
GLuint id;
// GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
GLenum target;
// Width and height of the texture
int width = 0;
int height = 0;
};
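
Instances are filled in by `OpenGLContext::createTexture(..)` and passed by const reference to the renderer and shader; a minimal sketch (size values are illustrative):

// Sketch: an external camera texture, as the deleted OpenGLContext::createTexture(..) produces it.
GLuint id;
glGenTextures(1, &id);
OpenGLTexture texture = {.id = id, .target = GL_TEXTURE_EXTERNAL_OES, .width = 1920, .height = 1080};
glBindTexture(texture.target, texture.id);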

View File

@@ -1,111 +0,0 @@
//
// Created by Marc Rousavy on 28.08.23.
//
#include "PassThroughShader.h"
#include "OpenGLError.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <memory>
#include <string>
namespace vision {
PassThroughShader::~PassThroughShader() {
if (_programId != NO_SHADER) {
glDeleteProgram(_programId);
_programId = NO_SHADER;
}
if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
}
void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
// 1. Set up Shader Program
if (_programId == NO_SHADER) {
_programId = createProgram();
glUseProgram(_programId);
_vertexParameters = {
.aPosition = glGetAttribLocation(_programId, "aPosition"),
.aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
.uTransformMatrix = glGetUniformLocation(_programId, "uTransformMatrix"),
};
_fragmentParameters = {
.uTexture = glGetUniformLocation(_programId, "uTexture"),
};
}
glUseProgram(_programId);
// 2. Set up Vertices Buffer
if (_vertexBuffer == NO_BUFFER) {
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
}
// 3. Pass all uniforms/attributes for vertex shader
glEnableVertexAttribArray(_vertexParameters.aPosition);
glVertexAttribPointer(_vertexParameters.aPosition, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, position)));
glEnableVertexAttribArray(_vertexParameters.aTexCoord);
glVertexAttribPointer(_vertexParameters.aTexCoord, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, texCoord)));
glUniformMatrix4fv(_vertexParameters.uTransformMatrix, 1, GL_FALSE, transformMatrix);
// 4. Pass texture to fragment shader
glActiveTexture(GL_TEXTURE0);
glBindTexture(texture.target, texture.id);
glUniform1i(_fragmentParameters.uTexture, 0);
// 5. Draw!
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode) {
GLuint shader = glCreateShader(shaderType);
if (shader == 0)
throw OpenGLError("Failed to load shader!");
glShaderSource(shader, 1, &shaderCode, nullptr);
glCompileShader(shader);
GLint compileStatus = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
if (compileStatus == GL_FALSE) {
glDeleteShader(shader);
throw OpenGLError("Failed to compile shader!");
}
return shader;
}
GLuint PassThroughShader::createProgram() {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
GLuint program = glCreateProgram();
if (program == 0)
throw OpenGLError("Failed to create pass-through program!");
glAttachShader(program, vertexShader);
OpenGLError::checkIfError("Failed to attach Vertex Shader!");
glAttachShader(program, fragmentShader);
OpenGLError::checkIfError("Failed to attach Fragment Shader!");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (!linkStatus) {
glDeleteProgram(program);
throw OpenGLError("Failed to load pass-through program!");
}
return program;
}
} // namespace vision

View File

@@ -1,84 +0,0 @@
//
// Created by Marc Rousavy on 28.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include "OpenGLTexture.h"
namespace vision {
#define NO_SHADER 0
#define NO_POSITION 0
#define NO_BUFFER 0
struct Vertex {
GLfloat position[2];
GLfloat texCoord[2];
};
class PassThroughShader {
public:
PassThroughShader() = default;
~PassThroughShader();
/**
* Draw the texture using this shader.
* Note: At the moment, only EXTERNAL textures are supported by the Shader.
*/
void draw(const OpenGLTexture& texture, float* transformMatrix);
private:
// Loading
static GLuint loadShader(GLenum shaderType, const char* shaderCode);
static GLuint createProgram();
private:
// Parameters
GLuint _programId = NO_SHADER;
GLuint _vertexBuffer = NO_BUFFER;
struct VertexParameters {
GLint aPosition = NO_POSITION;
GLint aTexCoord = NO_POSITION;
GLint uTransformMatrix = NO_POSITION;
} _vertexParameters;
struct FragmentParameters {
GLint uTexture = NO_POSITION;
} _fragmentParameters;
private:
// Statics
static constexpr Vertex VERTICES[] = {
{{-1.0f, -1.0f}, {0.0f, 0.0f}}, // bottom-left
{{1.0f, -1.0f}, {1.0f, 0.0f}}, // bottom-right
{{-1.0f, 1.0f}, {0.0f, 1.0f}}, // top-left
{{1.0f, 1.0f}, {1.0f, 1.0f}} // top-right
};
static constexpr char VERTEX_SHADER[] = R"(
attribute vec4 aPosition;
attribute vec2 aTexCoord;
uniform mat4 uTransformMatrix;
varying vec2 vTexCoord;
void main() {
gl_Position = aPosition;
vTexCoord = (uTransformMatrix * vec4(aTexCoord, 0.0, 1.0)).xy;
}
)";
static constexpr char FRAGMENT_SHADER[] = R"(
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTexCoord;
uniform samplerExternalOES uTexture;
void main() {
gl_FragColor = texture2D(uTexture, vTexCoord);
}
)";
};
} // namespace vision
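
The shader compiles and links itself lazily on the first `draw()` call; the deleted OpenGLRenderer uses it like this (the texture must be of type `GL_TEXTURE_EXTERNAL_OES`, `transformMatrix` is typically the 4x4 matrix the Java side obtains from `SurfaceTexture.getTransformMatrix(..)`, and `width`/`height`/`externalTexture` are illustrative placeholders):

// Sketch: draw an external texture 1:1 onto the currently bound EGL surface.
vision::PassThroughShader shader;
glViewport(0, 0, width, height);               // size of the output surface
shader.draw(externalTexture, transformMatrix); // first call compiles/links the program, then a GL_TRIANGLE_STRIP draw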

View File

@@ -1,119 +0,0 @@
//
// Created by Marc Rousavy on 25.08.23.
//
#include "VideoPipeline.h"
#include "OpenGLError.h"
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <chrono>
#include "JFrameProcessor.h"
#include "OpenGLTexture.h"
namespace vision {
jni::local_ref<VideoPipeline::jhybriddata> VideoPipeline::initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height) {
return makeCxxInstance(jThis, width, height);
}
VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height) : _javaPart(jni::make_global(jThis)) {
_width = width;
_height = height;
_context = OpenGLContext::CreateWithOffscreenSurface();
}
VideoPipeline::~VideoPipeline() {
// 1. Remove output surfaces
removeFrameProcessorOutputSurface();
removeRecordingSessionOutputSurface();
// 2. Delete the input textures
if (_inputTexture != std::nullopt) {
glDeleteTextures(1, &_inputTexture->id);
_inputTexture = std::nullopt;
}
// 3. Destroy the OpenGL context
_context = nullptr;
}
void VideoPipeline::removeFrameProcessorOutputSurface() {
if (_frameProcessorOutput)
_frameProcessorOutput->destroy();
_frameProcessorOutput = nullptr;
}
void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeFrameProcessorOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
void VideoPipeline::removeRecordingSessionOutputSurface() {
if (_recordingSessionOutput)
_recordingSessionOutput->destroy();
_recordingSessionOutput = nullptr;
}
void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeRecordingSessionOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
int VideoPipeline::getInputTextureId() {
if (_inputTexture == std::nullopt) {
_inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
}
return static_cast<int>(_inputTexture->id);
}
void VideoPipeline::onBeforeFrame() {
_context->use();
glBindTexture(_inputTexture->target, _inputTexture->id);
}
void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
// Get the OpenGL transform Matrix (transforms, scales, rotations)
float transformMatrix[16];
transformMatrixParam->getRegion(0, 16, transformMatrix);
OpenGLTexture& texture = _inputTexture.value();
if (_frameProcessorOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
_frameProcessorOutput->renderTextureToSurface(texture, transformMatrix);
}
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
}
}
void VideoPipeline::registerNatives() {
registerHybrid({
makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
makeNativeMethod("onFrame", VideoPipeline::onFrame),
});
}
} // namespace vision

View File

@@ -1,66 +0,0 @@
//
// Created by Marc Rousavy on 25.08.23.
//
#pragma once
#include "OpenGLContext.h"
#include "OpenGLRenderer.h"
#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <android/native_window.h>
#include <fbjni/fbjni.h>
#include <jni.h>
#include <memory>
#include <optional>
namespace vision {
using namespace facebook;
class VideoPipeline : public jni::HybridClass<VideoPipeline> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/core/VideoPipeline;";
static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height);
static void registerNatives();
public:
~VideoPipeline();
// -> SurfaceTexture input
int getInputTextureId();
// <- Frame Processor output
void setFrameProcessorOutputSurface(jobject surface);
void removeFrameProcessorOutputSurface();
// <- MediaRecorder output
void setRecordingSessionOutputSurface(jobject surface);
void removeRecordingSessionOutputSurface();
// Frame callbacks
void onBeforeFrame();
void onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrix);
private:
// Private constructor. Use `create(..)` to create new instances.
explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
private:
// Input Surface Texture
std::optional<OpenGLTexture> _inputTexture = std::nullopt;
int _width = 0;
int _height = 0;
// Output Contexts
std::shared_ptr<OpenGLContext> _context = nullptr;
std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
private:
friend HybridBase;
jni::global_ref<javaobject> _javaPart;
static constexpr auto TAG = "VideoPipeline";
};
} // namespace vision
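
The Kotlin counterpart (`com.mrousavy.camera.core.VideoPipeline`) drives these natives. The per-frame order matters: `onBeforeFrame()` makes the context current and binds the input texture so the subsequent SurfaceTexture update on the Java side can write into it. A rough sketch of that sequence, written as direct C++ calls (`pipeline`, `recorderSurface` and `matrix` are illustrative placeholders):

// Sketch of the call order the Java side performs: set up once, then repeat per frame.
int textureId = pipeline->getInputTextureId();               // lazily creates the ExternalOES input texture
// Java side: wrap `textureId` in a SurfaceTexture and hand its Surface to the camera.
pipeline->setRecordingSessionOutputSurface(recorderSurface); // attach the MediaRecorder Surface (jobject)
// Per frame:
pipeline->onBeforeFrame();                                   // make the context current, bind the input texture
// Java side: surfaceTexture.updateTexImage() + surfaceTexture.getTransformMatrix(matrix)
pipeline->onFrame(matrix);                                   // render the texture to every attached output surface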

View File

@@ -1,7 +1,6 @@
#include "JFrameProcessor.h"
#include "JVisionCameraProxy.h"
#include "JVisionCameraScheduler.h"
#include "VideoPipeline.h"
#include "VisionCameraProxy.h"
#include <fbjni/fbjni.h>
#include <jni.h>
@@ -11,7 +10,6 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
vision::VisionCameraInstaller::registerNatives();
vision::JVisionCameraProxy::registerNatives();
vision::JVisionCameraScheduler::registerNatives();
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
vision::JFrameProcessor::registerNatives();
#endif