feat: Use C++ OpenGL GPU VideoPipeline again (#1836)
1. Reverts 4e96eb77e0 (PR #1789) to bring the C++ OpenGL GPU Pipeline back.
2. Fixes the "initHybrid JNI not found" error by loading the native JNI/C++ library in `VideoPipeline.kt`.
This PR has two downsides:
1. `pixelFormat="yuv"` does not work on Android, since OpenGL only renders in RGB.
2. OpenGL rendering is fast, but it adds overhead. I think for Camera -> Video Recording we shouldn't be using an entire OpenGL rendering pipeline.
The original plan was to use something similar to how it works on iOS by just passing GPU buffers around, but the android.media APIs just aren't as advanced yet. `ImageReader`/`ImageWriter` is way too buggy and doesn't really work with `MediaRecorder`/`MediaCodec`.
This sucks; I hope in the future we can use something like `AHardwareBuffer`s.
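For reference, here is a minimal sketch of what that buffer-passing approach could look like with the NDK's `AHardwareBuffer` API (hypothetical, not part of this commit; assumes NDK API level 26+):

```cpp
// Hypothetical sketch only - NOT part of this commit. Assumes NDK API level 26+.
#include <android/hardware_buffer.h>

AHardwareBuffer* allocateGpuBuffer(uint32_t width, uint32_t height) {
  AHardwareBuffer_Desc desc = {};
  desc.width = width;
  desc.height = height;
  desc.layers = 1;
  desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
  // GPU-sampleable and video-encodable: in theory the Camera could fill this
  // buffer and MediaCodec could consume it, with no OpenGL pass in between.
  desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_VIDEO_ENCODE;

  AHardwareBuffer* buffer = nullptr;
  if (AHardwareBuffer_allocate(&desc, &buffer) != 0) {
    return nullptr; // allocation failed
  }
  return buffer; // caller releases it with AHardwareBuffer_release(buffer)
}
```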
Parent: db5120e163
Commit: 9add0eb571
@@ -24,7 +24,7 @@ VisionCamera is a powerful and fast Camera component for React Native. It featur
 * 🔍 Smooth zooming (Reanimated)
 * ⏯️ Fast pause and resume
 * 🌓 HDR & Night modes
-* ⚡ Highly efficient C++/GPU buffers
+* ⚡ Custom C++/GPU accelerated video pipeline (OpenGL)
 
 Install VisionCamera from npm:
@@ -21,6 +21,10 @@ add_library(
         SHARED
         ../cpp/JSITypedArray.cpp
         src/main/cpp/VisionCamera.cpp
+        src/main/cpp/VideoPipeline.cpp
+        src/main/cpp/PassThroughShader.cpp
+        src/main/cpp/OpenGLContext.cpp
+        src/main/cpp/OpenGLRenderer.cpp
         # Frame Processor
         src/main/cpp/frameprocessor/FrameHostObject.cpp
         src/main/cpp/frameprocessor/FrameProcessorPluginHostObject.cpp

@@ -54,6 +58,8 @@ target_link_libraries(
         ReactAndroid::jsi # <-- RN: JSI
         ReactAndroid::reactnativejni # <-- RN: React Native JNI bindings
         fbjni::fbjni # <-- fbjni
+        GLESv2 # <-- OpenGL (for VideoPipeline)
+        EGL # <-- OpenGL (EGL) (for VideoPipeline)
 )
 
 # Optionally also add Frame Processors here
package/android/src/main/cpp/OpenGLContext.cpp (new file, 163 lines)

//
// Created by Marc Rousavy on 29.08.23.
//

#include "OpenGLContext.h"

#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

#include <android/log.h>
#include <android/native_window.h>

#include "OpenGLError.h"

namespace vision {

std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface() {
  return std::unique_ptr<OpenGLContext>(new OpenGLContext());
}

OpenGLContext::~OpenGLContext() {
  destroy();
}

void OpenGLContext::destroy() {
  if (display != EGL_NO_DISPLAY) {
    eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
    if (offscreenSurface != EGL_NO_SURFACE) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
      eglDestroySurface(display, offscreenSurface);
      offscreenSurface = EGL_NO_SURFACE;
    }
    if (context != EGL_NO_CONTEXT) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Context...");
      eglDestroyContext(display, context);
      context = EGL_NO_CONTEXT;
    }
    __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Display...");
    eglTerminate(display);
    display = EGL_NO_DISPLAY;
    config = nullptr;
  }
}

void OpenGLContext::ensureOpenGL() {
  bool successful;
  // EGLDisplay
  if (display == EGL_NO_DISPLAY) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLDisplay..");
    display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY)
      throw OpenGLError("Failed to get default OpenGL Display!");

    EGLint major;
    EGLint minor;
    successful = eglInitialize(display, &major, &minor);
    if (!successful)
      throw OpenGLError("Failed to initialize OpenGL!");
  }

  // EGLConfig
  if (config == nullptr) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
    EGLint attributes[] = {EGL_RENDERABLE_TYPE,
                           EGL_OPENGL_ES2_BIT,
                           EGL_SURFACE_TYPE,
                           EGL_WINDOW_BIT,
                           EGL_RED_SIZE,
                           8,
                           EGL_GREEN_SIZE,
                           8,
                           EGL_BLUE_SIZE,
                           8,
                           EGL_ALPHA_SIZE,
                           8,
                           EGL_DEPTH_SIZE,
                           0,
                           EGL_STENCIL_SIZE,
                           0,
                           EGL_NONE};
    EGLint numConfigs;
    successful = eglChooseConfig(display, attributes, &config, 1, &numConfigs);
    if (!successful || numConfigs == 0)
      throw OpenGLError("Failed to choose OpenGL config!");
  }

  // EGLContext
  if (context == EGL_NO_CONTEXT) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLContext..");
    EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
    context = eglCreateContext(display, config, nullptr, contextAttributes);
    if (context == EGL_NO_CONTEXT)
      throw OpenGLError("Failed to create OpenGL context!");
  }

  // EGLSurface
  if (offscreenSurface == EGL_NO_SURFACE) {
    // If we don't have a surface at all
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing 1x1 offscreen pbuffer EGLSurface..");
    EGLint attributes[] = {EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE};
    offscreenSurface = eglCreatePbufferSurface(display, config, attributes);
    if (offscreenSurface == EGL_NO_SURFACE)
      throw OpenGLError("Failed to create OpenGL Surface!");
  }
}

void OpenGLContext::use() {
  this->use(offscreenSurface);
}

void OpenGLContext::use(EGLSurface surface) {
  if (surface == EGL_NO_SURFACE)
    throw OpenGLError("Cannot render to a null Surface!");

  // 1. Make sure the OpenGL context is initialized
  this->ensureOpenGL();

  // 2. Make the OpenGL context current
  bool successful = eglMakeCurrent(display, surface, surface, context);
  if (!successful || eglGetError() != EGL_SUCCESS)
    throw OpenGLError("Failed to use current OpenGL context!");

  // 3. Caller can now render to this surface
}

void OpenGLContext::flush() const {
  bool successful = eglSwapBuffers(display, eglGetCurrentSurface(EGL_DRAW));
  if (!successful || eglGetError() != EGL_SUCCESS)
    throw OpenGLError("Failed to swap OpenGL buffers!");
}

OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width, int height) {
  // 1. Make sure the OpenGL context is initialized
  this->ensureOpenGL();

  // 2. Make the OpenGL context current
  bool successful = eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
  if (!successful || eglGetError() != EGL_SUCCESS)
    throw OpenGLError("Failed to use current OpenGL context!");

  GLuint textureId;
  glGenTextures(1, &textureId);

  GLenum target;
  switch (type) {
    case OpenGLTexture::Type::ExternalOES:
      target = GL_TEXTURE_EXTERNAL_OES;
      break;
    case OpenGLTexture::Type::Texture2D:
      target = GL_TEXTURE_2D;
      break;
    default:
      throw std::runtime_error("Invalid OpenGL Texture Type!");
  }
  glBindTexture(target, textureId);
  glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

  return {.id = textureId, .target = target, .width = width, .height = height};
}

} // namespace vision
package/android/src/main/cpp/OpenGLContext.h (new file, 73 lines)

//
// Created by Marc Rousavy on 29.08.23.
//

#pragma once

#include <EGL/egl.h>
#include <GLES2/gl2.h>

#include <functional>
#include <memory>

#include "OpenGLTexture.h"
#include "PassThroughShader.h"

namespace vision {

/**
 * An OpenGL Context that can be used to render to different surfaces.
 * By default, it creates an off-screen PixelBuffer surface.
 */
class OpenGLContext {
 public:
  /**
   * Create a new instance of the OpenGLContext that draws to an off-screen PixelBuffer surface.
   * This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
   */
  static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface();
  /**
   * Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
   */
  ~OpenGLContext();

  /**
   * Use this OpenGL Context to render to the given EGLSurface.
   * After the `renderFunc` returns, the default offscreen PixelBuffer surface becomes active again.
   */
  void use(EGLSurface surface);

  /**
   * Use this OpenGL Context to render to the offscreen PixelBuffer surface.
   */
  void use();

  /**
   * Flushes all drawing operations by swapping the buffers and submitting the Frame to the GPU
   */
  void flush() const;

  /**
   * Create a new texture on this context
   */
  OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);

 public:
  EGLDisplay display = EGL_NO_DISPLAY;
  EGLContext context = EGL_NO_CONTEXT;
  EGLSurface offscreenSurface = EGL_NO_SURFACE;
  EGLConfig config = nullptr;

 private:
  OpenGLContext() = default;
  void destroy();
  void ensureOpenGL();

 private:
  PassThroughShader _passThroughShader;

 private:
  static constexpr auto TAG = "OpenGLContext";
};

} // namespace vision
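To illustrate how `OpenGLContext` is meant to be driven, here is a minimal caller sketch (hypothetical, not part of the diff; `VideoPipeline.cpp` further below is the real consumer):

```cpp
// Hypothetical caller - not part of this commit.
auto context = vision::OpenGLContext::CreateWithOffscreenSurface();
context->use(); // lazily creates EGLDisplay/EGLConfig/EGLContext + a 1x1 pbuffer, then eglMakeCurrent
vision::OpenGLTexture texture = context->createTexture(vision::OpenGLTexture::Type::ExternalOES,
                                                       /* width */ 1920, /* height */ 1080);
context->flush(); // eglSwapBuffers on the currently bound draw surface
```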
package/android/src/main/cpp/OpenGLError.h (new file, 34 lines)

//
// Created by Marc Rousavy on 09.08.23.
//

#pragma once

#include <GLES2/gl2.h>
#include <stdexcept>
#include <string>

namespace vision {

inline std::string getEglErrorIfAny() {
  EGLint error = glGetError();
  if (error != GL_NO_ERROR)
    return " Error: " + std::to_string(error);
  error = eglGetError();
  if (error != EGL_SUCCESS)
    return " Error: " + std::to_string(error);
  return "";
}

class OpenGLError : public std::runtime_error {
 public:
  explicit OpenGLError(const std::string&& message) : std::runtime_error(message + getEglErrorIfAny()) {}

  static inline void checkIfError(const std::string&& message) {
    auto error = getEglErrorIfAny();
    if (error.length() > 0)
      throw std::runtime_error(message + error);
  }
};

} // namespace vision
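Usage is two-fold: constructing an `OpenGLError` appends the pending GL/EGL error code to the message, and `checkIfError` asserts after a GL call, as `PassThroughShader.cpp` below does:

```cpp
// Hypothetical caller - mirrors the pattern used in PassThroughShader.cpp below.
glAttachShader(program, vertexShader);
vision::OpenGLError::checkIfError("Failed to attach Vertex Shader!"); // throws if glGetError()/eglGetError() report anything
```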
package/android/src/main/cpp/OpenGLRenderer.cpp (new file, 74 lines)

//
// Created by Marc Rousavy on 29.08.23.
//

#include "OpenGLRenderer.h"

#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

#include <android/log.h>
#include <android/native_window.h>

#include <utility>

#include "OpenGLError.h"

namespace vision {

std::unique_ptr<OpenGLRenderer> OpenGLRenderer::CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
  return std::unique_ptr<OpenGLRenderer>(new OpenGLRenderer(std::move(context), surface));
}

OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
  _context = std::move(context);
  _outputSurface = surface;
  _width = ANativeWindow_getWidth(surface);
  _height = ANativeWindow_getHeight(surface);
}

OpenGLRenderer::~OpenGLRenderer() {
  if (_outputSurface != nullptr) {
    ANativeWindow_release(_outputSurface);
  }
  destroy();
}

void OpenGLRenderer::destroy() {
  if (_context != nullptr && _surface != EGL_NO_DISPLAY) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
    eglDestroySurface(_context->display, _surface);
    _surface = EGL_NO_SURFACE;
  }
}

void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
  if (_surface == EGL_NO_SURFACE) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
    _context->use();
    _surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
  }

  // 1. Activate the OpenGL context for this surface
  _context->use(_surface);

  // 2. Set the viewport for rendering
  glViewport(0, 0, _width, _height);
  glDisable(GL_BLEND);

  // 3. Bind the input texture
  glBindTexture(texture.target, texture.id);
  glTexParameteri(texture.target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(texture.target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(texture.target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  // 4. Draw it using the pass-through shader which also applies transforms
  _passThroughShader.draw(texture, transformMatrix);

  // 5. Swap buffers to pass it to the window surface
  eglSwapBuffers(_context->display, _surface);
}

} // namespace vision
package/android/src/main/cpp/OpenGLRenderer.h (new file, 61 lines)

//
// Created by Marc Rousavy on 29.08.23.
//

#pragma once

#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <android/native_window.h>
#include <memory>

#include "OpenGLContext.h"
#include "OpenGLTexture.h"

namespace vision {

class OpenGLRenderer {
 public:
  /**
   * Create a new instance of the OpenGLRenderer that draws to an on-screen window surface.
   * This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
   *
   * Note: The `surface` is considered moved, and the OpenGL context will release it when it is
   * being deleted.
   */
  static std::unique_ptr<OpenGLRenderer> CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
  /**
   * Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
   */
  ~OpenGLRenderer();

  /**
   * Renders the given Texture to the Surface
   */
  void renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix);

  /**
   * Destroys the OpenGL context. This needs to be called on the same thread that `use()` was
   * called. After calling `destroy()`, it is legal to call `use()` again, which will re-construct
   * everything.
   */
  void destroy();

 private:
  explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);

 private:
  int _width = 0, _height = 0;
  std::shared_ptr<OpenGLContext> _context;
  ANativeWindow* _outputSurface;
  EGLSurface _surface = EGL_NO_SURFACE;

 private:
  PassThroughShader _passThroughShader;

 private:
  static constexpr auto TAG = "OpenGLRenderer";
};

} // namespace vision
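A minimal sketch of the intended call sequence (hypothetical; it mirrors what `VideoPipeline.cpp` below does for its two output surfaces):

```cpp
// Hypothetical caller - not part of this commit. `env`, `javaSurface`, `context`,
// `texture` and `transformMatrix` are assumed to exist in the surrounding code.
ANativeWindow* window = ANativeWindow_fromSurface(env, javaSurface);
auto renderer = vision::OpenGLRenderer::CreateWithWindowSurface(context, window);
// The first call lazily creates the EGLSurface for the window, then draws into it:
renderer->renderTextureToSurface(texture, transformMatrix);
```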
package/android/src/main/cpp/OpenGLTexture.h (new file, 22 lines)

//
// Created by Marc Rousavy on 30.08.23.
//

#pragma once

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdexcept>

struct OpenGLTexture {
  enum Type { Texture2D, ExternalOES };

  // The ID of the texture as returned in glGenTextures(..)
  GLuint id;
  // GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
  GLenum target;

  // Width and height of the texture
  int width = 0;
  int height = 0;
};
package/android/src/main/cpp/PassThroughShader.cpp (new file, 111 lines)

//
// Created by Marc Rousavy on 28.08.23.
//

#include "PassThroughShader.h"
#include "OpenGLError.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <memory>
#include <string>

namespace vision {

PassThroughShader::~PassThroughShader() {
  if (_programId != NO_SHADER) {
    glDeleteProgram(_programId);
    _programId = NO_SHADER;
  }

  if (_vertexBuffer != NO_BUFFER) {
    glDeleteBuffers(1, &_vertexBuffer);
    _vertexBuffer = NO_BUFFER;
  }
}

void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
  // 1. Set up Shader Program
  if (_programId == NO_SHADER) {
    _programId = createProgram();
    glUseProgram(_programId);
    _vertexParameters = {
        .aPosition = glGetAttribLocation(_programId, "aPosition"),
        .aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
        .uTransformMatrix = glGetUniformLocation(_programId, "uTransformMatrix"),
    };
    _fragmentParameters = {
        .uTexture = glGetUniformLocation(_programId, "uTexture"),
    };
  }

  glUseProgram(_programId);

  // 2. Set up Vertices Buffer
  if (_vertexBuffer == NO_BUFFER) {
    glGenBuffers(1, &_vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
  }

  // 3. Pass all uniforms/attributes for vertex shader
  glEnableVertexAttribArray(_vertexParameters.aPosition);
  glVertexAttribPointer(_vertexParameters.aPosition, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
                        reinterpret_cast<void*>(offsetof(Vertex, position)));

  glEnableVertexAttribArray(_vertexParameters.aTexCoord);
  glVertexAttribPointer(_vertexParameters.aTexCoord, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
                        reinterpret_cast<void*>(offsetof(Vertex, texCoord)));

  glUniformMatrix4fv(_vertexParameters.uTransformMatrix, 1, GL_FALSE, transformMatrix);

  // 4. Pass texture to fragment shader
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(texture.target, texture.id);
  glUniform1i(_fragmentParameters.uTexture, 0);

  // 5. Draw!
  glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode) {
  GLuint shader = glCreateShader(shaderType);
  if (shader == 0)
    throw OpenGLError("Failed to load shader!");

  glShaderSource(shader, 1, &shaderCode, nullptr);
  glCompileShader(shader);
  GLint compileStatus = GL_FALSE;
  glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
  if (compileStatus == GL_FALSE) {
    glDeleteShader(shader);
    throw OpenGLError("Failed to compile shader!");
  }
  return shader;
}

GLuint PassThroughShader::createProgram() {
  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
  GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

  GLuint program = glCreateProgram();
  if (program == 0)
    throw OpenGLError("Failed to create pass-through program!");

  glAttachShader(program, vertexShader);
  OpenGLError::checkIfError("Failed to attach Vertex Shader!");

  glAttachShader(program, fragmentShader);
  OpenGLError::checkIfError("Failed to attach Fragment Shader!");

  glLinkProgram(program);
  GLint linkStatus = GL_FALSE;
  glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
  if (!linkStatus) {
    glDeleteProgram(program);
    throw OpenGLError("Failed to load pass-through program!");
  }
  return program;
}

} // namespace vision
package/android/src/main/cpp/PassThroughShader.h (new file, 84 lines)

//
// Created by Marc Rousavy on 28.08.23.
//

#pragma once

#include <EGL/egl.h>
#include <GLES2/gl2.h>

#include "OpenGLTexture.h"

namespace vision {

#define NO_SHADER 0
#define NO_POSITION 0
#define NO_BUFFER 0

struct Vertex {
  GLfloat position[2];
  GLfloat texCoord[2];
};

class PassThroughShader {
 public:
  PassThroughShader() = default;
  ~PassThroughShader();

  /**
   * Draw the texture using this shader.
   * Note: At the moment, only EXTERNAL textures are supported by the Shader.
   */
  void draw(const OpenGLTexture& texture, float* transformMatrix);

 private:
  // Loading
  static GLuint loadShader(GLenum shaderType, const char* shaderCode);
  static GLuint createProgram();

 private:
  // Parameters
  GLuint _programId = NO_SHADER;
  GLuint _vertexBuffer = NO_BUFFER;
  struct VertexParameters {
    GLint aPosition = NO_POSITION;
    GLint aTexCoord = NO_POSITION;
    GLint uTransformMatrix = NO_POSITION;
  } _vertexParameters;
  struct FragmentParameters {
    GLint uTexture = NO_POSITION;
  } _fragmentParameters;

 private:
  // Statics
  static constexpr Vertex VERTICES[] = {
      {{-1.0f, -1.0f}, {0.0f, 0.0f}}, // bottom-left
      {{1.0f, -1.0f}, {1.0f, 0.0f}}, // bottom-right
      {{-1.0f, 1.0f}, {0.0f, 1.0f}}, // top-left
      {{1.0f, 1.0f}, {1.0f, 1.0f}} // top-right
  };

  static constexpr char VERTEX_SHADER[] = R"(
    attribute vec4 aPosition;
    attribute vec2 aTexCoord;
    uniform mat4 uTransformMatrix;
    varying vec2 vTexCoord;

    void main() {
      gl_Position = aPosition;
      vTexCoord = (uTransformMatrix * vec4(aTexCoord, 0.0, 1.0)).xy;
    }
  )";
  static constexpr char FRAGMENT_SHADER[] = R"(
    #extension GL_OES_EGL_image_external : require
    precision mediump float;
    varying vec2 vTexCoord;
    uniform samplerExternalOES uTexture;

    void main() {
      gl_FragColor = texture2D(uTexture, vTexCoord);
    }
  )";
};

} // namespace vision
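The four `VERTICES` form a full-screen quad in triangle-strip order, and `uTransformMatrix` is applied to the texture coordinates rather than the positions, which is how the SurfaceTexture's rotation/mirroring reaches the shader. As an illustration with hypothetical values (not from this commit), a plain vertical flip, which is the kind of matrix a `SurfaceTexture` typically reports, would look like this in OpenGL's column-major layout:

```cpp
// Illustration only: a column-major 4x4 matrix that maps v -> 1 - v (vertical flip),
// similar to what SurfaceTexture.getTransformMatrix() typically returns.
float exampleTransform[16] = {
    1.0f,  0.0f, 0.0f, 0.0f, // column 0
    0.0f, -1.0f, 0.0f, 0.0f, // column 1
    0.0f,  0.0f, 1.0f, 0.0f, // column 2
    0.0f,  1.0f, 0.0f, 1.0f, // column 3 (translation)
};
shader.draw(texture, exampleTransform); // 'shader' and 'texture' assumed to exist
```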
package/android/src/main/cpp/VideoPipeline.cpp (new file, 119 lines)

//
// Created by Marc Rousavy on 25.08.23.
//

#include "VideoPipeline.h"
#include "OpenGLError.h"

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/native_window_jni.h>

#include <chrono>

#include "JFrameProcessor.h"
#include "OpenGLTexture.h"

namespace vision {

jni::local_ref<VideoPipeline::jhybriddata> VideoPipeline::initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height) {
  return makeCxxInstance(jThis, width, height);
}

VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height) : _javaPart(jni::make_global(jThis)) {
  _width = width;
  _height = height;
  _context = OpenGLContext::CreateWithOffscreenSurface();
}

VideoPipeline::~VideoPipeline() {
  // 1. Remove output surfaces
  removeFrameProcessorOutputSurface();
  removeRecordingSessionOutputSurface();
  // 2. Delete the input textures
  if (_inputTexture != std::nullopt) {
    glDeleteTextures(1, &_inputTexture->id);
    _inputTexture = std::nullopt;
  }
  // 3. Destroy the OpenGL context
  _context = nullptr;
}

void VideoPipeline::removeFrameProcessorOutputSurface() {
  if (_frameProcessorOutput)
    _frameProcessorOutput->destroy();
  _frameProcessorOutput = nullptr;
}

void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
  // 1. Delete existing output surface
  removeFrameProcessorOutputSurface();

  // 2. Set new output surface if it is not null
  ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
  _frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}

void VideoPipeline::removeRecordingSessionOutputSurface() {
  if (_recordingSessionOutput)
    _recordingSessionOutput->destroy();
  _recordingSessionOutput = nullptr;
}

void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
  // 1. Delete existing output surface
  removeRecordingSessionOutputSurface();

  // 2. Set new output surface if it is not null
  ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
  _recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}

int VideoPipeline::getInputTextureId() {
  if (_inputTexture == std::nullopt) {
    _inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
  }

  return static_cast<int>(_inputTexture->id);
}

void VideoPipeline::onBeforeFrame() {
  _context->use();

  glBindTexture(_inputTexture->target, _inputTexture->id);
}

void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
  // Get the OpenGL transform Matrix (transforms, scales, rotations)
  float transformMatrix[16];
  transformMatrixParam->getRegion(0, 16, transformMatrix);

  OpenGLTexture& texture = _inputTexture.value();

  if (_frameProcessorOutput) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
    _frameProcessorOutput->renderTextureToSurface(texture, transformMatrix);
  }
  if (_recordingSessionOutput) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
    _recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
  }
}

void VideoPipeline::registerNatives() {
  registerHybrid({
      makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
      makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
      makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
      makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
      makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
      makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
      makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
      makeNativeMethod("onFrame", VideoPipeline::onFrame),
  });
}

} // namespace vision
package/android/src/main/cpp/VideoPipeline.h (new file, 66 lines)

//
// Created by Marc Rousavy on 25.08.23.
//

#pragma once

#include "OpenGLContext.h"
#include "OpenGLRenderer.h"
#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <android/native_window.h>
#include <fbjni/fbjni.h>
#include <jni.h>
#include <memory>
#include <optional>

namespace vision {

using namespace facebook;

class VideoPipeline : public jni::HybridClass<VideoPipeline> {
 public:
  static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/core/VideoPipeline;";
  static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height);
  static void registerNatives();

 public:
  ~VideoPipeline();

  // -> SurfaceTexture input
  int getInputTextureId();

  // <- Frame Processor output
  void setFrameProcessorOutputSurface(jobject surface);
  void removeFrameProcessorOutputSurface();

  // <- MediaRecorder output
  void setRecordingSessionOutputSurface(jobject surface);
  void removeRecordingSessionOutputSurface();

  // Frame callbacks
  void onBeforeFrame();
  void onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrix);

 private:
  // Private constructor. Use `create(..)` to create new instances.
  explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);

 private:
  // Input Surface Texture
  std::optional<OpenGLTexture> _inputTexture = std::nullopt;
  int _width = 0;
  int _height = 0;

  // Output Contexts
  std::shared_ptr<OpenGLContext> _context = nullptr;
  std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
  std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;

 private:
  friend HybridBase;
  jni::global_ref<javaobject> _javaPart;
  static constexpr auto TAG = "VideoPipeline";
};

} // namespace vision
@@ -1,6 +1,7 @@
 #include "JFrameProcessor.h"
 #include "JVisionCameraProxy.h"
 #include "JVisionCameraScheduler.h"
+#include "VideoPipeline.h"
 #include "VisionCameraProxy.h"
 #include <fbjni/fbjni.h>
 #include <jni.h>

@@ -10,6 +11,7 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
   vision::VisionCameraInstaller::registerNatives();
   vision::JVisionCameraProxy::registerNatives();
   vision::JVisionCameraScheduler::registerNatives();
+  vision::VideoPipeline::registerNatives();
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
   vision::JFrameProcessor::registerNatives();
 #endif
@@ -4,12 +4,27 @@ import com.mrousavy.camera.core.outputs.CameraOutputs
 import com.mrousavy.camera.parsers.CameraDeviceError
 
 abstract class CameraError(
-  // example: "permission"
+  /**
+   * The domain of the error. Error domains are used to group errors.
+   *
+   * Example: "permission"
+   */
   val domain: String,
-  // example: "microphone-permission-denied"
+  /**
+   * The id of the error. Errors are uniquely identified under a given domain.
+   *
+   * Example: "microphone-permission-denied"
+   */
   val id: String,
-  // example: "The microphone permission was denied!"
+  /**
+   * A detailed error description of "what went wrong".
+   *
+   * Example: "The microphone permission was denied!"
+   */
   message: String,
+  /**
+   * A throwable that caused this error.
+   */
  cause: Throwable? = null
 ) : Throwable("[$domain/$id] $message", cause)

@@ -31,6 +46,13 @@ class NoCameraDeviceError :
   CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
 class PixelFormatNotSupportedError(format: String) :
   CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
+class PixelFormatNotSupportedInVideoPipelineError(format: String) :
+  CameraError(
+    "device",
+    "pixel-format-not-supported",
+    "The pixelFormat $format is currently not supported in the VideoPipeline! " +
+      "See this issue for more details ($4.000 bounty!): https://github.com/mrousavy/react-native-vision-camera/issues/1837"
+  )
 
 class CameraNotReadyError :
   CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
@@ -1,84 +1,203 @@
 package com.mrousavy.camera.core
 
 import android.graphics.ImageFormat
+import android.graphics.SurfaceTexture
 import android.media.ImageReader
-import android.media.ImageWriter
 import android.util.Log
 import android.view.Surface
+import com.facebook.jni.HybridData
 import com.mrousavy.camera.CameraQueues
+import com.mrousavy.camera.PixelFormatNotSupportedInVideoPipelineError
 import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
 import com.mrousavy.camera.parsers.Orientation
+import com.mrousavy.camera.parsers.PixelFormat
 import java.io.Closeable
 
-@Suppress("JoinDeclarationAndAssignment")
+/**
+ * An OpenGL pipeline for streaming Camera Frames to one or more outputs.
+ * Currently, [VideoPipeline] can stream to a [FrameProcessor] and a [MediaRecorder].
+ *
+ * @param [width] The width of the Frames to stream (> 0)
+ * @param [height] The height of the Frames to stream (> 0)
+ * @param [format] The format of the Frames to stream. ([ImageFormat.PRIVATE], [ImageFormat.YUV_420_888] or [ImageFormat.JPEG])
+ */
+@Suppress("KotlinJniMissingFunction")
 class VideoPipeline(val width: Int, val height: Int, val format: Int = ImageFormat.PRIVATE, private val isMirrored: Boolean = false) :
-  ImageReader.OnImageAvailableListener,
+  SurfaceTexture.OnFrameAvailableListener,
   Closeable {
   companion object {
     private const val MAX_IMAGES = 3
     private const val TAG = "VideoPipeline"
+
+    init {
+      try {
+        System.loadLibrary("VisionCamera")
+      } catch (e: UnsatisfiedLinkError) {
+        Log.e(
+          TAG,
+          "Failed to load VisionCamera C++ library! " +
+            "OpenGL GPU VideoPipeline cannot be used.",
+          e
+        )
+        throw e
+      }
+    }
   }
 
+  private val mHybridData: HybridData
+  private var openGLTextureId: Int? = null
+  private var transformMatrix = FloatArray(16)
+  private var isActive = true
+
   // Output 1
   private var frameProcessor: FrameProcessor? = null
+  private var imageReader: ImageReader? = null
 
   // Output 2
   private var recordingSession: RecordingSession? = null
-  private var recordingSessionImageWriter: ImageWriter? = null
 
   // Input
-  private val imageReader: ImageReader
+  private val surfaceTexture: SurfaceTexture
   val surface: Surface
 
   init {
-    imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
-    imageReader.setOnImageAvailableListener(this, CameraQueues.videoQueue.handler)
-    surface = imageReader.surface
+    Log.i(
+      TAG,
+      "Initializing $width x $height Video Pipeline " +
+        "(format: ${PixelFormat.fromImageFormat(format)} #$format)"
+    )
+    // TODO: We currently use OpenGL for the Video Pipeline.
+    //  OpenGL only works in the RGB (RGBA_8888; 0x23) pixel-format, so we cannot
+    //  override the pixel-format to something like YUV or PRIVATE.
+    //  This absolutely sucks and I would prefer to replace the OpenGL pipeline with
+    //  something similar to how iOS works where we just pass GPU buffers around,
+    //  but android.media APIs are just not as advanced yet.
+    //  For example, ImageReader/ImageWriter is way too buggy and does not work with MediaRecorder.
+    //  See this issue ($4.000 bounty!) for more details:
+    //  https://github.com/mrousavy/react-native-vision-camera/issues/1837
+    if (format != ImageFormat.PRIVATE && format != 0x23) {
+      throw PixelFormatNotSupportedInVideoPipelineError(PixelFormat.fromImageFormat(format).unionValue)
+    }
+    mHybridData = initHybrid(width, height)
+    surfaceTexture = SurfaceTexture(false)
+    surfaceTexture.setDefaultBufferSize(width, height)
+    surfaceTexture.setOnFrameAvailableListener(this)
+    surface = Surface(surfaceTexture)
   }
 
   override fun close() {
     synchronized(this) {
-      imageReader.close()
+      isActive = false
+      imageReader?.close()
+      imageReader = null
       frameProcessor = null
-      recordingSessionImageWriter?.close()
-      recordingSessionImageWriter = null
       recordingSession = null
+      surfaceTexture.release()
+      mHybridData.resetNative()
     }
   }
 
-  fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
-    this.frameProcessor = frameProcessor
-  }
-
-  fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
-    synchronized(this) {
-      this.recordingSessionImageWriter?.close()
-      this.recordingSessionImageWriter = null
-      this.recordingSession = recordingSession
-
-      if (recordingSession != null) {
-        this.recordingSessionImageWriter = ImageWriter.newInstance(recordingSession.surface, MAX_IMAGES)
-      }
-    }
-  }
-
-  override fun onImageAvailable(reader: ImageReader) {
-    val image = reader.acquireLatestImage()
-    if (image == null) {
-      Log.w(TAG, "ImageReader failed to acquire a new image!")
-      return
-    }
-
-    // If we have a Frame Processor, call it
-    frameProcessor?.let { fp ->
-      val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, isMirrored)
-      frame.incrementRefCount()
-      fp.call(frame)
-      frame.decrementRefCount()
-    }
-
-    // If we have a RecordingSession, pass the image through
-    recordingSessionImageWriter?.queueInputImage(image)
-  }
+  override fun onFrameAvailable(surfaceTexture: SurfaceTexture) {
+    synchronized(this) {
+      if (!isActive) return@synchronized
+
+      // 1. Attach Surface to OpenGL context
+      if (openGLTextureId == null) {
+        openGLTextureId = getInputTextureId()
+        surfaceTexture.attachToGLContext(openGLTextureId!!)
+        Log.i(TAG, "Attached Texture to Context $openGLTextureId")
+      }
+
+      // 2. Prepare the OpenGL context (eglMakeCurrent)
+      onBeforeFrame()
+
+      // 3. Update the OpenGL texture
+      surfaceTexture.updateTexImage()
+
+      // 4. Get the transform matrix from the SurfaceTexture (rotations/scales applied by Camera)
+      surfaceTexture.getTransformMatrix(transformMatrix)
+
+      // 5. Draw it with applied rotation/mirroring
+      onFrame(transformMatrix)
+    }
+  }
+
+  private fun getImageReader(): ImageReader {
+    if (format != ImageFormat.PRIVATE) {
+      Log.w(
+        TAG,
+        "Warning: pixelFormat \"${PixelFormat.fromImageFormat(format).unionValue}\" might " +
+          "not be supported on this device because the C++ OpenGL GPU Video Pipeline operates in RGBA_8888. " +
+          "I wanted to use an ImageReader -> ImageWriter setup for this, but I couldn't get it to work. " +
+          "See this PR for more details: https://github.com/mrousavy/react-native-vision-camera/pull/1836"
+      )
+    }
+    val imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
+    imageReader.setOnImageAvailableListener({ reader ->
+      Log.i("VideoPipeline", "ImageReader::onImageAvailable!")
+      val image = reader.acquireNextImage() ?: return@setOnImageAvailableListener
+
+      // TODO: Get correct orientation and isMirrored
+      val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, isMirrored)
+      frame.incrementRefCount()
+      frameProcessor?.call(frame)
+      frame.decrementRefCount()
+    }, CameraQueues.videoQueue.handler)
+    return imageReader
+  }
+
+  /**
+   * Configures the Pipeline to also call the given [FrameProcessor] (or null).
+   */
+  fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
+    synchronized(this) {
+      Log.i(TAG, "Setting $width x $height FrameProcessor Output...")
+      this.frameProcessor = frameProcessor
+
+      if (frameProcessor != null) {
+        if (this.imageReader == null) {
+          // 1. Create new ImageReader that just calls the Frame Processor
+          this.imageReader = getImageReader()
+        }
+
+        // 2. Configure OpenGL pipeline to stream Frames into the ImageReader's surface
+        setFrameProcessorOutputSurface(imageReader!!.surface)
+      } else {
+        // 1. Configure OpenGL pipeline to stop streaming Frames into the ImageReader's surface
+        removeFrameProcessorOutputSurface()
+
+        // 2. Close the ImageReader
+        this.imageReader?.close()
+        this.imageReader = null
+      }
+    }
+  }
+
+  /**
+   * Configures the Pipeline to also write Frames to a Surface from a [MediaRecorder] (or null)
+   */
+  fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
+    synchronized(this) {
+      Log.i(TAG, "Setting $width x $height RecordingSession Output...")
+      if (recordingSession != null) {
+        // Configure OpenGL pipeline to stream Frames into the Recording Session's surface
+        setRecordingSessionOutputSurface(recordingSession.surface)
+        this.recordingSession = recordingSession
+      } else {
+        // Configure OpenGL pipeline to stop streaming Frames into the Recording Session's surface
+        removeRecordingSessionOutputSurface()
+        this.recordingSession = null
+      }
+    }
+  }
+
+  private external fun getInputTextureId(): Int
+  private external fun onBeforeFrame()
+  private external fun onFrame(transformMatrix: FloatArray)
+  private external fun setFrameProcessorOutputSurface(surface: Any)
+  private external fun removeFrameProcessorOutputSurface()
+  private external fun setRecordingSessionOutputSurface(surface: Any)
+  private external fun removeRecordingSessionOutputSurface()
+  private external fun initHybrid(width: Int, height: Int): HybridData
 }