chore: Move everything into package/ (#1745)

* Move everything into package

* Remove .DS_Store

* Move scripts and eslintrc to package

* Create CODE_OF_CONDUCT.md

* fix some links

* Update all links (I think)

* Update generated docs

* Update notice-yarn-changes.yml

* Update validate-android.yml

* Update validate-cpp.yml

* Delete notice-yarn-changes.yml

* Update validate-cpp.yml

* Update validate-cpp.yml

* Update validate-js.yml

* Update validate-cpp.yml

* Update validate-cpp.yml

* wrong c++ style

* Revert "wrong c++ style"

This reverts commit 55a3575589c6f13f8b05134d83384f55e0601ab2.

Author: Marc Rousavy
Date: 2023-09-01 18:15:28 +02:00
Committed by: GitHub
Parent: 2a5c33323b
Commit: 036856aed5
347 changed files with 3088 additions and 154 deletions

@@ -0,0 +1,163 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#include "OpenGLContext.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/log.h>
#include <android/native_window.h>
#include "OpenGLError.h"
namespace vision {
std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface() {
return std::unique_ptr<OpenGLContext>(new OpenGLContext());
}
OpenGLContext::~OpenGLContext() {
destroy();
}
void OpenGLContext::destroy() {
if (display != EGL_NO_DISPLAY) {
eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
if (offscreenSurface != EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
eglDestroySurface(display, offscreenSurface);
offscreenSurface = EGL_NO_SURFACE;
}
if (context != EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Context...");
eglDestroyContext(display, context);
context = EGL_NO_CONTEXT;
}
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Display...");
eglTerminate(display);
display = EGL_NO_DISPLAY;
config = nullptr;
}
}
void OpenGLContext::ensureOpenGL() {
bool successful;
// EGLDisplay
if (display == EGL_NO_DISPLAY) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLDisplay..");
display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (display == EGL_NO_DISPLAY)
throw OpenGLError("Failed to get default OpenGL Display!");
EGLint major;
EGLint minor;
successful = eglInitialize(display, &major, &minor);
if (!successful)
throw OpenGLError("Failed to initialize OpenGL!");
}
// EGLConfig
if (config == nullptr) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
EGLint attributes[] = {EGL_RENDERABLE_TYPE,
EGL_OPENGL_ES2_BIT,
EGL_SURFACE_TYPE,
EGL_WINDOW_BIT,
EGL_RED_SIZE,
8,
EGL_GREEN_SIZE,
8,
EGL_BLUE_SIZE,
8,
EGL_ALPHA_SIZE,
8,
EGL_DEPTH_SIZE,
0,
EGL_STENCIL_SIZE,
0,
EGL_NONE};
EGLint numConfigs;
successful = eglChooseConfig(display, attributes, &config, 1, &numConfigs);
if (!successful || numConfigs == 0)
throw OpenGLError("Failed to choose OpenGL config!");
}
// EGLContext
if (context == EGL_NO_CONTEXT) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLContext..");
EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
context = eglCreateContext(display, config, nullptr, contextAttributes);
if (context == EGL_NO_CONTEXT)
throw OpenGLError("Failed to create OpenGL context!");
}
// EGLSurface
if (offscreenSurface == EGL_NO_SURFACE) {
// If we don't have a surface at all
__android_log_print(ANDROID_LOG_INFO, TAG, "Initializing 1x1 offscreen pbuffer EGLSurface..");
EGLint attributes[] = {EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE};
offscreenSurface = eglCreatePbufferSurface(display, config, attributes);
if (offscreenSurface == EGL_NO_SURFACE)
throw OpenGLError("Failed to create OpenGL Surface!");
}
}
void OpenGLContext::use() {
this->use(offscreenSurface);
}
void OpenGLContext::use(EGLSurface surface) {
if (surface == EGL_NO_SURFACE)
throw OpenGLError("Cannot render to a null Surface!");
// 1. Make sure the OpenGL context is initialized
this->ensureOpenGL();
// 2. Make the OpenGL context current
bool successful = eglMakeCurrent(display, surface, surface, context);
if (!successful || eglGetError() != EGL_SUCCESS)
throw OpenGLError("Failed to use current OpenGL context!");
// 3. Caller can now render to this surface
}
void OpenGLContext::flush() const {
bool successful = eglSwapBuffers(display, eglGetCurrentSurface(EGL_DRAW));
if (!successful || eglGetError() != EGL_SUCCESS)
throw OpenGLError("Failed to swap OpenGL buffers!");
}
OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width, int height) {
// 1. Make sure the OpenGL context is initialized
this->ensureOpenGL();
// 2. Make the OpenGL context current
bool successful = eglMakeCurrent(display, offscreenSurface, offscreenSurface, context);
if (!successful || eglGetError() != EGL_SUCCESS)
throw OpenGLError("Failed to use current OpenGL context!");
GLuint textureId;
glGenTextures(1, &textureId);
GLenum target;
switch (type) {
case OpenGLTexture::Type::ExternalOES:
target = GL_TEXTURE_EXTERNAL_OES;
break;
case OpenGLTexture::Type::Texture2D:
target = GL_TEXTURE_2D;
break;
default:
throw std::runtime_error("Invalid OpenGL Texture Type!");
}
glBindTexture(target, textureId);
glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
return {.id = textureId, .target = target, .width = width, .height = height};
}
} // namespace vision

@@ -0,0 +1,73 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <functional>
#include <memory>
#include "OpenGLTexture.h"
#include "PassThroughShader.h"
namespace vision {
/**
* An OpenGL Context that can be used to render to different surfaces.
* By default, it creates an off-screen PixelBuffer surface.
*/
class OpenGLContext {
public:
/**
* Create a new instance of the OpenGLContext that draws to an off-screen PixelBuffer surface.
* This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
*/
static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface();
/**
* Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called on.
*/
~OpenGLContext();
/**
* Use this OpenGL Context to render to the given EGLSurface.
* The context stays current on the calling thread (targeting the given surface) until `use()` is called again.
*/
void use(EGLSurface surface);
/**
* Use this OpenGL Context to render to the offscreen PixelBuffer surface.
*/
void use();
/**
* Flushes all drawing operations by swapping the buffers and submitting the Frame to the GPU
*/
void flush() const;
/**
* Create a new texture on this context
*/
OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);
public:
EGLDisplay display = EGL_NO_DISPLAY;
EGLContext context = EGL_NO_CONTEXT;
EGLSurface offscreenSurface = EGL_NO_SURFACE;
EGLConfig config = nullptr;
private:
OpenGLContext() = default;
void destroy();
void ensureOpenGL();
private:
PassThroughShader _passThroughShader;
private:
static constexpr auto TAG = "OpenGLContext";
};
} // namespace vision
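A minimal usage sketch (not part of this commit; the texture size and type below are illustrative). EGL is initialized lazily, so constructing the context does not touch OpenGL yet:

#include "OpenGLContext.h"

void renderOffscreenOnce() {
  auto context = vision::OpenGLContext::CreateWithOffscreenSurface();
  context->use();                              // lazily initializes EGLDisplay/EGLConfig/EGLContext + 1x1 pbuffer
  OpenGLTexture texture = context->createTexture(OpenGLTexture::Type::Texture2D, 1280, 720);
  // ... issue GL draw calls here ...
  context->flush();                            // swaps the currently bound draw surface
}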

@@ -0,0 +1,35 @@
//
// Created by Marc Rousavy on 09.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <stdexcept>
#include <string>
namespace vision {
inline std::string getEglErrorIfAny() {
EGLint error = glGetError();
if (error != GL_NO_ERROR)
return " Error: " + std::to_string(error);
error = eglGetError();
if (error != EGL_SUCCESS)
return " Error: " + std::to_string(error);
return "";
}
class OpenGLError : public std::runtime_error {
public:
explicit OpenGLError(const std::string&& message)
: std::runtime_error(message + getEglErrorIfAny()) {}
static inline void checkIfError(const std::string&& message) {
auto error = getEglErrorIfAny();
if (error.length() > 0)
throw std::runtime_error(message + error);
}
};
} // namespace vision
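A short sketch of how these helpers are meant to be used after GL/EGL calls (the clear call below is only an example, and a current context is assumed):

#include "OpenGLError.h"
#include <GLES2/gl2.h>

void clearSurface() {
  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  glClear(GL_COLOR_BUFFER_BIT);
  // Throws with the numeric GL/EGL error code appended if anything went wrong.
  vision::OpenGLError::checkIfError("Failed to clear the Surface!");
}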

@@ -0,0 +1,76 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#include "OpenGLRenderer.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/log.h>
#include <android/native_window.h>
#include <utility>
#include "OpenGLError.h"
namespace vision {
std::unique_ptr<OpenGLRenderer>
OpenGLRenderer::CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context,
ANativeWindow* surface) {
return std::unique_ptr<OpenGLRenderer>(new OpenGLRenderer(std::move(context), surface));
}
OpenGLRenderer::OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface) {
_context = std::move(context);
_outputSurface = surface;
_width = ANativeWindow_getWidth(surface);
_height = ANativeWindow_getHeight(surface);
}
OpenGLRenderer::~OpenGLRenderer() {
if (_outputSurface != nullptr) {
ANativeWindow_release(_outputSurface);
}
destroy();
}
void OpenGLRenderer::destroy() {
if (_context != nullptr && _surface != EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
eglDestroySurface(_context->display, _surface);
_surface = EGL_NO_SURFACE;
}
}
void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
if (_surface == EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
_context->use();
_surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
}
// 1. Activate the OpenGL context for this surface
_context->use(_surface);
// 2. Set the viewport for rendering
glViewport(0, 0, _width, _height);
glDisable(GL_BLEND);
// 3. Bind the input texture
glBindTexture(texture.target, texture.id);
glTexParameteri(texture.target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(texture.target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(texture.target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// 4. Draw it using the pass-through shader which also applies transforms
_passThroughShader.draw(texture, transformMatrix);
// 5. Swap buffers to pass it to the window surface
eglSwapBuffers(_context->display, _surface);
}
} // namespace vision

@@ -0,0 +1,62 @@
//
// Created by Marc Rousavy on 29.08.23.
//
#pragma once
#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <android/native_window.h>
#include <memory>
#include "OpenGLContext.h"
#include "OpenGLTexture.h"
namespace vision {
class OpenGLRenderer {
public:
/**
* Create a new instance of the OpenGLRenderer that draws to an on-screen window surface.
* This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
*
* Note: The `surface` is considered moved, and the OpenGL context will release it when it is
* being deleted.
*/
static std::unique_ptr<OpenGLRenderer>
CreateWithWindowSurface(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
/**
* Destroy the OpenGL Renderer. This needs to be called on the same thread that `renderTextureToSurface()` was called on.
*/
~OpenGLRenderer();
/**
* Renders the given Texture to the Surface
*/
void renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix);
/**
* Destroys the window EGLSurface. This needs to be called on the same thread that
* `renderTextureToSurface()` was called on. After calling `destroy()`, it is legal to render
* again, which will re-create the surface.
*/
void destroy();
private:
explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
private:
int _width = 0, _height = 0;
std::shared_ptr<OpenGLContext> _context;
ANativeWindow* _outputSurface;
EGLSurface _surface = EGL_NO_SURFACE;
private:
PassThroughShader _passThroughShader;
private:
static constexpr auto TAG = "OpenGLRenderer";
};
} // namespace vision
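A usage sketch, assuming a valid ANativeWindow* (e.g. from ANativeWindow_fromSurface) and a texture created on the same context. The window EGLSurface is created lazily on the first render and released when the renderer is destroyed:

#include "OpenGLContext.h"
#include "OpenGLRenderer.h"

void renderToWindow(std::shared_ptr<vision::OpenGLContext> context, ANativeWindow* window,
                    const OpenGLTexture& texture, float* transformMatrix) {
  auto renderer = vision::OpenGLRenderer::CreateWithWindowSurface(std::move(context), window);
  renderer->renderTextureToSurface(texture, transformMatrix); // creates the window EGLSurface on first call
} // ~OpenGLRenderer releases the ANativeWindow and destroys its EGLSurface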

@@ -0,0 +1,22 @@
//
// Created by Marc Rousavy on 30.08.23.
//
#pragma once
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdexcept>
struct OpenGLTexture {
enum Type { Texture2D, ExternalOES };
// The ID of the texture as returned in glGenTextures(..)
GLuint id;
// GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
GLenum target;
// Width and height of the texture
int width = 0;
int height = 0;
};

@@ -0,0 +1,111 @@
//
// Created by Marc Rousavy on 28.08.23.
//
#include "PassThroughShader.h"
#include "OpenGLError.h"
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <memory>
#include <string>
namespace vision {
PassThroughShader::~PassThroughShader() {
if (_programId != NO_SHADER) {
glDeleteProgram(_programId);
_programId = NO_SHADER;
}
if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
}
void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
// 1. Set up Shader Program
if (_programId == NO_SHADER) {
_programId = createProgram();
glUseProgram(_programId);
_vertexParameters = {
.aPosition = glGetAttribLocation(_programId, "aPosition"),
.aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
.uTransformMatrix = glGetUniformLocation(_programId, "uTransformMatrix"),
};
_fragmentParameters = {
.uTexture = glGetUniformLocation(_programId, "uTexture"),
};
}
glUseProgram(_programId);
// 2. Set up Vertices Buffer
if (_vertexBuffer == NO_BUFFER) {
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
}
// 3. Pass all uniforms/attributes for vertex shader
glEnableVertexAttribArray(_vertexParameters.aPosition);
glVertexAttribPointer(_vertexParameters.aPosition, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, position)));
glEnableVertexAttribArray(_vertexParameters.aTexCoord);
glVertexAttribPointer(_vertexParameters.aTexCoord, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<void*>(offsetof(Vertex, texCoord)));
glUniformMatrix4fv(_vertexParameters.uTransformMatrix, 1, GL_FALSE, transformMatrix);
// 4. Pass texture to fragment shader
glActiveTexture(GL_TEXTURE0);
glBindTexture(texture.target, texture.id);
glUniform1i(_fragmentParameters.uTexture, 0);
// 5. Draw!
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode) {
GLuint shader = glCreateShader(shaderType);
if (shader == 0)
throw OpenGLError("Failed to load shader!");
glShaderSource(shader, 1, &shaderCode, nullptr);
glCompileShader(shader);
GLint compileStatus = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
if (compileStatus == GL_FALSE) {
glDeleteShader(shader);
throw OpenGLError("Failed to compile shader!");
}
return shader;
}
GLuint PassThroughShader::createProgram() {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
GLuint program = glCreateProgram();
if (program == 0)
throw OpenGLError("Failed to create pass-through program!");
glAttachShader(program, vertexShader);
OpenGLError::checkIfError("Failed to attach Vertex Shader!");
glAttachShader(program, fragmentShader);
OpenGLError::checkIfError("Failed to attach Fragment Shader!");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (!linkStatus) {
glDeleteProgram(program);
throw OpenGLError("Failed to load pass-through program!");
}
return program;
}
} // namespace vision

@@ -0,0 +1,84 @@
//
// Created by Marc Rousavy on 28.08.23.
//
#pragma once
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include "OpenGLTexture.h"
namespace vision {
#define NO_SHADER 0
#define NO_POSITION 0
#define NO_BUFFER 0
struct Vertex {
GLfloat position[2];
GLfloat texCoord[2];
};
class PassThroughShader {
public:
PassThroughShader() = default;
~PassThroughShader();
/**
* Draw the texture using this shader.
* Note: At the moment, only EXTERNAL textures are supported by the Shader.
*/
void draw(const OpenGLTexture& texture, float* transformMatrix);
private:
// Loading
static GLuint loadShader(GLenum shaderType, const char* shaderCode);
static GLuint createProgram();
private:
// Parameters
GLuint _programId = NO_SHADER;
GLuint _vertexBuffer = NO_BUFFER;
struct VertexParameters {
GLint aPosition = NO_POSITION;
GLint aTexCoord = NO_POSITION;
GLint uTransformMatrix = NO_POSITION;
} _vertexParameters;
struct FragmentParameters {
GLint uTexture = NO_POSITION;
} _fragmentParameters;
private:
// Statics
static constexpr Vertex VERTICES[] = {
{{-1.0f, -1.0f}, {0.0f, 0.0f}}, // bottom-left
{{1.0f, -1.0f}, {1.0f, 0.0f}}, // bottom-right
{{-1.0f, 1.0f}, {0.0f, 1.0f}}, // top-left
{{1.0f, 1.0f}, {1.0f, 1.0f}} // top-right
};
static constexpr char VERTEX_SHADER[] = R"(
attribute vec4 aPosition;
attribute vec2 aTexCoord;
uniform mat4 uTransformMatrix;
varying vec2 vTexCoord;
void main() {
gl_Position = aPosition;
vTexCoord = (uTransformMatrix * vec4(aTexCoord, 0.0, 1.0)).xy;
}
)";
static constexpr char FRAGMENT_SHADER[] = R"(
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTexCoord;
uniform samplerExternalOES uTexture;
void main() {
gl_FragColor = texture2D(uTexture, vTexCoord);
}
)";
};
} // namespace vision
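A hedged sketch of calling the shader directly with an external (OES) texture and an identity texture-transform matrix; in the pipeline the matrix comes from the camera's SurfaceTexture, and a current EGL context with a bound draw surface is assumed:

#include "PassThroughShader.h"

void drawCameraTexture(const OpenGLTexture& cameraTexture) {
  static float identityMatrix[16] = {1, 0, 0, 0,
                                     0, 1, 0, 0,
                                     0, 0, 1, 0,
                                     0, 0, 0, 1};
  vision::PassThroughShader shader;
  shader.draw(cameraTexture, identityMatrix); // compiles the program and uploads the vertices lazily on first draw
}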

@@ -0,0 +1,125 @@
//
// Created by Marc Rousavy on 25.08.23.
//
#include "VideoPipeline.h"
#include "OpenGLError.h"
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <chrono>
#include "JFrameProcessor.h"
#include "OpenGLTexture.h"
namespace vision {
jni::local_ref<VideoPipeline::jhybriddata>
VideoPipeline::initHybrid(jni::alias_ref<jhybridobject> jThis, int width, int height) {
return makeCxxInstance(jThis, width, height);
}
VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height)
: _javaPart(jni::make_global(jThis)) {
_width = width;
_height = height;
_context = OpenGLContext::CreateWithOffscreenSurface();
}
VideoPipeline::~VideoPipeline() {
// 1. Remove output surfaces
removeFrameProcessorOutputSurface();
removeRecordingSessionOutputSurface();
// 2. Delete the input textures
if (_inputTexture != std::nullopt) {
glDeleteTextures(1, &_inputTexture->id);
_inputTexture = std::nullopt;
}
// 3. Destroy the OpenGL context
_context = nullptr;
}
void VideoPipeline::removeFrameProcessorOutputSurface() {
if (_frameProcessorOutput)
_frameProcessorOutput->destroy();
_frameProcessorOutput = nullptr;
}
void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeFrameProcessorOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
void VideoPipeline::removeRecordingSessionOutputSurface() {
if (_recordingSessionOutput)
_recordingSessionOutput->destroy();
_recordingSessionOutput = nullptr;
}
void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeRecordingSessionOutputSurface();
// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}
int VideoPipeline::getInputTextureId() {
if (_inputTexture == std::nullopt) {
_inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
}
return static_cast<int>(_inputTexture->id);
}
void VideoPipeline::onBeforeFrame() {
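// Make the OpenGL context current and bind the OES input texture (the next camera frame is attached to it afterwards)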
_context->use();
glBindTexture(_inputTexture->target, _inputTexture->id);
}
void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
// Get the OpenGL transform Matrix (transforms, scales, rotations)
float transformMatrix[16];
transformMatrixParam->getRegion(0, 16, transformMatrix);
OpenGLTexture& texture = _inputTexture.value();
if (_frameProcessorOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
_frameProcessorOutput->renderTextureToSurface(texture, transformMatrix);
}
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
}
}
void VideoPipeline::registerNatives() {
registerHybrid({
makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
makeNativeMethod("setFrameProcessorOutputSurface",
VideoPipeline::setFrameProcessorOutputSurface),
makeNativeMethod("removeFrameProcessorOutputSurface",
VideoPipeline::removeFrameProcessorOutputSurface),
makeNativeMethod("setRecordingSessionOutputSurface",
VideoPipeline::setRecordingSessionOutputSurface),
makeNativeMethod("removeRecordingSessionOutputSurface",
VideoPipeline::removeRecordingSessionOutputSurface),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
makeNativeMethod("onFrame", VideoPipeline::onFrame),
});
}
} // namespace vision

@@ -0,0 +1,67 @@
//
// Created by Marc Rousavy on 25.08.23.
//
#pragma once
#include "OpenGLContext.h"
#include "OpenGLRenderer.h"
#include "PassThroughShader.h"
#include <EGL/egl.h>
#include <android/native_window.h>
#include <fbjni/fbjni.h>
#include <jni.h>
#include <memory>
#include <optional>
namespace vision {
using namespace facebook;
class VideoPipeline : public jni::HybridClass<VideoPipeline> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/core/VideoPipeline;";
static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> jThis, int width,
int height);
static void registerNatives();
public:
~VideoPipeline();
// -> SurfaceTexture input
int getInputTextureId();
// <- Frame Processor output
void setFrameProcessorOutputSurface(jobject surface);
void removeFrameProcessorOutputSurface();
// <- MediaRecorder output
void setRecordingSessionOutputSurface(jobject surface);
void removeRecordingSessionOutputSurface();
// Frame callbacks
void onBeforeFrame();
void onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrix);
private:
// Private constructor. Use `create(..)` to create new instances.
explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
private:
// Input Surface Texture
std::optional<OpenGLTexture> _inputTexture = std::nullopt;
int _width = 0;
int _height = 0;
// Output Contexts
std::shared_ptr<OpenGLContext> _context = nullptr;
std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
private:
friend HybridBase;
jni::global_ref<javaobject> _javaPart;
static constexpr auto TAG = "VideoPipeline";
};
} // namespace vision
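For orientation, a sketch of the per-frame call order at the native level (not part of this commit). In practice these methods are invoked from the Kotlin side via JNI, and the 4x4 transform matrix comes from the camera's SurfaceTexture:

#include "VideoPipeline.h"

void onCameraFrame(vision::VideoPipeline* pipeline, jni::alias_ref<jni::JArrayFloat> transformMatrix) {
  pipeline->onBeforeFrame();          // makes the context current and binds the OES input texture
  // ... the camera writes the new frame into the bound texture (SurfaceTexture.updateTexImage) ...
  pipeline->onFrame(transformMatrix); // renders the texture to the FrameProcessor / RecordingSession outputs
}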

@@ -0,0 +1,19 @@
#include "JFrameProcessor.h"
#include "JVisionCameraProxy.h"
#include "JVisionCameraScheduler.h"
#include "VideoPipeline.h"
#include "VisionCameraProxy.h"
#include <fbjni/fbjni.h>
#include <jni.h>
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
return facebook::jni::initialize(vm, [] {
vision::VisionCameraInstaller::registerNatives();
vision::JVisionCameraProxy::registerNatives();
vision::JVisionCameraScheduler::registerNatives();
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
vision::JFrameProcessor::registerNatives();
#endif
});
}

@@ -0,0 +1,158 @@
//
// Created by Marc on 19/06/2021.
//
#include "FrameHostObject.h"
#include <fbjni/fbjni.h>
#include <jni.h>
#include "JSITypedArray.h"
#include <cstring>
#include <string>
#include <vector>
namespace vision {
using namespace facebook;
FrameHostObject::FrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame)
: frame(make_global(frame)) {}
FrameHostObject::~FrameHostObject() {
// Hermes' Garbage Collector (Hades GC) calls destructors on a separate Thread
// which might not be attached to JNI. Ensure that we use the JNI class loader when
// deallocating the `frame` HybridClass, because otherwise JNI cannot call the Java
// destroy() function.
jni::ThreadScope::WithClassLoader([&] { frame.reset(); });
}
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("pixelFormat")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
// Ref Management
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
return result;
}
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "incrementRefCount") {
jsi::HostFunctionType incrementRefCount = [=](jsi::Runtime& runtime, const jsi::Value& thisArg,
const jsi::Value* args,
size_t count) -> jsi::Value {
// Increment retain count by one.
this->frame->incrementRefCount();
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "incrementRefCount"), 0, incrementRefCount);
}
if (name == "decrementRefCount") {
auto decrementRefCount = [=](jsi::Runtime& runtime, const jsi::Value& thisArg,
const jsi::Value* args, size_t count) -> jsi::Value {
// Decrement retain count by one. If the retain count is zero, the Frame gets closed.
this->frame->decrementRefCount();
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "decrementRefCount"), 0, decrementRefCount);
}
if (name == "toString") {
jsi::HostFunctionType toString = [=](jsi::Runtime& runtime, const jsi::Value& thisArg,
const jsi::Value* args, size_t count) -> jsi::Value {
if (!this->frame) {
return jsi::String::createFromUtf8(runtime, "[closed frame]");
}
auto width = this->frame->getWidth();
auto height = this->frame->getHeight();
auto str = std::to_string(width) + " x " + std::to_string(height) + " Frame";
return jsi::String::createFromUtf8(runtime, str);
};
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
}
if (name == "toArrayBuffer") {
jsi::HostFunctionType toArrayBuffer = [=](jsi::Runtime& runtime, const jsi::Value& thisArg,
const jsi::Value* args, size_t count) -> jsi::Value {
auto buffer = this->frame->toByteBuffer();
if (!buffer->isDirect()) {
throw std::runtime_error(
"Failed to get byte content of Frame - array is not direct ByteBuffer!");
}
auto size = buffer->getDirectSize();
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime, size);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
// Get from global JS cache
auto arrayBufferCache =
runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache)
.get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);
if (arrayBuffer.size(runtime) != size) {
arrayBuffer = vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, size);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
// directly write to C++ JSI ArrayBuffer
auto destinationBuffer = arrayBuffer.data(runtime);
memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);
return arrayBuffer;
};
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
if (name == "isValid") {
return jsi::Value(this->frame && this->frame->getIsValid());
}
if (name == "width") {
return jsi::Value(this->frame->getWidth());
}
if (name == "height") {
return jsi::Value(this->frame->getHeight());
}
if (name == "isMirrored") {
return jsi::Value(this->frame->getIsMirrored());
}
if (name == "orientation") {
auto string = this->frame->getOrientation();
return jsi::String::createFromUtf8(runtime, string->toStdString());
}
if (name == "pixelFormat") {
auto string = this->frame->getPixelFormat();
return jsi::String::createFromUtf8(runtime, string->toStdString());
}
if (name == "timestamp") {
return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
}
if (name == "bytesPerRow") {
return jsi::Value(this->frame->getBytesPerRow());
}
if (name == "planesCount") {
return jsi::Value(this->frame->getPlanesCount());
}
// fallback to base implementation
return HostObject::get(runtime, propName);
}
} // namespace vision
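A sketch of how a Frame reaches JavaScript: the JFrame reference is wrapped into a FrameHostObject and exposed as a JSI object (this mirrors what JFrameProcessor does internally; the function name is illustrative):

#include "FrameHostObject.h"
#include <jsi/jsi.h>
#include <memory>

jsi::Object wrapFrameForJS(jsi::Runtime& runtime, const jni::alias_ref<vision::JFrame::javaobject>& frame) {
  auto hostObject = std::make_shared<vision::FrameHostObject>(frame);
  // The resulting JS object exposes width, height, pixelFormat, toArrayBuffer(), incrementRefCount(), ...
  return jsi::Object::createFromHostObject(runtime, hostObject);
}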

@@ -0,0 +1,32 @@
//
// Created by Marc on 19/06/2021.
//
#pragma once
#include <fbjni/fbjni.h>
#include <jni.h>
#include <jsi/jsi.h>
#include <string>
#include <vector>
#include "JFrame.h"
namespace vision {
using namespace facebook;
class JSI_EXPORT FrameHostObject : public jsi::HostObject {
public:
explicit FrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame);
~FrameHostObject();
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
public:
jni::global_ref<JFrame> frame;
};
} // namespace vision

@@ -0,0 +1,54 @@
//
// Created by Marc Rousavy on 21.07.23.
//
#include "FrameProcessorPluginHostObject.h"
#include "FrameHostObject.h"
#include "JSIJNIConversion.h"
#include <string>
#include <vector>
namespace vision {
using namespace facebook;
std::vector<jsi::PropNameID>
FrameProcessorPluginHostObject::getPropertyNames(jsi::Runtime& runtime) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("call")));
return result;
}
jsi::Value FrameProcessorPluginHostObject::get(jsi::Runtime& runtime,
const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "call") {
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "call"), 2,
[=](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
size_t count) -> jsi::Value {
// Frame is first argument
auto frameHostObject =
arguments[0].asObject(runtime).asHostObject<FrameHostObject>(runtime);
auto frame = frameHostObject->frame;
// Options are second argument (possibly undefined)
local_ref<JMap<jstring, jobject>> options = nullptr;
if (count > 1) {
options =
JSIJNIConversion::convertJSIObjectToJNIMap(runtime, arguments[1].asObject(runtime));
}
// Call actual plugin
auto result = _plugin->callback(frame, options);
// Convert result value to jsi::Value (possibly undefined)
return JSIJNIConversion::convertJNIObjectToJSIValue(runtime, result);
});
}
return jsi::Value::undefined();
}
} // namespace vision

@@ -0,0 +1,31 @@
//
// Created by Marc Rousavy on 21.07.23.
//
#pragma once
#include "JFrameProcessorPlugin.h"
#include <fbjni/fbjni.h>
#include <jsi/jsi.h>
#include <memory>
#include <vector>
namespace vision {
using namespace facebook;
class FrameProcessorPluginHostObject : public jsi::HostObject {
public:
explicit FrameProcessorPluginHostObject(jni::alias_ref<JFrameProcessorPlugin::javaobject> plugin)
: _plugin(make_global(plugin)) {}
~FrameProcessorPluginHostObject() {}
public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
private:
jni::global_ref<JFrameProcessorPlugin::javaobject> _plugin;
};
} // namespace vision

@@ -0,0 +1,150 @@
//
// Created by Marc Rousavy on 22.06.21.
//
#include "JSIJNIConversion.h"
#include <android/log.h>
#include <fbjni/fbjni.h>
#include <jni.h>
#include <jsi/jsi.h>
#include <memory>
#include <string>
#include <utility>
#include "FrameHostObject.h"
#include "JFrame.h"
namespace vision {
using namespace facebook;
jni::local_ref<jni::JMap<jstring, jobject>>
JSIJNIConversion::convertJSIObjectToJNIMap(jsi::Runtime& runtime, const jsi::Object& object) {
auto propertyNames = object.getPropertyNames(runtime);
auto size = propertyNames.size(runtime);
auto hashMap = jni::JHashMap<jstring, jobject>::create();
for (size_t i = 0; i < size; i++) {
auto propName = propertyNames.getValueAtIndex(runtime, i).asString(runtime);
auto key = jni::make_jstring(propName.utf8(runtime));
auto value = object.getProperty(runtime, propName);
if (value.isNull() || value.isUndefined()) {
// null
hashMap->put(key, nullptr);
} else if (value.isBool()) {
// Boolean
auto boolean = value.getBool();
hashMap->put(key, jni::JBoolean::valueOf(boolean));
} else if (value.isNumber()) {
// Double
auto number = value.getNumber();
hashMap->put(key, jni::JDouble::valueOf(number));
} else if (value.isString()) {
// String
auto str = value.getString(runtime).utf8(runtime);
hashMap->put(key, jni::make_jstring(str));
} else if (value.isObject()) {
// Object
auto valueAsObject = value.getObject(runtime);
if (valueAsObject.isArray(runtime)) {
// List<Object>: arrays are not converted here, so this key is silently skipped.
} else if (valueAsObject.isHostObject(runtime)) {
throw std::runtime_error("You can't pass HostObjects here.");
} else {
// Map<String, Object>
auto map = convertJSIObjectToJNIMap(runtime, valueAsObject);
hashMap->put(key, map);
}
} else {
auto stringRepresentation = value.toString(runtime).utf8(runtime);
throw std::runtime_error("Failed to convert jsi::Value to JNI value - unsupported type!" +
stringRepresentation);
}
}
return hashMap;
}
jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime& runtime,
const jni::local_ref<jobject>& object) {
if (object == nullptr) {
// null
return jsi::Value::undefined();
} else if (object->isInstanceOf(jni::JBoolean::javaClassStatic())) {
// Boolean
static const auto getBooleanFunc =
jni::findClassLocal("java/lang/Boolean")->getMethod<jboolean()>("booleanValue");
auto boolean = getBooleanFunc(object.get());
return jsi::Value(boolean == true);
} else if (object->isInstanceOf(jni::JDouble::javaClassStatic())) {
// Double
static const auto getDoubleFunc =
jni::findClassLocal("java/lang/Double")->getMethod<jdouble()>("doubleValue");
auto d = getDoubleFunc(object.get());
return jsi::Value(d);
} else if (object->isInstanceOf(jni::JInteger::javaClassStatic())) {
// Integer
static const auto getIntegerFunc =
jni::findClassLocal("java/lang/Integer")->getMethod<jint()>("intValue");
auto i = getIntegerFunc(object.get());
return jsi::Value(i);
} else if (object->isInstanceOf(jni::JString::javaClassStatic())) {
// String
return jsi::String::createFromUtf8(runtime, object->toString());
} else if (object->isInstanceOf(JList<jobject>::javaClassStatic())) {
// List<E>
auto arrayList = static_ref_cast<JList<jobject>>(object);
auto size = arrayList->size();
auto result = jsi::Array(runtime, size);
size_t i = 0;
for (const auto& item : *arrayList) {
result.setValueAtIndex(runtime, i, convertJNIObjectToJSIValue(runtime, item));
i++;
}
return result;
} else if (object->isInstanceOf(JMap<jstring, jobject>::javaClassStatic())) {
// Map<K, V>
auto map = static_ref_cast<JMap<jstring, jobject>>(object);
auto result = jsi::Object(runtime);
for (const auto& entry : *map) {
auto key = entry.first->toString();
auto value = entry.second;
auto jsiValue = convertJNIObjectToJSIValue(runtime, value);
result.setProperty(runtime, key.c_str(), jsiValue);
}
return result;
} else if (object->isInstanceOf(JFrame::javaClassStatic())) {
// Frame
auto frame = static_ref_cast<JFrame>(object);
// box into HostObject
auto hostObject = std::make_shared<FrameHostObject>(frame);
return jsi::Object::createFromHostObject(runtime, hostObject);
}
auto type = object->getClass()->toString();
auto message = "Received unknown JNI type \"" + type + "\"! Cannot convert to jsi::Value.";
__android_log_write(ANDROID_LOG_ERROR, "VisionCamera", message.c_str());
throw std::runtime_error(message);
}
} // namespace vision
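A small sketch of the conversion in practice (the property names are illustrative): plain JS values are boxed into their Java counterparts before the options map is handed to a plugin.

#include "JSIJNIConversion.h"
#include <jsi/jsi.h>

jni::local_ref<jni::JMap<jstring, jobject>> buildOptions(jsi::Runtime& runtime) {
  jsi::Object options(runtime);
  options.setProperty(runtime, "mode", "fast");        // -> java.lang.String
  options.setProperty(runtime, "minConfidence", 0.5);  // -> java.lang.Double
  options.setProperty(runtime, "enabled", true);       // -> java.lang.Boolean
  return vision::JSIJNIConversion::convertJSIObjectToJNIMap(runtime, options);
}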

@@ -0,0 +1,25 @@
//
// Created by Marc Rousavy on 22.06.21.
//
#pragma once
#include <fbjni/fbjni.h>
#include <jni.h>
#include <jsi/jsi.h>
namespace vision {
namespace JSIJNIConversion {
using namespace facebook;
jni::local_ref<jni::JMap<jstring, jobject>> convertJSIObjectToJNIMap(jsi::Runtime& runtime,
const jsi::Object& object);
jsi::Value convertJNIObjectToJSIValue(jsi::Runtime& runtime,
const jni::local_ref<jobject>& object);
} // namespace JSIJNIConversion
} // namespace vision

@@ -0,0 +1,116 @@
//
// Created by Marc Rousavy on 21.07.23.
//
#include "VisionCameraProxy.h"
#include <jsi/jsi.h>
#include "JFrameProcessor.h"
#include "JFrameProcessorPlugin.h"
#include "JSIJNIConversion.h"
#include <android/log.h>
#include <fbjni/fbjni.h>
#include "FrameProcessorPluginHostObject.h"
#include "JSITypedArray.h"
#include <memory>
#include <string>
#include <vector>
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#include <react-native-worklets-core/WKTJsiWorkletContext.h>
#endif
namespace vision {
using namespace facebook;
VisionCameraProxy::VisionCameraProxy(
const jni::alias_ref<JVisionCameraProxy::javaobject>& javaProxy) {
_javaProxy = make_global(javaProxy);
}
VisionCameraProxy::~VisionCameraProxy() {}
std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& runtime) {
std::vector<jsi::PropNameID> result;
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
return result;
}
void VisionCameraProxy::setFrameProcessor(int viewTag, jsi::Runtime& runtime,
const jsi::Object& object) {
_javaProxy->cthis()->setFrameProcessor(viewTag, runtime, object);
}
void VisionCameraProxy::removeFrameProcessor(int viewTag) {
_javaProxy->cthis()->removeFrameProcessor(viewTag);
}
jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime,
const std::string& name,
const jsi::Object& jsOptions) {
auto options = JSIJNIConversion::convertJSIObjectToJNIMap(runtime, jsOptions);
auto plugin = _javaProxy->cthis()->getFrameProcessorPlugin(name, options);
auto pluginHostObject = std::make_shared<FrameProcessorPluginHostObject>(plugin);
return jsi::Object::createFromHostObject(runtime, pluginHostObject);
}
jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "setFrameProcessor") {
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"), 1,
[this](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
size_t count) -> jsi::Value {
auto viewTag = arguments[0].asNumber();
auto object = arguments[1].asObject(runtime);
this->setFrameProcessor(static_cast<int>(viewTag), runtime, object);
return jsi::Value::undefined();
});
}
if (name == "removeFrameProcessor") {
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "removeFrameProcessor"), 1,
[this](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
size_t count) -> jsi::Value {
auto viewTag = arguments[0].asNumber();
this->removeFrameProcessor(static_cast<int>(viewTag));
return jsi::Value::undefined();
});
}
if (name == "getFrameProcessorPlugin") {
return jsi::Function::createFromHostFunction(
runtime, jsi::PropNameID::forUtf8(runtime, "getFrameProcessorPlugin"), 1,
[this](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
size_t count) -> jsi::Value {
if (count < 1 || !arguments[0].isString()) {
throw jsi::JSError(runtime, "First argument needs to be a string (pluginName)!");
}
auto pluginName = arguments[0].asString(runtime).utf8(runtime);
auto options = count > 1 ? arguments[1].asObject(runtime) : jsi::Object(runtime);
return this->getFrameProcessorPlugin(runtime, pluginName, options);
});
}
return jsi::Value::undefined();
}
void VisionCameraInstaller::install(jni::alias_ref<jni::JClass>,
jni::alias_ref<JVisionCameraProxy::javaobject> proxy) {
// global.VisionCameraProxy
auto visionCameraProxy = std::make_shared<VisionCameraProxy>(proxy);
jsi::Runtime& runtime = *proxy->cthis()->getJSRuntime();
runtime.global().setProperty(runtime, "VisionCameraProxy",
jsi::Object::createFromHostObject(runtime, visionCameraProxy));
}
} // namespace vision

@@ -0,0 +1,51 @@
//
// Created by Marc Rousavy on 21.07.23.
//
#pragma once
#include <jsi/jsi.h>
#include "JVisionCameraProxy.h"
#include "JVisionCameraScheduler.h"
#include <string>
#include <vector>
namespace vision {
using namespace facebook;
class VisionCameraProxy : public jsi::HostObject {
public:
explicit VisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::javaobject>& javaProxy);
~VisionCameraProxy();
public:
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;
private:
void setFrameProcessor(int viewTag, jsi::Runtime& runtime, const jsi::Object& frameProcessor);
void removeFrameProcessor(int viewTag);
jsi::Value getFrameProcessorPlugin(jsi::Runtime& runtime, const std::string& name,
const jsi::Object& options);
private:
jni::global_ref<JVisionCameraProxy::javaobject> _javaProxy;
static constexpr const char* TAG = "VisionCameraProxy";
};
class VisionCameraInstaller : public jni::JavaClass<VisionCameraInstaller> {
public:
static auto constexpr kJavaDescriptor =
"Lcom/mrousavy/camera/frameprocessor/VisionCameraInstaller;";
static void registerNatives() {
javaClassStatic()->registerNatives(
{makeNativeMethod("install", VisionCameraInstaller::install)});
}
static void install(jni::alias_ref<jni::JClass> clazz,
jni::alias_ref<JVisionCameraProxy::javaobject> proxy);
};
} // namespace vision

@@ -0,0 +1,81 @@
//
// Created by Marc on 21.07.2023.
//
#include "JFrame.h"
#include <fbjni/ByteBuffer.h>
#include <fbjni/fbjni.h>
#include <jni.h>
namespace vision {
using namespace facebook;
using namespace jni;
int JFrame::getWidth() const {
static const auto getWidthMethod = getClass()->getMethod<jint()>("getWidth");
return getWidthMethod(self());
}
int JFrame::getHeight() const {
static const auto getHeightMethod = getClass()->getMethod<jint()>("getHeight");
return getHeightMethod(self());
}
bool JFrame::getIsValid() const {
static const auto getIsValidMethod = getClass()->getMethod<jboolean()>("getIsValid");
return getIsValidMethod(self());
}
bool JFrame::getIsMirrored() const {
static const auto getIsMirroredMethod = getClass()->getMethod<jboolean()>("getIsMirrored");
return getIsMirroredMethod(self());
}
jlong JFrame::getTimestamp() const {
static const auto getTimestampMethod = getClass()->getMethod<jlong()>("getTimestamp");
return getTimestampMethod(self());
}
local_ref<JString> JFrame::getOrientation() const {
static const auto getOrientationMethod = getClass()->getMethod<JString()>("getOrientation");
return getOrientationMethod(self());
}
local_ref<JString> JFrame::getPixelFormat() const {
static const auto getPixelFormatMethod = getClass()->getMethod<JString()>("getPixelFormat");
return getPixelFormatMethod(self());
}
int JFrame::getPlanesCount() const {
static const auto getPlanesCountMethod = getClass()->getMethod<jint()>("getPlanesCount");
return getPlanesCountMethod(self());
}
int JFrame::getBytesPerRow() const {
static const auto getBytesPerRowMethod = getClass()->getMethod<jint()>("getBytesPerRow");
return getBytesPerRowMethod(self());
}
local_ref<JByteBuffer> JFrame::toByteBuffer() const {
static const auto toByteBufferMethod = getClass()->getMethod<JByteBuffer()>("toByteBuffer");
return toByteBufferMethod(self());
}
void JFrame::incrementRefCount() {
static const auto incrementRefCountMethod = getClass()->getMethod<void()>("incrementRefCount");
incrementRefCountMethod(self());
}
void JFrame::decrementRefCount() {
static const auto decrementRefCountMethod = getClass()->getMethod<void()>("decrementRefCount");
decrementRefCountMethod(self());
}
void JFrame::close() {
static const auto closeMethod = getClass()->getMethod<void()>("close");
closeMethod(self());
}
} // namespace vision

@@ -0,0 +1,35 @@
//
// Created by Marc on 21.07.2023.
//
#pragma once
#include <fbjni/ByteBuffer.h>
#include <fbjni/fbjni.h>
#include <jni.h>
namespace vision {
using namespace facebook;
using namespace jni;
struct JFrame : public JavaClass<JFrame> {
static constexpr auto kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/Frame;";
public:
int getWidth() const;
int getHeight() const;
bool getIsValid() const;
bool getIsMirrored() const;
int getPlanesCount() const;
int getBytesPerRow() const;
jlong getTimestamp() const;
local_ref<JString> getOrientation() const;
local_ref<JString> getPixelFormat() const;
local_ref<JByteBuffer> toByteBuffer() const;
void incrementRefCount();
void decrementRefCount();
void close();
};
} // namespace vision

@@ -0,0 +1,70 @@
//
// Created by Marc Rousavy on 29.09.21.
//
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#include "JFrameProcessor.h"
#include <fbjni/fbjni.h>
#include <jni.h>
#include "JFrame.h"
#include <utility>
namespace vision {
using namespace facebook;
using namespace jni;
void JFrameProcessor::registerNatives() {
registerHybrid({makeNativeMethod("call", JFrameProcessor::call)});
}
using TSelf = jni::local_ref<JFrameProcessor::javaobject>;
JFrameProcessor::JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet,
std::shared_ptr<RNWorklet::JsiWorkletContext> context) {
_workletContext = std::move(context);
_workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
}
TSelf JFrameProcessor::create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context) {
return JFrameProcessor::newObjectCxxArgs(worklet, context);
}
void JFrameProcessor::callWithFrameHostObject(
const std::shared_ptr<FrameHostObject>& frameHostObject) const {
// Call the Frame Processor on the Worklet Runtime
jsi::Runtime& runtime = _workletContext->getWorkletRuntime();
try {
// Wrap HostObject as JSI Value
auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
jsi::Value jsValue(std::move(argument));
// Call the Worklet with the Frame JS Host Object as an argument
_workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
} catch (jsi::JSError& jsError) {
// JS Error occured, print it to console.
const std::string& message = jsError.getMessage();
_workletContext->invokeOnJsThread([message](jsi::Runtime& jsRuntime) {
auto logFn = jsRuntime.global()
.getPropertyAsObject(jsRuntime, "console")
.getPropertyAsFunction(jsRuntime, "error");
logFn.call(jsRuntime, jsi::String::createFromUtf8(
jsRuntime, "Frame Processor threw an error: " + message));
});
}
}
void JFrameProcessor::call(jni::alias_ref<JFrame::javaobject> frame) {
// Create the Frame Host Object wrapping the internal Frame
auto frameHostObject = std::make_shared<FrameHostObject>(frame);
callWithFrameHostObject(frameHostObject);
}
} // namespace vision
#endif
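A sketch (only meaningful when Frame Processors are enabled) of wrapping a worklet into a FrameProcessor and invoking it for a single frame, mirroring what JVisionCameraProxy::setFrameProcessor wires up:

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#include "JFrameProcessor.h"

void runWorkletForFrame(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
                        const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
                        const jni::alias_ref<vision::JFrame::javaobject>& frame) {
  auto frameProcessor = vision::JFrameProcessor::create(worklet, context);
  frameProcessor->cthis()->call(frame); // runs the worklet on the Worklet Runtime with the Frame as its argument
}
#endif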

@@ -0,0 +1,54 @@
//
// Created by Marc Rousavy on 29.09.21
//
#pragma once
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#include <fbjni/fbjni.h>
#include <jni.h>
#include <memory>
#include <string>
#include <react-native-worklets-core/WKTJsiHostObject.h>
#include <react-native-worklets-core/WKTJsiWorklet.h>
#include "FrameHostObject.h"
#include "JFrame.h"
namespace vision {
using namespace facebook;
struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/FrameProcessor;";
static void registerNatives();
static jni::local_ref<JFrameProcessor::javaobject>
create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context);
public:
/**
* Call the JS Frame Processor.
*/
void call(alias_ref<JFrame::javaobject> frame);
private:
// Private constructor. Use `create(..)` to create new instances.
explicit JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet,
std::shared_ptr<RNWorklet::JsiWorkletContext> context);
private:
void callWithFrameHostObject(const std::shared_ptr<FrameHostObject>& frameHostObject) const;
private:
friend HybridBase;
std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
};
} // namespace vision
#endif

@@ -0,0 +1,26 @@
//
// Created by Marc Rousavy on 29.09.21.
//
#include "JFrameProcessorPlugin.h"
#include <fbjni/fbjni.h>
#include <jni.h>
namespace vision {
using namespace facebook;
using namespace jni;
using TCallback = jobject(alias_ref<JFrame::javaobject>, alias_ref<JMap<jstring, jobject>> params);
local_ref<jobject>
JFrameProcessorPlugin::callback(const alias_ref<JFrame::javaobject>& frame,
const alias_ref<JMap<jstring, jobject>>& params) const {
auto callbackMethod = getClass()->getMethod<TCallback>("callback");
auto result = callbackMethod(self(), frame, params);
return make_local(result);
}
} // namespace vision

@@ -0,0 +1,30 @@
//
// Created by Marc Rousavy on 29.09.21
//
#pragma once
#include <fbjni/fbjni.h>
#include <jni.h>
#include <string>
#include "JFrame.h"
namespace vision {
using namespace facebook;
using namespace jni;
struct JFrameProcessorPlugin : public JavaClass<JFrameProcessorPlugin> {
static constexpr auto kJavaDescriptor =
"Lcom/mrousavy/camera/frameprocessor/FrameProcessorPlugin;";
public:
/**
* Call the plugin.
*/
local_ref<jobject> callback(const alias_ref<JFrame::javaobject>& frame,
const alias_ref<JMap<jstring, jobject>>& params) const;
};
} // namespace vision

@@ -0,0 +1,117 @@
//
// Created by Marc Rousavy on 21.07.23.
//
#include "JVisionCameraProxy.h"
#include <android/log.h>
#include <memory>
#include <string>
#include <utility>
#include <jsi/jsi.h>
#include "FrameProcessorPluginHostObject.h"
#include "JSITypedArray.h"
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#include <react-native-worklets-core/WKTJsiWorklet.h>
#include <react-native-worklets-core/WKTJsiWorkletContext.h>
#endif
namespace vision {
using TSelf = local_ref<HybridClass<JVisionCameraProxy>::jhybriddata>;
using TJSCallInvokerHolder = jni::alias_ref<facebook::react::CallInvokerHolder::javaobject>;
using TScheduler = jni::alias_ref<JVisionCameraScheduler::javaobject>;
using TOptions = jni::local_ref<JMap<jstring, jobject>>;
JVisionCameraProxy::JVisionCameraProxy(
const jni::alias_ref<JVisionCameraProxy::jhybridobject>& javaThis, jsi::Runtime* runtime,
const std::shared_ptr<facebook::react::CallInvoker>& callInvoker,
const jni::global_ref<JVisionCameraScheduler::javaobject>& scheduler) {
_javaPart = make_global(javaThis);
_runtime = runtime;
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
__android_log_write(ANDROID_LOG_INFO, TAG, "Creating Worklet Context...");
auto runOnJS = [callInvoker](std::function<void()>&& f) {
// Run on React JS Runtime
callInvoker->invokeAsync(std::move(f));
};
auto runOnWorklet = [scheduler](std::function<void()>&& f) {
// Run on Frame Processor Worklet Runtime
scheduler->cthis()->dispatchAsync([f = std::move(f)]() { f(); });
};
_workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera", runtime, runOnJS,
runOnWorklet);
__android_log_write(ANDROID_LOG_INFO, TAG, "Worklet Context created!");
#else
__android_log_write(ANDROID_LOG_INFO, TAG, "Frame Processors are disabled!");
#endif
}
JVisionCameraProxy::~JVisionCameraProxy() {
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
__android_log_write(ANDROID_LOG_INFO, TAG, "Destroying Context...");
// Destroy ArrayBuffer cache for both the JS and the Worklet Runtime.
invalidateArrayBufferCache(*_workletContext->getJsRuntime());
invalidateArrayBufferCache(_workletContext->getWorkletRuntime());
#endif
}
void JVisionCameraProxy::setFrameProcessor(int viewTag, jsi::Runtime& runtime,
const jsi::Object& frameProcessorObject) {
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
auto frameProcessorType =
frameProcessorObject.getProperty(runtime, "type").asString(runtime).utf8(runtime);
auto worklet = std::make_shared<RNWorklet::JsiWorklet>(
runtime, frameProcessorObject.getProperty(runtime, "frameProcessor"));
jni::local_ref<JFrameProcessor::javaobject> frameProcessor;
if (frameProcessorType == "frame-processor") {
frameProcessor = JFrameProcessor::create(worklet, _workletContext);
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}
auto setFrameProcessorMethod =
javaClassLocal()->getMethod<void(int, alias_ref<JFrameProcessor::javaobject>)>(
"setFrameProcessor");
setFrameProcessorMethod(_javaPart, viewTag, frameProcessor);
#else
throw std::runtime_error("system/frame-processors-unavailable: Frame Processors are disabled!");
#endif
}
void JVisionCameraProxy::removeFrameProcessor(int viewTag) {
auto removeFrameProcessorMethod = javaClassLocal()->getMethod<void(int)>("removeFrameProcessor");
removeFrameProcessorMethod(_javaPart, viewTag);
}
local_ref<JFrameProcessorPlugin::javaobject>
JVisionCameraProxy::getFrameProcessorPlugin(const std::string& name, TOptions options) {
auto getFrameProcessorPluginMethod =
javaClassLocal()->getMethod<JFrameProcessorPlugin(local_ref<jstring>, TOptions)>(
"getFrameProcessorPlugin");
return getFrameProcessorPluginMethod(_javaPart, make_jstring(name), std::move(options));
}
void JVisionCameraProxy::registerNatives() {
registerHybrid({makeNativeMethod("initHybrid", JVisionCameraProxy::initHybrid)});
}
TSelf JVisionCameraProxy::initHybrid(alias_ref<jhybridobject> jThis, jlong jsRuntimePointer,
TJSCallInvokerHolder jsCallInvokerHolder,
const TScheduler& scheduler) {
__android_log_write(ANDROID_LOG_INFO, TAG, "Initializing VisionCameraProxy...");
// cast from JNI hybrid objects to C++ instances
auto jsRuntime = reinterpret_cast<jsi::Runtime*>(jsRuntimePointer);
auto jsCallInvoker = jsCallInvokerHolder->cthis()->getCallInvoker();
auto sharedScheduler = make_global(scheduler);
return makeCxxInstance(jThis, jsRuntime, jsCallInvoker, sharedScheduler);
}
} // namespace vision

@@ -0,0 +1,61 @@
//
// Created by Marc Rousavy on 21.07.23.
//
#pragma once
#include <ReactCommon/CallInvokerHolder.h>
#include <fbjni/fbjni.h>
#include <jsi/jsi.h>
#include "JFrameProcessor.h"
#include "JFrameProcessorPlugin.h"
#include "JVisionCameraScheduler.h"
#include <memory>
#include <string>
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#include <react-native-worklets-core/WKTJsiWorkletContext.h>
#endif
namespace vision {
using namespace facebook;
class JVisionCameraProxy : public jni::HybridClass<JVisionCameraProxy> {
public:
~JVisionCameraProxy();
static void registerNatives();
void setFrameProcessor(int viewTag, jsi::Runtime& runtime, const jsi::Object& frameProcessor);
void removeFrameProcessor(int viewTag);
jni::local_ref<JFrameProcessorPlugin::javaobject>
getFrameProcessorPlugin(const std::string& name, jni::local_ref<JMap<jstring, jobject>> options);
jsi::Runtime* getJSRuntime() {
return _runtime;
}
private:
friend HybridBase;
jni::global_ref<JVisionCameraProxy::javaobject> _javaPart;
jsi::Runtime* _runtime;
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
#endif
static auto constexpr TAG = "VisionCameraProxy";
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/VisionCameraProxy;";
explicit JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::jhybridobject>& javaThis,
jsi::Runtime* jsRuntime,
const std::shared_ptr<facebook::react::CallInvoker>& jsCallInvoker,
const jni::global_ref<JVisionCameraScheduler::javaobject>& scheduler);
static jni::local_ref<jhybriddata>
initHybrid(jni::alias_ref<jhybridobject> javaThis, jlong jsRuntimePointer,
jni::alias_ref<facebook::react::CallInvokerHolder::javaobject> jsCallInvokerHolder,
const jni::alias_ref<JVisionCameraScheduler::javaobject>& scheduler);
};
} // namespace vision

@@ -0,0 +1,44 @@
//
// Created by Marc Rousavy on 25.07.21.
//
#include "JVisionCameraScheduler.h"
#include <fbjni/fbjni.h>
namespace vision {
using namespace facebook;
using TSelf = jni::local_ref<JVisionCameraScheduler::jhybriddata>;
TSelf JVisionCameraScheduler::initHybrid(jni::alias_ref<jhybridobject> jThis) {
return makeCxxInstance(jThis);
}
void JVisionCameraScheduler::dispatchAsync(const std::function<void()>& job) {
// 1. add job to queue (hold the lock; `trigger()` pops jobs from the Frame Processor Thread)
std::unique_lock<std::mutex> lock(_mutex);
_jobs.push(job);
scheduleTrigger();
}
void JVisionCameraScheduler::scheduleTrigger() {
// 2. schedule `triggerUI` to be called on the java thread
static auto method = _javaPart->getClass()->getMethod<void()>("scheduleTrigger");
method(_javaPart.get());
}
void JVisionCameraScheduler::trigger() {
std::unique_lock<std::mutex> lock(_mutex);
// 3. call job we enqueued in step 1.
auto job = _jobs.front();
job();
_jobs.pop();
}
void JVisionCameraScheduler::registerNatives() {
registerHybrid({
makeNativeMethod("initHybrid", JVisionCameraScheduler::initHybrid),
makeNativeMethod("trigger", JVisionCameraScheduler::trigger),
});
}
} // namespace vision

@@ -0,0 +1,51 @@
//
// Created by Marc Rousavy on 25.07.21.
//
#pragma once
#include <fbjni/fbjni.h>
#include <jni.h>
#include <mutex>
#include <queue>
namespace vision {
using namespace facebook;
/**
* A Scheduler that runs methods on the Frame Processor Thread (which is a Java Thread).
* In order to call something on the Java Frame Processor Thread, you have to:
*
* 1. Call `dispatchAsync(..)` with the C++ function you want to run.
* 2. Internally, the Java method `scheduleTrigger()` gets called.
* 3. `scheduleTrigger()` switches to the Frame Processor Java Thread and calls
* `trigger()` there.
* 4. `trigger()` is a C++ function that runs the next queued function from step 1.
*/
class JVisionCameraScheduler : public jni::HybridClass<JVisionCameraScheduler> {
public:
static auto constexpr kJavaDescriptor =
"Lcom/mrousavy/camera/frameprocessor/VisionCameraScheduler;";
static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> jThis);
static void registerNatives();
// schedules the given job to be run on the VisionCamera FP Thread at some future point in time
void dispatchAsync(const std::function<void()>& job);
private:
friend HybridBase;
jni::global_ref<JVisionCameraScheduler::javaobject> _javaPart;
std::queue<std::function<void()>> _jobs;
std::mutex _mutex;
explicit JVisionCameraScheduler(jni::alias_ref<JVisionCameraScheduler::jhybridobject> jThis)
: _javaPart(jni::make_global(jThis)) {}
// Schedules a call to `trigger` on the VisionCamera FP Thread
void scheduleTrigger();
// Calls the next job in the job queue
void trigger();
};
} // namespace vision
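A usage sketch following the steps described above, mirroring how JVisionCameraProxy dispatches work onto the Frame Processor Thread:

#include "JVisionCameraScheduler.h"

void runOnFrameProcessorThread(const jni::global_ref<vision::JVisionCameraScheduler::javaobject>& scheduler) {
  scheduler->cthis()->dispatchAsync([]() {
    // Runs on the Java Frame Processor Thread once scheduleTrigger() -> trigger() fires.
  });
}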