feat: Skia for Android (#1731)

* feat: Call Skia Renderer

* Use default NativePreviewView for Skia

* Render to separate FBO

* It appears once

* Refactor a lot lol

* Pass width/height

* Read width/heights

* Update SkiaRenderer.cpp

* Read stencil/samples

* Use switch for target

* Clear full red

* Update VideoPipeline.cpp

* fix: Use `BorrowTextureFrom` instead of `AdoptTextureFrom`

* Get it to work

* Draw Camera Frame again (only works for first frame)

* glDisable(GL_BLEND)

* Use Frame Buffer again

* Simplify Skia offscreen surface creation

* fix: Get it to kinda work?

* fix: Remove `sampler2D` shader

Only the EXTERNAL_OES one kinda works

* Revert "fix: Remove `sampler2D` shader"

This reverts commit bf241a82f440f5a442f23a2b10329b813e7cdb3e.

* Revert "fix: Get it to kinda work?"

This reverts commit ea6a8784ad8dc7d05e8076591874f021b51dd84a.

* fix: Use Skia for rendering

* Simplify drawing code a lot

* Clean up drawing loop a bit more

* Some docs

* Update SkiaRenderer.cpp

* Surface

* try to use Matrix

* Use BottomLeft as a surface origin again

* Get actual surface dimensions

* Use 1x1 pbuffer instead

* Update SkiaRenderer.cpp

* Update SkiaRenderer.cpp

* feat: Implement Skia Frame Processor (#1735)

* feat: Implement JS Skia Frame Processor

* Update SkiaRenderer.cpp

* push

* Create Frame from C++

* compile

* Compile

* Update VideoPipeline.cpp

* Fix JNI local ref

* Use `HardwareBuffer` for implementation

* feat: Custom `Frame` implementation that uses CPU `ByteBuffer` (#1736)

* feat: Implement JS Skia Frame Processor

* Update SkiaRenderer.cpp

* push

* Create Frame from C++

* compile

* Compile

* Update VideoPipeline.cpp

* Fix JNI local ref

* Use `HardwareBuffer` for implementation

* try: Try to just create a CPU based ByteBuffer

* fix: Fix Java Type

* fix remaining errors

* try fixing FrameFactory

* Use `free`

* fix: Fix scene mode crash on some emulators

* fix: Fix scene mode crash on some emulators

* Fix getting pixels

* fix: Fix buffer not being freed

* Add some docs to `Frame`

* Test Skia again

* Use `getCurrentPresentationTime()`

* Remove `FrameFactory.cpp`

* Update VideoPipeline.h

* Update VideoPipeline.cpp
This commit is contained in:
Marc Rousavy, 2023-09-01 10:43:19 +02:00 (committed via GitHub)
commit a7c137da07, parent 6bbb44d541
47 changed files with 1099 additions and 962 deletions

File: android/CMakeLists.txt

@@ -12,7 +12,7 @@ find_package(ReactAndroid REQUIRED CONFIG)
 find_package(fbjni REQUIRED CONFIG)
 find_library(LOG_LIB log)
 
-add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS} -DVISION_CAMERA_ENABLE_SKIA=${ENABLE_SKIA})
+add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS} -DVISION_CAMERA_ENABLE_SKIA=${ENABLE_SKIA} -DEGL_EGLEXT_PROTOTYPES=1)
 
 # Add react-native-vision-camera sources
@@ -37,6 +37,9 @@ add_library(
         src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
         # Skia Frame Processor
         src/main/cpp/skia/SkiaRenderer.cpp
+        src/main/cpp/skia/JSkiaFrameProcessor.cpp
+        src/main/cpp/skia/DrawableFrameHostObject.cpp
+        src/main/cpp/skia/VisionCameraSkiaContext.cpp
 )
 
 # Header Search Paths (includes)
@@ -48,6 +51,7 @@ target_include_directories(
         "src/main/cpp/frameprocessor"
         "src/main/cpp/frameprocessor/java-bindings"
         "src/main/cpp/skia"
+        "src/main/cpp/skia/java-bindings"
         "${NODE_MODULES_DIR}/react-native/ReactCommon"
         "${NODE_MODULES_DIR}/react-native/ReactCommon/callinvoker"
         "${NODE_MODULES_DIR}/react-native/ReactAndroid/src/main/jni/react/turbomodule" # <-- CallInvokerHolder JNI wrapper
@@ -97,16 +101,18 @@ if(ENABLE_FRAME_PROCESSORS)
   target_include_directories(
           ${PACKAGE_NAME}
           PRIVATE
+          "${RNSKIA_PATH}/cpp/api/"
+          "${RNSKIA_PATH}/cpp/jsi/"
+          "${RNSKIA_PATH}/cpp/rnskia/"
+          "${RNSKIA_PATH}/cpp/skia"
+          "${RNSKIA_PATH}/cpp/skia/include/"
           "${RNSKIA_PATH}/cpp/skia/include/config/"
          "${RNSKIA_PATH}/cpp/skia/include/core/"
          "${RNSKIA_PATH}/cpp/skia/include/effects/"
          "${RNSKIA_PATH}/cpp/skia/include/utils/"
          "${RNSKIA_PATH}/cpp/skia/include/pathops/"
          "${RNSKIA_PATH}/cpp/skia/modules/"
-          # "${RNSKIA_PATH}/cpp/skia/modules/skparagraph/include/"
-          "${RNSKIA_PATH}/cpp/skia/include/"
-          "${RNSKIA_PATH}/cpp/skia"
+          "${RNSKIA_PATH}/cpp/utils/"
   )
 
   target_link_libraries(

File: OpenGLContext.cpp

@@ -10,18 +10,14 @@
 #include <android/native_window.h>
 #include <android/log.h>
+#include <chrono>
 
 #include "OpenGLError.h"
 
 namespace vision {
 
-std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface(int width, int height) {
-  return std::unique_ptr<OpenGLContext>(new OpenGLContext(width, height));
-}
-
-OpenGLContext::OpenGLContext(int width, int height) {
-  _width = width;
-  _height = height;
+std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface() {
+  return std::unique_ptr<OpenGLContext>(new OpenGLContext());
 }
 
 OpenGLContext::~OpenGLContext() {
@@ -67,10 +63,10 @@ void OpenGLContext::ensureOpenGL() {
   __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
   EGLint attributes[] = {EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                          EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
-                         EGL_ALPHA_SIZE, 8,
-                         EGL_BLUE_SIZE, 8,
-                         EGL_GREEN_SIZE, 8,
                          EGL_RED_SIZE, 8,
+                         EGL_GREEN_SIZE, 8,
+                         EGL_BLUE_SIZE, 8,
+                         EGL_ALPHA_SIZE, 8,
                          EGL_DEPTH_SIZE, 0,
                          EGL_STENCIL_SIZE, 0,
                          EGL_NONE};
@@ -90,9 +86,9 @@ void OpenGLContext::ensureOpenGL() {
   // EGLSurface
   if (offscreenSurface == EGL_NO_SURFACE) {
     // If we don't have a surface at all
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing %i x %i offscreen pbuffer EGLSurface..", _width, _height);
-    EGLint attributes[] = {EGL_WIDTH, _width,
-                           EGL_HEIGHT, _height,
+    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing 1x1 offscreen pbuffer EGLSurface..");
+    EGLint attributes[] = {EGL_WIDTH, 1,
+                           EGL_HEIGHT, 1,
                            EGL_NONE};
     offscreenSurface = eglCreatePbufferSurface(display, config, attributes);
     if (offscreenSurface == EGL_NO_SURFACE) throw OpenGLError("Failed to create OpenGL Surface!");
@@ -116,7 +112,12 @@ void OpenGLContext::use(EGLSurface surface) {
   // 3. Caller can now render to this surface
 }
 
-GLuint OpenGLContext::createTexture() {
+void OpenGLContext::flush() const {
+  bool successful = eglSwapBuffers(display, eglGetCurrentSurface(EGL_DRAW));
+  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to swap OpenGL buffers!");
+}
+
+OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width, int height) {
   // 1. Make sure the OpenGL context is initialized
   this->ensureOpenGL();
@@ -127,7 +128,42 @@ GLuint OpenGLContext::createTexture() {
   GLuint textureId;
   glGenTextures(1, &textureId);
 
-  return textureId;
+  GLenum target;
+  switch (type) {
+    case OpenGLTexture::Type::ExternalOES:
+      target = GL_TEXTURE_EXTERNAL_OES;
+      break;
+    case OpenGLTexture::Type::Texture2D:
+      target = GL_TEXTURE_2D;
+      break;
+    default:
+      throw std::runtime_error("Invalid OpenGL Texture Type!");
+  }
+
+  glBindTexture(target, textureId);
+  glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+  glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+  return {
+    .id = textureId,
+    .target = target,
+    .width = width,
+    .height = height
+  };
+}
+
+void OpenGLContext::getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels) {
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(texture.target, texture.id);
+  glReadPixels(0, 0, texture.width, texture.height, GL_RGBA, GL_UNSIGNED_BYTE, *outPixels);
+  // height * width * components per pixel (4 for RGBA) * size of one number (byte)
+  *outSize = texture.height * texture.width * 4 * sizeof(uint8_t);
+}
+
+long OpenGLContext::getCurrentPresentationTime() {
+  auto now = std::chrono::steady_clock::now();
+  auto duration = now.time_since_epoch();
+  long long milliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
+  return static_cast<long>(milliseconds);
 }
 
 } // namespace vision
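
For orientation, a minimal sketch (not part of the diff) of how the reworked OpenGLContext API composes. The 1920x1080 size and the caller-side malloc are illustrative assumptions, mirroring what VideoPipeline and JFrame do:

#include "OpenGLContext.h"
#include "OpenGLTexture.h"
#include <cstddef>
#include <cstdint>
#include <cstdlib>

void sketchUsage() {
  // One shared context backed by a 1x1 pbuffer; sizing now lives on the textures.
  auto context = vision::OpenGLContext::CreateWithOffscreenSurface();
  context->use();

  // The Camera renders into an EXTERNAL_OES texture of a known size:
  OpenGLTexture texture = context->createTexture(OpenGLTexture::Type::ExternalOES, 1920, 1080);

  // Read the texture back into caller-owned CPU memory (RGBA, 4 bytes per pixel):
  size_t size = 0;
  auto* pixels = static_cast<uint8_t*>(malloc(1920 * 1080 * 4));
  context->getPixelsOfTexture(texture, &size, &pixels);

  long timestampMs = context->getCurrentPresentationTime();
  (void) timestampMs;
  free(pixels);
}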

File: OpenGLContext.h

@@ -6,9 +6,11 @@
 #include <EGL/egl.h>
 #include <GLES2/gl2.h>
+#include "OpenGLTexture.h"
 #include <memory>
 #include <functional>
+#include <chrono>
 
 #include "PassThroughShader.h"
@@ -24,7 +26,7 @@ class OpenGLContext {
   * Create a new instance of the OpenGLContext that draws to an off-screen PixelBuffer surface.
   * This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
   */
-  static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface(int width, int height);
+  static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface();
   /**
   * Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
   */
@@ -41,10 +43,25 @@ class OpenGLContext {
   */
  void use();
 
+  /**
+   * Flushes all drawing operations by swapping the buffers and submitting the Frame to the GPU
+   */
+  void flush() const;
+
  /**
   * Create a new texture on this context
   */
-  GLuint createTexture();
+  OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);
+
+  /**
+   * Gets the pixels as CPU accessible memory of the given input texture
+   */
+  void getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels);
+
+  /**
+   * Gets the current presentation time for this OpenGL surface.
+   */
+  long getCurrentPresentationTime();
 
 public:
  EGLDisplay display = EGL_NO_DISPLAY;
@@ -53,13 +70,13 @@ class OpenGLContext {
  EGLConfig config = nullptr;
 
 private:
-  int _width = 0, _height = 0;
-  explicit OpenGLContext(int width, int height);
+  explicit OpenGLContext() = default;
  void destroy();
  void ensureOpenGL();
 
 private:
  PassThroughShader _passThroughShader;
+  std::chrono::time_point<std::chrono::system_clock> _startTime;
 
 private:
  static constexpr auto TAG = "OpenGLContext";

File: OpenGLError.h

@@ -4,8 +4,6 @@
 
 #pragma once
 
-#if VISION_CAMERA_ENABLE_SKIA
-
 #include <string>
 #include <stdexcept>
 #include <GLES2/gl2.h>
@@ -23,8 +21,11 @@ inline std::string getEglErrorIfAny() {
 class OpenGLError: public std::runtime_error {
  public:
   explicit OpenGLError(const std::string&& message): std::runtime_error(message + getEglErrorIfAny()) {}
+
+  static inline void checkIfError(const std::string&& message) {
+    auto error = getEglErrorIfAny();
+    if (error.length() > 0) throw std::runtime_error(message + error);
+  }
 };
 
 } // namespace vision
-
-#endif
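
A small usage sketch (not in the diff): the new static helper collapses the repeated call-then-check-then-throw pattern into one line, as PassThroughShader::createProgram now does:

#include "OpenGLError.h"
#include <GLES2/gl2.h>

void sketchErrorCheck(GLuint program, GLuint shader) {
  glAttachShader(program, shader);
  // Throws with the pending EGL error message appended, if there is one.
  vision::OpenGLError::checkIfError("Failed to attach Shader!");
}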

File: OpenGLRenderer.cpp

@@ -43,32 +43,35 @@ void OpenGLRenderer::destroy() {
   }
 }
 
-void OpenGLRenderer::renderTextureToSurface(GLuint textureId, float* transformMatrix) {
+EGLSurface OpenGLRenderer::getEGLSurface() {
   if (_surface == EGL_NO_SURFACE) {
     __android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
     _context->use();
     _surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
   }
+  return _surface;
+}
 
-  // 1. Activate the OpenGL context for this surface
-  _context->use(_surface);
+void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
+  // 1. Get (or create) the OpenGL EGLSurface which is the window render target (Android Surface)
+  EGLSurface surface = getEGLSurface();
 
-  // 2. Set the viewport for rendering
+  // 2. Activate the OpenGL context for this surface
+  _context->use(surface);
+  OpenGLError::checkIfError("Failed to use context!");
+
+  // 3. Set the viewport for rendering
   glViewport(0, 0, _width, _height);
   glDisable(GL_BLEND);
+  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+  glClear(GL_COLOR_BUFFER_BIT);
 
-  // 3. Bind the input texture
-  glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
-  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
-  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
-  // 4. Draw it using the pass-through shader which also applies transforms
-  _passThroughShader.draw(textureId, transformMatrix);
+  // 4. Draw it using the pass-through shader which binds the texture and applies transforms
+  _passThroughShader.draw(texture, transformMatrix);
 
-  // 5. Swap buffers to pass it to the window surface
-  eglSwapBuffers(_context->display, _surface);
+  // 5. Swap buffers to pass it to the window surface
+  _context->flush();
+  OpenGLError::checkIfError("Failed to render Frame to Surface!");
 }
 
 } // namespace vision

File: OpenGLRenderer.h

@@ -29,9 +29,9 @@ class OpenGLRenderer {
   ~OpenGLRenderer();
 
   /**
-   * Renders the given Texture ID to the Surface
+   * Renders the given Texture to the Surface
   */
-  void renderTextureToSurface(GLuint textureId, float* transformMatrix);
+  void renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix);
 
   /**
   * Destroys the OpenGL context. This needs to be called on the same thread that `use()` was called.
@@ -39,6 +39,11 @@ class OpenGLRenderer {
   */
   void destroy();
 
+  /**
+   * Gets the EGLSurface (window surface) that this OpenGL renderer is configured to render to.
+   */
+  EGLSurface getEGLSurface();
+
  private:
   explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);

File: OpenGLTexture.h (new file)

@@ -0,0 +1,22 @@
//
// Created by Marc Rousavy on 30.08.23.
//
#pragma once
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdexcept>
struct OpenGLTexture {
enum Type { Texture2D, ExternalOES };
// The ID of the texture as returned in glGenTextures(..)
GLuint id;
// GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
GLenum target;
// Width and height of the texture
int width = 0;
int height = 0;
};

File: PassThroughShader.cpp

@@ -10,29 +10,29 @@
 #include "OpenGLError.h"
 #include <string>
+#include <android/log.h>
 
 namespace vision {
 
 PassThroughShader::~PassThroughShader() {
-  if (_programId != NO_SHADER) {
-    glDeleteProgram(_programId);
-    _programId = NO_SHADER;
-  }
   if (_vertexBuffer != NO_BUFFER) {
     glDeleteBuffers(1, &_vertexBuffer);
     _vertexBuffer = NO_BUFFER;
   }
+  if (_programId != NO_SHADER) {
+    glDeleteProgram(_programId);
+    _programId = NO_SHADER;
+  }
 }
 
-void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {
+void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
   // 1. Set up Shader Program
-  if (_programId == NO_SHADER) {
-    _programId = createProgram();
-  }
-
-  glUseProgram(_programId);
-
-  if (_vertexParameters.aPosition == NO_POSITION) {
+  if (_programId == NO_SHADER || _shaderTarget != texture.target) {
+    if (_programId != NO_SHADER) {
+      glDeleteProgram(_programId);
+    }
+    _programId = createProgram(texture.target);
+    glUseProgram(_programId);
     _vertexParameters = {
       .aPosition = glGetAttribLocation(_programId, "aPosition"),
       .aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
@@ -41,15 +41,19 @@ void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {
     _fragmentParameters = {
       .uTexture = glGetUniformLocation(_programId, "uTexture"),
     };
+    _shaderTarget = texture.target;
   }
+  glUseProgram(_programId);
 
   // 2. Set up Vertices Buffer
   if (_vertexBuffer == NO_BUFFER) {
     glGenBuffers(1, &_vertexBuffer);
+    glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
+    glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
   }
-  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
-  glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
 
   // 3. Pass all uniforms/attributes for vertex shader
   glEnableVertexAttribArray(_vertexParameters.aPosition);
   glVertexAttribPointer(_vertexParameters.aPosition,
@@ -71,7 +75,7 @@ void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {
   // 4. Pass texture to fragment shader
   glActiveTexture(GL_TEXTURE0);
-  glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
+  glBindTexture(texture.target, texture.id);
   glUniform1i(_fragmentParameters.uTexture, 0);
 
   // 5. Draw!
@@ -93,18 +97,19 @@ GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode)
   return shader;
 }
 
-GLuint PassThroughShader::createProgram() {
+GLuint PassThroughShader::createProgram(GLenum textureTarget) {
   GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
-  GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
+  auto fragmentShaderCode = textureTarget == GL_TEXTURE_EXTERNAL_OES ? FRAGMENT_SHADER_EXTERNAL_TEXTURE : FRAGMENT_SHADER;
+  GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
 
   GLuint program = glCreateProgram();
   if (program == 0) throw OpenGLError("Failed to create pass-through program!");
   glAttachShader(program, vertexShader);
-  if (glGetError() != GL_NO_ERROR) throw OpenGLError("Failed to attach Vertex Shader!");
+  OpenGLError::checkIfError("Failed to attach Vertex Shader!");
   glAttachShader(program, fragmentShader);
-  if (glGetError() != GL_NO_ERROR) throw OpenGLError("Failed to attach Fragment Shader!");
+  OpenGLError::checkIfError("Failed to attach Fragment Shader!");
   glLinkProgram(program);
   GLint linkStatus = GL_FALSE;

File: PassThroughShader.h

@@ -7,11 +7,14 @@
 #include <EGL/egl.h>
 #include <GLES2/gl2.h>
+#include "OpenGLTexture.h"
 
 namespace vision {
 
 #define NO_SHADER 0
 #define NO_POSITION 0
 #define NO_BUFFER 0
+#define NO_SHADER_TARGET 0
 
 struct Vertex {
   GLfloat position[2];
@@ -26,15 +29,16 @@ class PassThroughShader {
   /**
   * Draw the texture using this shader.
   */
-  void draw(GLuint textureId, float* transformMatrix);
+  void draw(const OpenGLTexture& texture, float* transformMatrix);
 
 private:
  // Loading
  static GLuint loadShader(GLenum shaderType, const char* shaderCode);
-  static GLuint createProgram();
+  static GLuint createProgram(GLenum textureTarget);
 
 private:
-  // Parameters
+  // Shader program in memory
+  GLenum _shaderTarget = NO_SHADER_TARGET;
  GLuint _programId = NO_SHADER;
  GLuint _vertexBuffer = NO_BUFFER;
 
  struct VertexParameters {
@@ -67,7 +71,17 @@
   }
   )";
 
   static constexpr char FRAGMENT_SHADER[] = R"(
+    precision mediump float;
+    varying vec2 vTexCoord;
+    uniform sampler2D uTexture;
+
+    void main() {
+      gl_FragColor = texture2D(uTexture, vTexCoord);
+    }
+  )";
+
+  static constexpr char FRAGMENT_SHADER_EXTERNAL_TEXTURE[] = R"(
     #extension GL_OES_EGL_image_external : require
     precision mediump float;
     varying vec2 vTexCoord;
     uniform samplerExternalOES uTexture;
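
The reason for carrying two fragment shaders: camera frames arrive as GL_TEXTURE_EXTERNAL_OES textures, which GLSL can only sample through samplerExternalOES (via the GL_OES_EGL_image_external extension), while Skia's offscreen output is a plain GL_TEXTURE_2D sampled through a regular sampler2D. A one-line sketch (not repo code) of the rule createProgram() applies:

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

// Sketch only; PassThroughShader::createProgram() makes the same decision.
inline bool needsExternalSampler(GLenum textureTarget) {
  return textureTarget == GL_TEXTURE_EXTERNAL_OES;
}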

File: VideoPipeline.cpp

@@ -9,6 +9,14 @@
 #include <GLES2/gl2.h>
 #include <GLES2/gl2ext.h>
 #include <EGL/egl.h>
+#include <EGL/eglext.h>
+#include <GLES/gl.h>
+#include <chrono>
+
+#include "OpenGLTexture.h"
+#include "JFrameProcessor.h"
+#include "JSkiaFrameProcessor.h"
 
 namespace vision {
@@ -19,39 +27,28 @@ jni::local_ref<VideoPipeline::jhybriddata> VideoPipeline::initHybrid(jni::alias_
 VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height): _javaPart(jni::make_global(jThis)) {
   _width = width;
   _height = height;
-  _context = OpenGLContext::CreateWithOffscreenSurface(width, height);
+  _context = OpenGLContext::CreateWithOffscreenSurface();
 }
 
 VideoPipeline::~VideoPipeline() {
   // 1. Remove output surfaces
-  removeFrameProcessorOutputSurface();
+  removeFrameProcessor();
   removeRecordingSessionOutputSurface();
   removePreviewOutputSurface();
   // 2. Delete the input textures
-  if (_inputTextureId != NO_TEXTURE) {
-    glDeleteTextures(1, &_inputTextureId);
-    _inputTextureId = NO_TEXTURE;
+  if (_inputTexture != std::nullopt) {
+    glDeleteTextures(1, &_inputTexture->id);
   }
-  // 4. Destroy all surfaces
-  _previewOutput = nullptr;
-  _frameProcessorOutput = nullptr;
-  _recordingSessionOutput = nullptr;
-  // 5. Destroy the OpenGL context
+  // 3. Destroy the OpenGL context
   _context = nullptr;
 }
 
-void VideoPipeline::removeFrameProcessorOutputSurface() {
-  if (_frameProcessorOutput) _frameProcessorOutput->destroy();
-  _frameProcessorOutput = nullptr;
+void VideoPipeline::removeFrameProcessor() {
+  _frameProcessor = nullptr;
 }
 
-void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
-  // 1. Delete existing output surface
-  removeFrameProcessorOutputSurface();
-
-  // 2. Set new output surface if it is not null
-  ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
-  _frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
+void VideoPipeline::setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor) {
+  _frameProcessor = jni::make_global(frameProcessor);
 }
 
 void VideoPipeline::removeRecordingSessionOutputSurface() {
@@ -73,6 +70,11 @@ void VideoPipeline::removePreviewOutputSurface() {
   _previewOutput = nullptr;
 }
 
+jni::local_ref<JFrame> VideoPipeline::createFrame() {
+  static const auto createFrameMethod = javaClassLocal()->getMethod<JFrame()>("createFrame");
+  return createFrameMethod(_javaPart);
+}
+
 void VideoPipeline::setPreviewOutputSurface(jobject surface) {
   // 1. Delete existing output surface
   removePreviewOutputSurface();
@@ -83,48 +85,119 @@ void VideoPipeline::setPreviewOutputSurface(jobject surface) {
 }
 
 int VideoPipeline::getInputTextureId() {
-  if (_inputTextureId != NO_TEXTURE) return static_cast<int>(_inputTextureId);
-
-  _inputTextureId = _context->createTexture();
-
-  return static_cast<int>(_inputTextureId);
+  if (_inputTexture == std::nullopt) {
+    _inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
+  }
+  return static_cast<int>(_inputTexture->id);
 }
 
 void VideoPipeline::onBeforeFrame() {
+  // 1. Activate the offscreen context
   _context->use();
 
-  glBindTexture(GL_TEXTURE_EXTERNAL_OES, _inputTextureId);
+  // 2. Prepare the external texture so the Camera can render into it
+  OpenGLTexture& texture = _inputTexture.value();
+  glBindTexture(texture.target, texture.id);
 }
 
 void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
-  // Get the OpenGL transform Matrix (transforms, scales, rotations)
+  // 1. Activate the offscreen context
+  _context->use();
+
+  // 2. Get the OpenGL transform Matrix (transforms, scales, rotations)
   float transformMatrix[16];
   transformMatrixParam->getRegion(0, 16, transformMatrix);
 
-  if (_previewOutput) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
-    _previewOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
-  }
-  if (_frameProcessorOutput) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
-    _frameProcessorOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
-  }
-  if (_recordingSessionOutput) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
-    _recordingSessionOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
+  // 3. Prepare the texture we are going to render
+  OpenGLTexture& texture = _inputTexture.value();
+
+  // 4. Render to all outputs!
+  auto isSkiaFrameProcessor = _frameProcessor != nullptr && _frameProcessor->isInstanceOf(JSkiaFrameProcessor::javaClassStatic());
+  if (isSkiaFrameProcessor) {
+    // 4.1. If we have a Skia Frame Processor, prepare to render to an offscreen surface using Skia
+    jni::global_ref<JSkiaFrameProcessor::javaobject> skiaFrameProcessor = jni::static_ref_cast<JSkiaFrameProcessor::javaobject>(_frameProcessor);
+    SkiaRenderer& skiaRenderer = skiaFrameProcessor->cthis()->getSkiaRenderer();
+    auto drawCallback = [=](SkCanvas* canvas) {
+      // Create a JFrame instance (this uses queues/recycling)
+      auto frame = JFrame::create(texture.width,
+                                  texture.height,
+                                  texture.width * 4,
+                                  _context->getCurrentPresentationTime(),
+                                  "portrait",
+                                  false);
+      // Fill the Frame with the contents of the GL surface
+      _context->getPixelsOfTexture(texture,
+                                   &frame->cthis()->pixelsSize,
+                                   &frame->cthis()->pixels);
+      // Call the Frame processor with the Frame
+      frame->cthis()->incrementRefCount();
+      skiaFrameProcessor->cthis()->call(frame, canvas);
+      frame->cthis()->decrementRefCount();
+    };
+
+    // 4.2. Render to the offscreen surface using Skia
+    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using Skia..");
+    OpenGLTexture offscreenTexture = skiaRenderer.renderTextureToOffscreenSurface(*_context,
+                                                                                  texture,
+                                                                                  transformMatrix,
+                                                                                  drawCallback);
+
+    // 4.3. Now render the result of the offscreen surface to all output surfaces!
+    if (_previewOutput) {
+      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
+      skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _previewOutput->getEGLSurface());
+    }
+    if (_recordingSessionOutput) {
+      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
+      skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _recordingSessionOutput->getEGLSurface());
+    }
+  } else {
+    // 4.1. If we have a Frame Processor, call it
+    if (_frameProcessor != nullptr) {
+      // Create a JFrame instance (this uses queues/recycling)
+      auto frame = JFrame::create(texture.width,
+                                  texture.height,
+                                  texture.width * 4,
+                                  _context->getCurrentPresentationTime(),
+                                  "portrait",
+                                  false);
+      // Fill the Frame with the contents of the GL surface
+      _context->getPixelsOfTexture(texture,
+                                   &frame->cthis()->pixelsSize,
+                                   &frame->cthis()->pixels);
+      // Call the Frame processor with the Frame
+      frame->cthis()->incrementRefCount();
+      _frameProcessor->cthis()->call(frame);
+      frame->cthis()->decrementRefCount();
+    }
+
+    // 4.2. Simply pass-through shader to render the texture to all output EGLSurfaces
+    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using pass-through OpenGL Shader..");
+    if (_previewOutput) {
+      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
+      _previewOutput->renderTextureToSurface(texture, transformMatrix);
+    }
+    if (_recordingSessionOutput) {
+      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
+      _recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
+    }
   }
 }
 
 void VideoPipeline::registerNatives() {
   registerHybrid({
     makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
-    makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
-    makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
-    makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
-    makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
+    makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
+    makeNativeMethod("setFrameProcessor", VideoPipeline::setFrameProcessor),
+    makeNativeMethod("removeFrameProcessor", VideoPipeline::removeFrameProcessor),
     makeNativeMethod("setPreviewOutputSurface", VideoPipeline::setPreviewOutputSurface),
     makeNativeMethod("removePreviewOutputSurface", VideoPipeline::removePreviewOutputSurface),
-    makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
+    makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
+    makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
     makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
     makeNativeMethod("onFrame", VideoPipeline::onFrame),
   });
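
A note on the buffer arithmetic VideoPipeline relies on when creating a JFrame from the GL texture: pixels are read back as RGBA8888 (glReadPixels with GL_RGBA/GL_UNSIGNED_BYTE in OpenGLContext::getPixelsOfTexture), so bytesPerRow is width * 4 and the full buffer is height * bytesPerRow bytes. A compile-time sketch:

#include <cstddef>

constexpr size_t bytesPerRow(int width) { return static_cast<size_t>(width) * 4; }
constexpr size_t frameSize(int width, int height) { return bytesPerRow(width) * static_cast<size_t>(height); }

static_assert(frameSize(1920, 1080) == 1920 * 1080 * 4, "RGBA8888 layout");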

File: VideoPipeline.h

@@ -8,14 +8,15 @@
 #include <fbjni/fbjni.h>
 #include <EGL/egl.h>
 #include <android/native_window.h>
-#include "PassThroughShader.h"
-#include "OpenGLRenderer.h"
-#include "OpenGLContext.h"
 #include <memory>
 
-namespace vision {
+#include "OpenGLRenderer.h"
+#include "OpenGLContext.h"
+#include "OpenGLTexture.h"
+#include "JFrameProcessor.h"
 
-#define NO_TEXTURE 0
+namespace vision {
 
 using namespace facebook;
@@ -32,8 +33,8 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
   int getInputTextureId();
 
   // <- Frame Processor output
-  void setFrameProcessorOutputSurface(jobject surface);
-  void removeFrameProcessorOutputSurface();
+  void setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor);
+  void removeFrameProcessor();
 
   // <- MediaRecorder output
   void setRecordingSessionOutputSurface(jobject surface);
@@ -50,16 +51,20 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
 private:
  // Private constructor. Use `create(..)` to create new instances.
  explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
+  // Creates a new Frame instance which should be filled with data.
+  jni::local_ref<JFrame> createFrame();
 
 private:
  // Input Surface Texture
-  GLuint _inputTextureId = NO_TEXTURE;
+  std::optional<OpenGLTexture> _inputTexture;
  int _width = 0;
  int _height = 0;
 
+  // (Optional) Frame Processor that processes frames before they go into output
+  jni::global_ref<JFrameProcessor::javaobject> _frameProcessor = nullptr;
+
  // Output Contexts
  std::shared_ptr<OpenGLContext> _context = nullptr;
-  std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
  std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
  std::unique_ptr<OpenGLRenderer> _previewOutput = nullptr;
File: VisionCamera.cpp

@@ -4,7 +4,7 @@
 #include "JFrameProcessor.h"
 #include "JVisionCameraProxy.h"
 #include "VisionCameraProxy.h"
-#include "SkiaRenderer.h"
+#include "JSkiaFrameProcessor.h"
 #include "VideoPipeline.h"
 
 JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
@@ -17,7 +17,7 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
     vision::JFrameProcessor::registerNatives();
 #endif
 #if VISION_CAMERA_ENABLE_SKIA
-    vision::SkiaRenderer::registerNatives();
+    vision::JSkiaFrameProcessor::registerNatives();
 #endif
   });
 }

File: FrameHostObject.cpp

@@ -31,7 +31,6 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
-  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
@@ -55,7 +54,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                                                       const jsi::Value* args,
                                                       size_t count) -> jsi::Value {
       // Increment retain count by one.
-      this->frame->incrementRefCount();
+      this->frame->cthis()->incrementRefCount();
       return jsi::Value::undefined();
     };
     return jsi::Function::createFromHostFunction(runtime,
@@ -69,7 +68,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                                                       const jsi::Value* args,
                                                       size_t count) -> jsi::Value {
       // Decrement retain count by one. If the retain count is zero, the Frame gets closed.
-      this->frame->decrementRefCount();
+      this->frame->cthis()->decrementRefCount();
       return jsi::Value::undefined();
     };
     return jsi::Function::createFromHostFunction(runtime,
@@ -85,8 +84,8 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
      if (!this->frame) {
        return jsi::String::createFromUtf8(runtime, "[closed frame]");
      }
-      auto width = this->frame->getWidth();
-      auto height = this->frame->getHeight();
+      auto width = this->frame->cthis()->getWidth();
+      auto height = this->frame->cthis()->getHeight();
      auto str = std::to_string(width) + " x " + std::to_string(height) + " Frame";
      return jsi::String::createFromUtf8(runtime, str);
    };
@@ -97,11 +96,8 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                                                       const jsi::Value& thisArg,
                                                       const jsi::Value* args,
                                                       size_t count) -> jsi::Value {
-      auto buffer = this->frame->toByteBuffer();
-      if (!buffer->isDirect()) {
-        throw std::runtime_error("Failed to get byte content of Frame - array is not direct ByteBuffer!");
-      }
-      auto size = buffer->getDirectSize();
+      size_t size = frame->cthis()->pixelsSize;
+      uint8_t* pixels = frame->cthis()->pixels;
 
       static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
       if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
@@ -119,7 +115,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
       // directly write to C++ JSI ArrayBuffer
       auto destinationBuffer = arrayBuffer.data(runtime);
-      memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);
+      memcpy(destinationBuffer, pixels, sizeof(uint8_t) * size);
 
       return arrayBuffer;
     };
@@ -127,33 +123,30 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
   }
 
   if (name == "isValid") {
-    return jsi::Value(this->frame && this->frame->getIsValid());
+    return jsi::Value(this->frame && this->frame->cthis()->getIsValid());
   }
   if (name == "width") {
-    return jsi::Value(this->frame->getWidth());
+    return jsi::Value(this->frame->cthis()->getWidth());
   }
   if (name == "height") {
-    return jsi::Value(this->frame->getHeight());
+    return jsi::Value(this->frame->cthis()->getHeight());
   }
   if (name == "isMirrored") {
-    return jsi::Value(this->frame->getIsMirrored());
+    return jsi::Value(this->frame->cthis()->getIsMirrored());
   }
   if (name == "orientation") {
-    auto string = this->frame->getOrientation();
+    auto string = this->frame->cthis()->getOrientation();
     return jsi::String::createFromUtf8(runtime, string->toStdString());
   }
   if (name == "pixelFormat") {
-    auto string = this->frame->getPixelFormat();
+    auto string = this->frame->cthis()->getPixelFormat();
     return jsi::String::createFromUtf8(runtime, string->toStdString());
   }
   if (name == "timestamp") {
-    return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
+    return jsi::Value(static_cast<double>(this->frame->cthis()->getTimestamp()));
   }
   if (name == "bytesPerRow") {
-    return jsi::Value(this->frame->getBytesPerRow());
-  }
-  if (name == "planesCount") {
-    return jsi::Value(this->frame->getPlanesCount());
+    return jsi::Value(this->frame->cthis()->getBytesPerRow());
   }
 
   // fallback to base implementation

File: FrameHostObject.h

@@ -26,7 +26,7 @@ class JSI_EXPORT FrameHostObject : public jsi::HostObject {
   std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) override;
 
  public:
-  jni::global_ref<JFrame> frame;
+  jni::global_ref<JFrame::javaobject> frame;
 };
 
 } // namespace vision

File: JSIJNIConversion.cpp

@@ -111,10 +111,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
     return jsi::String::createFromUtf8(runtime, object->toString());
 
-  } else if (object->isInstanceOf(JList<jobject>::javaClassStatic())) {
+  } else if (object->isInstanceOf(jni::JList<jobject>::javaClassStatic())) {
     // List<E>
-    auto arrayList = static_ref_cast<JList<jobject>>(object);
+    auto arrayList = jni::static_ref_cast<jni::JList<jobject>>(object);
     auto size = arrayList->size();
 
     auto result = jsi::Array(runtime, size);
@@ -125,10 +125,10 @@
     }
     return result;
 
-  } else if (object->isInstanceOf(JMap<jstring, jobject>::javaClassStatic())) {
+  } else if (object->isInstanceOf(jni::JMap<jstring, jobject>::javaClassStatic())) {
     // Map<K, V>
-    auto map = static_ref_cast<JMap<jstring, jobject>>(object);
+    auto map = jni::static_ref_cast<jni::JMap<jstring, jobject>>(object);
 
     auto result = jsi::Object(runtime);
     for (const auto& entry : *map) {
@@ -140,7 +140,7 @@
     return result;
 
   } else if (object->isInstanceOf(JFrame::javaClassStatic())) {
     // Frame
-    auto frame = static_ref_cast<JFrame>(object);
+    auto frame = jni::static_ref_cast<JFrame::javaobject>(object);
 
     // box into HostObject
     auto hostObject = std::make_shared<FrameHostObject>(frame);

File: JFrame.cpp

@@ -11,71 +11,85 @@
 namespace vision {
 
 using namespace facebook;
-using namespace jni;
 
-int JFrame::getWidth() const {
-  static const auto getWidthMethod = getClass()->getMethod<jint()>("getWidth");
-  return getWidthMethod(self());
+void JFrame::registerNatives() {
+  registerHybrid({
+    makeNativeMethod("getWidth", JFrame::getWidth),
+    makeNativeMethod("getHeight", JFrame::getHeight),
+    makeNativeMethod("getBytesPerRow", JFrame::getBytesPerRow),
+    makeNativeMethod("getTimestamp", JFrame::getTimestamp),
+    makeNativeMethod("getOrientation", JFrame::getOrientation),
+    makeNativeMethod("getIsMirrored", JFrame::getIsMirrored),
+    makeNativeMethod("getPixelFormat", JFrame::getPixelFormat),
+    makeNativeMethod("getByteBuffer", JFrame::getByteBuffer),
+    makeNativeMethod("getIsValid", JFrame::getIsValid),
+  });
 }
 
-int JFrame::getHeight() const {
-  static const auto getWidthMethod = getClass()->getMethod<jint()>("getHeight");
-  return getWidthMethod(self());
+jni::local_ref<JFrame::javaobject> JFrame::create(int width,
+                                                  int height,
+                                                  int bytesPerRow,
+                                                  long timestamp,
+                                                  const std::string& orientation,
+                                                  bool isMirrored) {
+  return newObjectCxxArgs(width,
+                          height,
+                          bytesPerRow,
+                          timestamp,
+                          orientation,
+                          isMirrored);
 }
 
-bool JFrame::getIsValid() const {
-  static const auto getIsValidMethod = getClass()->getMethod<jboolean()>("getIsValid");
-  return getIsValidMethod(self());
+JFrame::JFrame(int width,
+               int height,
+               int bytesPerRow,
+               long timestamp,
+               const std::string& orientation,
+               bool isMirrored) {
+  _width = width;
+  _height = height;
+  _bytesPerRow = bytesPerRow;
+  _timestamp = timestamp;
+  _orientation = orientation;
+  _isMirrored = isMirrored;
+  _refCount = 0;
+  pixelsSize = height * bytesPerRow;
+  pixels = (uint8_t*) malloc(pixelsSize);
 }
 
-bool JFrame::getIsMirrored() const {
-  static const auto getIsMirroredMethod = getClass()->getMethod<jboolean()>("getIsMirrored");
-  return getIsMirroredMethod(self());
+JFrame::~JFrame() noexcept {
+  close();
 }
 
-jlong JFrame::getTimestamp() const {
-  static const auto getTimestampMethod = getClass()->getMethod<jlong()>("getTimestamp");
-  return getTimestampMethod(self());
+bool JFrame::getIsValid() {
+  return _refCount > 0 && !_isClosed;
 }
 
-local_ref<JString> JFrame::getOrientation() const {
-  static const auto getOrientationMethod = getClass()->getMethod<JString()>("getOrientation");
-  return getOrientationMethod(self());
-}
-
-local_ref<JString> JFrame::getPixelFormat() const {
-  static const auto getPixelFormatMethod = getClass()->getMethod<JString()>("getPixelFormat");
-  return getPixelFormatMethod(self());
-}
-
-int JFrame::getPlanesCount() const {
-  static const auto getPlanesCountMethod = getClass()->getMethod<jint()>("getPlanesCount");
-  return getPlanesCountMethod(self());
-}
-
-int JFrame::getBytesPerRow() const {
-  static const auto getBytesPerRowMethod = getClass()->getMethod<jint()>("getBytesPerRow");
-  return getBytesPerRowMethod(self());
-}
-
-local_ref<JByteBuffer> JFrame::toByteBuffer() const {
-  static const auto toByteBufferMethod = getClass()->getMethod<JByteBuffer()>("toByteBuffer");
-  return toByteBufferMethod(self());
+jni::local_ref<jni::JByteBuffer> JFrame::getByteBuffer() {
+  if (!getIsValid()) {
+    [[unlikely]]
+    throw std::runtime_error("Frame is no longer valid, cannot access getByteBuffer!");
+  }
+
+  return jni::JByteBuffer::wrapBytes(pixels, pixelsSize);
 }
 
 void JFrame::incrementRefCount() {
-  static const auto incrementRefCountMethod = getClass()->getMethod<void()>("incrementRefCount");
-  incrementRefCountMethod(self());
+  std::unique_lock lock(_mutex);
+  _refCount++;
 }
 
 void JFrame::decrementRefCount() {
-  static const auto decrementRefCountMethod = getClass()->getMethod<void()>("decrementRefCount");
-  decrementRefCountMethod(self());
+  std::unique_lock lock(_mutex);
+  _refCount--;
+  if (_refCount <= 0) {
+    this->close();
+  }
 }
 
 void JFrame::close() {
-  static const auto closeMethod = getClass()->getMethod<void()>("close");
-  closeMethod(self());
+  _isClosed = true;
+  free(pixels);
+  pixels = nullptr;
 }
 
 } // namespace vision

File: JFrame.h

@@ -7,29 +7,70 @@
 #include <jni.h>
 #include <fbjni/fbjni.h>
 #include <fbjni/ByteBuffer.h>
+#include <android/hardware_buffer.h>
+#include <android/hardware_buffer_jni.h>
+#include <mutex>
 
 namespace vision {
 
 using namespace facebook;
-using namespace jni;
 
-struct JFrame : public JavaClass<JFrame> {
+class JFrame : public jni::HybridClass<JFrame> {
+ public:
   static constexpr auto kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/Frame;";
+  static void registerNatives();
+  static jni::local_ref<JFrame::javaobject> create(int width,
+                                                   int height,
+                                                   int bytesPerRow,
+                                                   long timestamp,
+                                                   const std::string& orientation,
+                                                   bool isMirrored);
+  ~JFrame() noexcept;
+
+ protected:
+  friend HybridBase;
+  explicit JFrame(int width,
+                  int height,
+                  int bytesPerRow,
+                  long timestamp,
+                  const std::string& orientation,
+                  bool isMirrored);
 
  public:
-  int getWidth() const;
-  int getHeight() const;
-  bool getIsValid() const;
-  bool getIsMirrored() const;
-  int getPlanesCount() const;
-  int getBytesPerRow() const;
-  jlong getTimestamp() const;
-  local_ref<JString> getOrientation() const;
-  local_ref<JString> getPixelFormat() const;
-  local_ref<JByteBuffer> toByteBuffer() const;
+  int getWidth() { return _width; }
+  int getHeight() { return _height; }
+  int getBytesPerRow() { return _bytesPerRow; }
+  jlong getTimestamp() { return _timestamp; }
+  jni::local_ref<jni::JString> getOrientation() { return jni::make_jstring(_orientation); }
+  bool getIsMirrored() { return _isMirrored; }
+  // TODO: Can this be something other than RGB?
+  jni::local_ref<jni::JString> getPixelFormat() { return jni::make_jstring("rgb"); }
+
+  bool getIsValid();
+  jni::local_ref<jni::JByteBuffer> getByteBuffer();
+
   void incrementRefCount();
   void decrementRefCount();
   void close();
+
+  // Backing byte data
+  uint8_t* pixels = nullptr;
+  size_t pixelsSize = 0;
+
+ private:
+  // Frame info
+  int _width = 0;
+  int _height = 0;
+  int _bytesPerRow = 0;
+  long _timestamp = 0;
+  std::string _orientation = {};
+  bool _isMirrored = false;
+  // Ref-counting
+  int _refCount = 0;
+  bool _isClosed = false;
+  std::mutex _mutex;
 };
 
 } // namespace vision
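
To make the new ownership model concrete, a short sketch (not in the diff) of the intended JFrame lifetime: the C++ hybrid now owns a malloc'ed pixel buffer, and close() frees it once the ref count drops back to zero:

#include "JFrame.h"

void sketchFrameLifetime() {
  auto frame = vision::JFrame::create(1920, 1080, 1920 * 4,
                                      /* timestamp */ 0, "portrait",
                                      /* isMirrored */ false);
  frame->cthis()->incrementRefCount();   // a consumer starts using the Frame
  // ... read frame->cthis()->pixels or getByteBuffer() here ...
  frame->cthis()->decrementRefCount();   // refCount reaches 0 -> close() frees pixels
}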

File: JFrameProcessor.cpp

@@ -17,9 +17,6 @@ using namespace facebook;
 using namespace jni;
 
 void JFrameProcessor::registerNatives() {
-  registerHybrid({
-    makeNativeMethod("call", JFrameProcessor::call)
-  });
 }
 
 using TSelf = jni::local_ref<JFrameProcessor::javaobject>;
View File

@ -21,7 +21,7 @@ namespace vision {
using namespace facebook; using namespace facebook;
struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> { class JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
public: public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/FrameProcessor;"; static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/FrameProcessor;";
static void registerNatives(); static void registerNatives();
@ -30,20 +30,25 @@ struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
public: public:
/** /**
* Call the JS Frame Processor. * Wrap the Frame in a HostObject and call the Frame Processor.
*/ */
void call(alias_ref<JFrame::javaobject> frame); void call(jni::alias_ref<JFrame::javaobject> frame);
private: protected:
// Private constructor. Use `create(..)` to create new instances. friend HybridBase;
// C++ only constructor. Use `create(..)` to create new instances.
explicit JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet, explicit JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet,
std::shared_ptr<RNWorklet::JsiWorkletContext> context); std::shared_ptr<RNWorklet::JsiWorkletContext> context);
JFrameProcessor(const JFrameProcessor &) = delete;
JFrameProcessor &operator=(const JFrameProcessor &) = delete;
private: protected:
/**
* Call the JS Frame Processor with the given Frame Host Object.
*/
void callWithFrameHostObject(const std::shared_ptr<FrameHostObject>& frameHostObject) const; void callWithFrameHostObject(const std::shared_ptr<FrameHostObject>& frameHostObject) const;
private: private:
friend HybridBase;
std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker; std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext; std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
}; };

File: JVisionCameraProxy.cpp

@@ -18,6 +18,10 @@
 #include <react-native-worklets-core/WKTJsiWorkletContext.h>
 #endif
 
+#if VISION_CAMERA_ENABLE_SKIA
+#include "JSkiaFrameProcessor.h"
+#endif
+
 namespace vision {
 
 using TSelf = local_ref<HybridClass<JVisionCameraProxy>::jhybriddata>;
@@ -31,6 +35,7 @@ JVisionCameraProxy::JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::
                                        const jni::global_ref<JVisionCameraScheduler::javaobject>& scheduler) {
   _javaPart = make_global(javaThis);
   _runtime = runtime;
+  _callInvoker = callInvoker;
 
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
   __android_log_write(ANDROID_LOG_INFO, TAG, "Creating Worklet Context...");
@@ -84,7 +89,7 @@ void JVisionCameraProxy::setFrameProcessor(int viewTag,
     frameProcessor = JFrameProcessor::create(worklet, _workletContext);
   } else if (frameProcessorType == "skia-frame-processor") {
 #if VISION_CAMERA_ENABLE_SKIA
-    throw std::runtime_error("system/skia-unavailable: Skia is not yet implemented on Android!");
+    frameProcessor = JSkiaFrameProcessor::create(worklet, _workletContext, _callInvoker);
 #else
     throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
 #endif

File: JVisionCameraProxy.h

@@ -36,11 +36,13 @@ class JVisionCameraProxy : public jni::HybridClass<JVisionCameraProxy> {
                          jni::local_ref<JMap<jstring, jobject>> options);
 
   jsi::Runtime* getJSRuntime() { return _runtime; }
+  std::shared_ptr<react::CallInvoker> getCallInvoker() { return _callInvoker; }
 
  private:
   friend HybridBase;
   jni::global_ref<JVisionCameraProxy::javaobject> _javaPart;
   jsi::Runtime* _runtime;
+  std::shared_ptr<react::CallInvoker> _callInvoker;
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
   std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
 #endif

File: DrawableFrameHostObject.cpp (new file)

@@ -0,0 +1,72 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#include "DrawableFrameHostObject.h"
#include <SkCanvas.h>
#include "FrameHostObject.h"
namespace vision {
std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
auto result = FrameHostObject::getPropertyNames(rt);
// Skia - Render Frame
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (_canvas != nullptr) {
auto canvasPropNames = _canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (_canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
throw std::runtime_error("render() is not yet implemented!");
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "isDrawable") {
return jsi::Value(_canvas != nullptr);
}
if (_canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
auto result = _canvas->get(runtime, propName);
if (!result.isUndefined()) {
return result;
}
}
// fallback to base implementation
return FrameHostObject::get(runtime, propName);
}
void DrawableFrameHostObject::invalidateCanvas() {
_canvas = nullptr;
}
} // namespace vision


@@ -0,0 +1,33 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#pragma once
#include <jsi/jsi.h>
#include "FrameHostObject.h"
#include "JFrame.h"
#include <SkCanvas.h>
#include <JsiSkCanvas.h>
namespace vision {
using namespace facebook;
class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
public:
explicit DrawableFrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas): FrameHostObject(frame), _canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
void invalidateCanvas();
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};
} // namespace vision


@@ -0,0 +1,61 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA
#include "JSkiaFrameProcessor.h"
#include <jni.h>
#include <fbjni/fbjni.h>
#include <utility>
#include "JFrame.h"
#include "DrawableFrameHostObject.h"
#include <RNSkPlatformContext.h>
#include "VisionCameraSkiaContext.h"
namespace vision {
using namespace facebook;
using namespace jni;
void JSkiaFrameProcessor::registerNatives() {
}
using TSelf = jni::local_ref<JSkiaFrameProcessor::javaobject>;
JSkiaFrameProcessor::JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker)
: JSkiaFrameProcessor::HybridBase(worklet, context) {
// TODO: Can I use the Android Platform Context from react-native-skia here?
auto skiaPlatformContext = std::make_shared<VisionCameraSkiaContext>(context->getJsRuntime(),
callInvoker,
1.0f);
_jsiCanvas = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
_skiaRenderer = std::make_shared<SkiaRenderer>();
}
TSelf JSkiaFrameProcessor::create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker) {
return JSkiaFrameProcessor::newObjectCxxArgs(worklet, context, callInvoker);
}
void JSkiaFrameProcessor::call(alias_ref<JFrame::javaobject> frame,
SkCanvas* canvas) {
// Create the Frame Host Object wrapping the internal Frame and Skia Canvas
_jsiCanvas->setCanvas(canvas);
auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, _jsiCanvas);
// Call the base function in JFrameProcessor
callWithFrameHostObject(frameHostObject);
// Remove Skia Canvas from Host Object because it is no longer valid
frameHostObject->invalidateCanvas();
}
} // namespace vision
#endif
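JSkiaFrameProcessor::call() follows a strict attach/call/detach contract: the per-frame SkCanvas is only valid while the worklet runs, so it is attached right before the call and invalidated right after. A sketch (not part of this commit) of the same contract expressed as an RAII guard, which would also detach the canvas if callWithFrameHostObject() threw; CanvasGuard is a hypothetical name:

class CanvasGuard {
 public:
  CanvasGuard(const std::shared_ptr<RNSkia::JsiSkCanvas>& jsiCanvas,
              std::shared_ptr<DrawableFrameHostObject> frame,
              SkCanvas* canvas)
      : _frame(std::move(frame)) {
    jsiCanvas->setCanvas(canvas); // attach the per-frame canvas
  }
  ~CanvasGuard() {
    // detach on scope exit - this also runs during stack unwinding
    _frame->invalidateCanvas();
  }
 private:
  std::shared_ptr<DrawableFrameHostObject> _frame;
};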


@@ -0,0 +1,59 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#pragma once
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA
#include <string>
#include <memory>
#include <jni.h>
#include <fbjni/fbjni.h>
#include <react-native-worklets-core/WKTJsiWorklet.h>
#include <react-native-worklets-core/WKTJsiHostObject.h>
#include "JFrame.h"
#include "FrameHostObject.h"
#include "SkiaRenderer.h"
#include "JFrameProcessor.h"
#include <JsiSkCanvas.h>
#include <RNSkPlatformContext.h>
namespace vision {
using namespace facebook;
class JSkiaFrameProcessor : public jni::HybridClass<JSkiaFrameProcessor, JFrameProcessor> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/skia/SkiaFrameProcessor;";
static void registerNatives();
static jni::local_ref<JSkiaFrameProcessor::javaobject> create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker);
public:
/**
* Call the JS Frame Processor with the given valid Canvas to draw on.
*/
void call(jni::alias_ref<JFrame::javaobject> frame,
SkCanvas* canvas);
SkiaRenderer& getSkiaRenderer() { return *_skiaRenderer; }
protected:
friend HybridBase;
// Private constructor. Use `create(..)` to create new instances.
explicit JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker);
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _jsiCanvas;
std::shared_ptr<SkiaRenderer> _skiaRenderer;
};
} // namespace vision
#endif


@@ -8,6 +8,8 @@
 #include <android/log.h>
 #include "OpenGLError.h"

+#include <GLES2/gl2ext.h>
+
 #include <core/SkColorSpace.h>
 #include <core/SkCanvas.h>
 #include <core/SkYUVAPixmaps.h>
@@ -22,308 +24,209 @@
 #include <android/surface_texture_jni.h>

 // from <gpu/ganesh/gl/GrGLDefines.h>
-#define GR_GL_TEXTURE_EXTERNAL 0x8D65
 #define GR_GL_RGBA8 0x8058
-#define ACTIVE_SURFACE_ID 0
+#define DEFAULT_FBO 0

 namespace vision {

-jni::local_ref<SkiaRenderer::jhybriddata> SkiaRenderer::initHybrid(jni::alias_ref<jhybridobject> javaPart) {
-  return makeCxxInstance(javaPart);
-}
-
-SkiaRenderer::SkiaRenderer(const jni::alias_ref<jhybridobject>& javaPart) {
-  _javaPart = jni::make_global(javaPart);
-  __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing SkiaRenderer...");
-  _previewSurface = nullptr;
-  _previewWidth = 0;
-  _previewHeight = 0;
-  _inputSurfaceTextureId = NO_INPUT_TEXTURE;
-}
-
 SkiaRenderer::~SkiaRenderer() {
-  if (_glDisplay != EGL_NO_DISPLAY) {
-    eglMakeCurrent(_glDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
-    if (_glSurface != EGL_NO_SURFACE) {
-      __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
-      eglDestroySurface(_glDisplay, _glSurface);
-      _glSurface = EGL_NO_SURFACE;
-    }
-    if (_glContext != EGL_NO_CONTEXT) {
-      __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Context...");
-      eglDestroyContext(_glDisplay, _glContext);
-      _glContext = EGL_NO_CONTEXT;
-    }
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Display...");
-    eglTerminate(_glDisplay);
-    _glDisplay = EGL_NO_DISPLAY;
-  }
+  _offscreenSurface = nullptr;
+  _offscreenSurfaceTextureId = NO_TEXTURE;
+  // Delete the Skia context
   if (_skiaContext != nullptr) {
     _skiaContext->abandonContext();
     _skiaContext = nullptr;
   }
-  destroyOutputSurface();
 }

-void SkiaRenderer::ensureOpenGL(ANativeWindow* surface) {
-  bool successful;
-  // EGLDisplay
-  if (_glDisplay == EGL_NO_DISPLAY) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLDisplay..");
-    _glDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
-    if (_glDisplay == EGL_NO_DISPLAY) throw OpenGLError("Failed to get default OpenGL Display!");
-    EGLint major;
-    EGLint minor;
-    successful = eglInitialize(_glDisplay, &major, &minor);
-    if (!successful) throw OpenGLError("Failed to initialize OpenGL!");
-  }
-  // EGLConfig
-  if (_glConfig == nullptr) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
-    EGLint attributes[] = {EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
-                           EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
-                           EGL_ALPHA_SIZE, 8,
-                           EGL_BLUE_SIZE, 8,
-                           EGL_GREEN_SIZE, 8,
-                           EGL_RED_SIZE, 8,
-                           EGL_DEPTH_SIZE, 0,
-                           EGL_STENCIL_SIZE, 0,
-                           EGL_NONE};
-    EGLint numConfigs;
-    successful = eglChooseConfig(_glDisplay, attributes, &_glConfig, 1, &numConfigs);
-    if (!successful || numConfigs == 0) throw OpenGLError("Failed to choose OpenGL config!");
-  }
-  // EGLContext
-  if (_glContext == EGL_NO_CONTEXT) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLContext..");
-    EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
-    _glContext = eglCreateContext(_glDisplay, _glConfig, nullptr, contextAttributes);
-    if (_glContext == EGL_NO_CONTEXT) throw OpenGLError("Failed to create OpenGL context!");
-  }
-  // EGLSurface
-  if (_glSurface == EGL_NO_SURFACE) {
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLSurface..");
-    _glSurface = eglCreateWindowSurface(_glDisplay, _glConfig, surface, nullptr);
-  }
-  successful = eglMakeCurrent(_glDisplay, _glSurface, _glSurface, _glContext);
-  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to use current OpenGL context!");
-}
-
-void SkiaRenderer::setOutputSurface(jobject previewSurface) {
-  __android_log_print(ANDROID_LOG_INFO, TAG, "Setting Output Surface..");
-  destroyOutputSurface();
-  _previewSurface = ANativeWindow_fromSurface(jni::Environment::current(), previewSurface);
-  _glSurface = EGL_NO_SURFACE;
-}
-
-void SkiaRenderer::destroyOutputSurface() {
-  __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying Output Surface..");
-  if (_glSurface != EGL_NO_SURFACE) {
-    eglDestroySurface(_glDisplay, _glSurface);
-    _glSurface = EGL_NO_SURFACE;
-    if (_skiaContext != nullptr) {
-      _skiaContext->abandonContext();
-      _skiaContext = nullptr;
-    }
-  }
-  if (_previewSurface != nullptr) {
-    ANativeWindow_release(_previewSurface);
-    _previewSurface = nullptr;
-  }
-}
-
-void SkiaRenderer::setOutputSurfaceSize(int width, int height) {
-  _previewWidth = width;
-  _previewHeight = height;
-}
-
-void SkiaRenderer::setInputTextureSize(int width, int height) {
-  _inputWidth = width;
-  _inputHeight = height;
-}
-
-void SkiaRenderer::renderLatestFrameToPreview() {
-  __android_log_print(ANDROID_LOG_INFO, TAG, "renderLatestFrameToPreview()");
-  if (_previewSurface == nullptr) {
-    throw std::runtime_error("Cannot render latest frame to preview without a preview surface! "
-                             "renderLatestFrameToPreview() needs to be called after setPreviewSurface().");
-  }
-  return;
-  if (_inputSurfaceTextureId == NO_INPUT_TEXTURE) {
-    throw std::runtime_error("Cannot render latest frame to preview without an input texture! "
-                             "renderLatestFrameToPreview() needs to be called after prepareInputTexture().");
-  }
-  ensureOpenGL(_previewSurface);
-  if (_skiaContext == nullptr) {
-    _skiaContext = GrDirectContext::MakeGL();
-  }
-  _skiaContext->resetContext();
-  GrGLTextureInfo textureInfo {
-    // OpenGL will automatically convert YUV -> RGB because it's an EXTERNAL texture
-    .fTarget = GR_GL_TEXTURE_EXTERNAL,
-    .fID = _inputSurfaceTextureId,
-    .fFormat = GR_GL_RGBA8,
-    .fProtected = skgpu::Protected::kNo,
-  };
-  GrBackendTexture texture(_inputWidth, _inputHeight, GrMipMapped::kNo, textureInfo);
-  sk_sp<SkImage> frame = SkImages::AdoptTextureFrom(_skiaContext.get(), texture, kTopLeft_GrSurfaceOrigin, kN32_SkColorType, kOpaque_SkAlphaType);
-  GrGLFramebufferInfo fboInfo {
-    // FBO #0 is the currently active OpenGL Surface (eglMakeCurrent)
-    .fFBOID = ACTIVE_SURFACE_ID,
-    .fFormat = GR_GL_RGBA8,
-    .fProtected = skgpu::Protected::kNo,
-  };
-  GrBackendRenderTarget renderTarget(_previewWidth, _previewHeight, 0, 8, fboInfo);
-  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
-  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(), renderTarget, kTopLeft_GrSurfaceOrigin, kN32_SkColorType, nullptr, &props);
-  __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering %ix%i Frame to %ix%i Preview..", frame->width(), frame->height(), surface->width(), surface->height());
-  auto canvas = surface->getCanvas();
-  canvas->clear(SkColors::kBlack);
-  auto duration = std::chrono::system_clock::now().time_since_epoch();
-  auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
-  canvas->drawImage(frame, 0, 0);
-  // TODO: Run Skia Frame Processor
-  auto rect = SkRect::MakeXYWH(150, 250, millis % 3000 / 10, millis % 3000 / 10);
-  auto paint = SkPaint();
-  paint.setColor(SkColors::kRed);
-  canvas->drawRect(rect, paint);
-  // Flush
-  canvas->flush();
-  bool successful = eglSwapBuffers(_glDisplay, _glSurface);
-  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to swap OpenGL buffers!");
-}
-
-void SkiaRenderer::renderCameraFrameToOffscreenCanvas(jni::JByteBuffer yBuffer, jni::JByteBuffer uBuffer, jni::JByteBuffer vBuffer) {
-  __android_log_print(ANDROID_LOG_INFO, TAG, "Begin render...");
-  ensureOpenGL(_previewSurface);
-  if (_skiaContext == nullptr) {
-    _skiaContext = GrDirectContext::MakeGL();
-  }
-  _skiaContext->resetContext();
-  // See https://en.wikipedia.org/wiki/Chroma_subsampling - we're in 4:2:0
-  size_t bytesPerRow = sizeof(uint8_t) * _inputWidth;
-  SkImageInfo yInfo = SkImageInfo::MakeA8(_inputWidth, _inputHeight);
-  SkPixmap yPixmap(yInfo, yBuffer.getDirectAddress(), bytesPerRow);
-  SkImageInfo uInfo = SkImageInfo::MakeA8(_inputWidth / 2, _inputHeight / 2);
-  SkPixmap uPixmap(uInfo, uBuffer.getDirectAddress(), bytesPerRow / 2);
-  SkImageInfo vInfo = SkImageInfo::MakeA8(_inputWidth / 2, _inputHeight / 2);
-  SkPixmap vPixmap(vInfo, vBuffer.getDirectAddress(), bytesPerRow / 2);
-  SkYUVAInfo info(SkISize::Make(_inputWidth, _inputHeight), SkYUVAInfo::PlaneConfig::kY_U_V, SkYUVAInfo::Subsampling::k420, SkYUVColorSpace::kRec709_Limited_SkYUVColorSpace);
-  SkPixmap externalPixmaps[3] = { yPixmap, uPixmap, vPixmap };
-  SkYUVAPixmaps pixmaps = SkYUVAPixmaps::FromExternalPixmaps(info, externalPixmaps);
-  sk_sp<SkImage> image = SkImages::TextureFromYUVAPixmaps(_skiaContext.get(), pixmaps);
-  GrGLFramebufferInfo fboInfo {
-    // FBO #0 is the currently active OpenGL Surface (eglMakeCurrent)
-    .fFBOID = ACTIVE_SURFACE_ID,
-    .fFormat = GR_GL_RGBA8,
-    .fProtected = skgpu::Protected::kNo,
-  };
-  GrBackendRenderTarget renderTarget(_previewWidth, _previewHeight, 0, 8, fboInfo);
-  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
-  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(), renderTarget, kTopLeft_GrSurfaceOrigin, kN32_SkColorType, nullptr, &props);
-  auto canvas = surface->getCanvas();
-  canvas->clear(SkColors::kBlack);
-  auto duration = std::chrono::system_clock::now().time_since_epoch();
-  auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
-  canvas->drawImage(image, 0, 0);
-  // TODO: Run Skia Frame Processor
-  auto rect = SkRect::MakeXYWH(150, 250, millis % 3000 / 10, millis % 3000 / 10);
-  auto paint = SkPaint();
-  paint.setColor(SkColors::kRed);
-  canvas->drawRect(rect, paint);
-  // Flush
-  canvas->flush();
-  bool successful = eglSwapBuffers(_glDisplay, _glSurface);
-  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to swap OpenGL buffers!");
-  __android_log_print(ANDROID_LOG_INFO, TAG, "Rendered!");
-}
-
-void SkiaRenderer::registerNatives() {
-  registerHybrid({
-    makeNativeMethod("initHybrid", SkiaRenderer::initHybrid),
-    makeNativeMethod("setInputTextureSize", SkiaRenderer::setInputTextureSize),
-    makeNativeMethod("setOutputSurface", SkiaRenderer::setOutputSurface),
-    makeNativeMethod("destroyOutputSurface", SkiaRenderer::destroyOutputSurface),
-    makeNativeMethod("setOutputSurfaceSize", SkiaRenderer::setOutputSurfaceSize),
-    makeNativeMethod("renderLatestFrameToPreview", SkiaRenderer::renderLatestFrameToPreview),
-    makeNativeMethod("renderCameraFrameToOffscreenCanvas", SkiaRenderer::renderCameraFrameToOffscreenCanvas),
-  });
-}
+sk_sp<GrDirectContext> SkiaRenderer::getSkiaContext() {
+  if (_skiaContext == nullptr) {
+    _skiaContext = GrDirectContext::MakeGL();
+  }
+  return _skiaContext;
+}
+
+sk_sp<SkImage> SkiaRenderer::wrapTextureAsImage(OpenGLTexture& texture) {
+  GrGLTextureInfo textureInfo {
+    // OpenGL will automatically convert YUV -> RGB if it's an EXTERNAL texture
+    .fTarget = texture.target,
+    .fID = texture.id,
+    .fFormat = GR_GL_RGBA8,
+  };
+  GrBackendTexture skiaTexture(texture.width, texture.height, GrMipMapped::kNo, textureInfo);
+  sk_sp<SkImage> image = SkImages::BorrowTextureFrom(_skiaContext.get(), skiaTexture, kBottomLeft_GrSurfaceOrigin,
+                                                     kN32_SkColorType, kOpaque_SkAlphaType, nullptr, nullptr);
+  if (image == nullptr) [[unlikely]] {
+    throw std::runtime_error("Failed to create Skia Image! Cannot wrap input texture (frame) using Skia.");
+  }
+  return image;
+}
+
+sk_sp<SkSurface> SkiaRenderer::wrapEglSurfaceAsSurface(EGLSurface eglSurface) {
+  GLint sampleCnt;
+  glGetIntegerv(GL_SAMPLES, &sampleCnt);
+  GLint stencilBits;
+  glGetIntegerv(GL_STENCIL_BITS, &stencilBits);
+  GrGLFramebufferInfo fboInfo {
+    // DEFAULT_FBO is FBO0, meaning the default on-screen FBO for that given surface
+    .fFBOID = DEFAULT_FBO,
+    .fFormat = GR_GL_RGBA8
+  };
+  EGLint width = 0, height = 0;
+  eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_WIDTH, &width);
+  eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_HEIGHT, &height);
+  GrBackendRenderTarget renderTarget(width, height, sampleCnt, stencilBits, fboInfo);
+  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
+  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(), renderTarget, kBottomLeft_GrSurfaceOrigin,
+                                                                 kN32_SkColorType, nullptr, &props, nullptr, nullptr);
+  if (surface == nullptr) [[unlikely]] {
+    throw std::runtime_error("Failed to create Skia Surface! Cannot wrap EGLSurface/FrameBuffer using Skia.");
+  }
+  return surface;
+}
+
+sk_sp<SkSurface> SkiaRenderer::getOffscreenSurface(int width, int height) {
+  if (_offscreenSurface == nullptr || _offscreenSurface->width() != width || _offscreenSurface->height() != height) {
+    // 1. Get Skia Context
+    sk_sp<GrDirectContext> skiaContext = getSkiaContext();
+    // 2. Create a backend texture (TEXTURE_2D + Frame Buffer)
+    GrBackendTexture backendTexture = skiaContext->createBackendTexture(width, height, SkColorType::kN32_SkColorType,
+                                                                        GrMipMapped::kNo, GrRenderable::kYes);
+    // 3. Get its Texture ID
+    GrGLTextureInfo info;
+    backendTexture.getGLTextureInfo(&info);
+    _offscreenSurfaceTextureId = info.fID;
+    struct ReleaseContext {
+      GrDirectContext* context;
+      GrBackendTexture texture;
+    };
+    auto releaseCtx = new ReleaseContext({skiaContext.get(), backendTexture});
+    SkSurfaces::TextureReleaseProc releaseProc = [](void* address) {
+      // 5. Once done using, delete the backend OpenGL texture.
+      auto releaseCtx = reinterpret_cast<ReleaseContext*>(address);
+      releaseCtx->context->deleteBackendTexture(releaseCtx->texture);
+      delete releaseCtx; // free the release context itself, too
+    };
+    // 4. Wrap the newly created texture as an SkSurface
+    SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
+    _offscreenSurface = SkSurfaces::WrapBackendTexture(skiaContext.get(), backendTexture, kBottomLeft_GrSurfaceOrigin,
+                                                       0, SkColorType::kN32_SkColorType, nullptr, &props,
+                                                       releaseProc, releaseCtx);
+    if (_offscreenSurface == nullptr) [[unlikely]] {
+      throw std::runtime_error("Failed to create offscreen Skia Surface!");
+    }
+  }
+  return _offscreenSurface;
+}
+
+OpenGLTexture SkiaRenderer::renderTextureToOffscreenSurface(OpenGLContext& glContext,
+                                                            OpenGLTexture& texture,
+                                                            float* transformMatrix,
+                                                            const DrawCallback& drawCallback) {
+  // 1. Activate the OpenGL context (eglMakeCurrent)
+  glContext.use();
+  // 2. Initialize Skia
+  sk_sp<GrDirectContext> skiaContext = getSkiaContext();
+  // 3. Create the offscreen Skia Surface
+  sk_sp<SkSurface> surface = getOffscreenSurface(texture.width, texture.height);
+  // 4. Wrap the input texture as an image so we can draw it to the surface
+  sk_sp<SkImage> frame = wrapTextureAsImage(texture);
+  // 5. Prepare the Canvas
+  SkCanvas* canvas = _offscreenSurface->getCanvas();
+  if (canvas == nullptr) [[unlikely]] {
+    throw std::runtime_error("Failed to get Skia Canvas!");
+  }
+  // TODO: Apply Matrix. No idea how though.
+  SkM44 matrix = SkM44::ColMajor(transformMatrix);
+  // 6. Render it!
+  canvas->clear(SkColors::kBlack);
+  canvas->drawImage(frame, 0, 0);
+  // 7. Run the draw callback (e.g. the JS Skia Frame Processor)
+  drawCallback(canvas);
+  // 8. Flush all Skia operations to OpenGL
+  _offscreenSurface->flushAndSubmit();
+  return OpenGLTexture {
+    .id = _offscreenSurfaceTextureId,
+    .target = GL_TEXTURE_2D,
+    .width = texture.width,
+    .height = texture.height,
+  };
+}
+
+void SkiaRenderer::renderTextureToSurface(OpenGLContext& glContext, OpenGLTexture& texture, EGLSurface surface) {
+  // 1. Activate the OpenGL context (eglMakeCurrent)
+  glContext.use(surface);
+  // 2. Initialize Skia
+  sk_sp<GrDirectContext> skiaContext = getSkiaContext();
+  // 3. Wrap the output EGLSurface in a Skia SkSurface
+  sk_sp<SkSurface> skSurface = wrapEglSurfaceAsSurface(surface);
+  // 4. Wrap the input texture in a Skia SkImage
+  sk_sp<SkImage> frame = wrapTextureAsImage(texture);
+  // 5. Prepare the Canvas
+  SkCanvas* canvas = skSurface->getCanvas();
+  if (canvas == nullptr) [[unlikely]] {
+    throw std::runtime_error("Failed to get Skia Canvas!");
+  }
+  // 6. Render it!
+  canvas->clear(SkColors::kBlack);
+  canvas->drawImage(frame, 0, 0);
+  // 7. Flush all Skia operations to OpenGL
+  skSurface->flushAndSubmit();
+  // 8. Swap the buffers so the onscreen surface gets updated.
+  glContext.flush();
+}

 } // namespace vision


@@ -6,72 +6,63 @@
 #if VISION_CAMERA_ENABLE_SKIA

-#include <jni.h>
-#include <fbjni/fbjni.h>
-#include <fbjni/ByteBuffer.h>
 #include <GLES2/gl2.h>
 #include <EGL/egl.h>
-#include <include/core/SkSurface.h>
 #include <android/native_window.h>
+#include <include/core/SkSurface.h>
+#include <include/gpu/GrDirectContext.h>
+#include "OpenGLContext.h"
+#include "OpenGLTexture.h"

 namespace vision {

-using namespace facebook;
+#define NO_TEXTURE 0

-#define NO_INPUT_TEXTURE 7654321
+using DrawCallback = std::function<void(SkCanvas*)>;

-class SkiaRenderer: public jni::HybridClass<SkiaRenderer> {
- // JNI Stuff
- public:
-  static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/skia/SkiaRenderer;";
-  static void registerNatives();
-
- private:
-  friend HybridBase;
-  jni::global_ref<SkiaRenderer::javaobject> _javaPart;
-  explicit SkiaRenderer(const jni::alias_ref<jhybridobject>& javaPart);
-
- public:
-  static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> javaPart);
+class SkiaRenderer {
+ public:
+  /**
+   * Create a new Skia renderer. OpenGL has to be set up by the caller outside of this class,
+   * so the Skia renderer can use the global OpenGL context.
+   */
+  explicit SkiaRenderer() {};
   ~SkiaRenderer();

- private:
-  // Input Texture (Camera)
-  void setInputTextureSize(int width, int height);
-  // Output Surface (Preview)
-  void setOutputSurface(jobject previewSurface);
-  void destroyOutputSurface();
-  void setOutputSurfaceSize(int width, int height);
-
   /**
-   * Renders the latest Camera Frame from the Input Texture onto the Preview Surface. (60 FPS)
+   * Renders the given Texture (might be a Camera Frame) to a cached offscreen Texture using Skia.
+   *
+   * @returns The texture that was rendered to.
    */
-  void renderLatestFrameToPreview();
+  OpenGLTexture renderTextureToOffscreenSurface(OpenGLContext& glContext,
+                                                OpenGLTexture& texture,
+                                                float* transformMatrix,
+                                                const DrawCallback& drawCallback);
   /**
-   * Renders the latest Camera Frame into it's Input Texture and run the Skia Frame Processor (1..240 FPS)
+   * Renders the given texture to the target output surface using Skia.
    */
-  void renderCameraFrameToOffscreenCanvas(jni::JByteBuffer yBuffer,
-                                          jni::JByteBuffer uBuffer,
-                                          jni::JByteBuffer vBuffer);
+  void renderTextureToSurface(OpenGLContext& glContext,
+                              OpenGLTexture& texture,
+                              EGLSurface surface);
+
+ private:
+  // Gets or creates the Skia context.
+  sk_sp<GrDirectContext> getSkiaContext();
+  // Wraps a Texture as an SkImage allowing you to draw it
+  sk_sp<SkImage> wrapTextureAsImage(OpenGLTexture& texture);
+  // Wraps an EGLSurface as an SkSurface allowing you to draw into it
+  sk_sp<SkSurface> wrapEglSurfaceAsSurface(EGLSurface eglSurface);
+  // Gets or creates an off-screen surface that you can draw into
+  sk_sp<SkSurface> getOffscreenSurface(int width, int height);

  private:
-  // OpenGL Context
-  EGLContext _glContext = EGL_NO_CONTEXT;
-  EGLDisplay _glDisplay = EGL_NO_DISPLAY;
-  EGLSurface _glSurface = EGL_NO_SURFACE;
-  EGLConfig _glConfig = nullptr;
   // Skia Context
-  sk_sp<GrDirectContext> _skiaContext;
-
-  // Input Texture (Camera/Offscreen)
-  GLuint _inputSurfaceTextureId = NO_INPUT_TEXTURE;
-  int _inputWidth, _inputHeight;
-  // Output Texture (Surface/Preview)
-  ANativeWindow* _previewSurface;
-  int _previewWidth, _previewHeight;
-
-  void ensureOpenGL(ANativeWindow* surface);
+  sk_sp<GrDirectContext> _skiaContext = nullptr;
+  sk_sp<SkSurface> _offscreenSurface = nullptr;
+  GLuint _offscreenSurfaceTextureId = NO_TEXTURE;

   static auto constexpr TAG = "SkiaRenderer";
 };
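Taken together, the two public entry points form a two-pass pipeline per Camera frame. A minimal sketch of a caller, assuming an existing OpenGL context, Camera texture and preview EGLSurface - the real wiring lives in VideoPipeline.cpp, which is not part of this excerpt:

#include "SkiaRenderer.h"

void renderFrame(vision::SkiaRenderer& renderer, OpenGLContext& glContext,
                 OpenGLTexture& cameraTexture, float* transformMatrix,
                 EGLSurface previewSurface) {
  // Pass 1: draw the Camera texture into the cached offscreen texture and let
  // the draw callback (in the real pipeline: the JS Skia Frame Processor) paint on top.
  OpenGLTexture drawn = renderer.renderTextureToOffscreenSurface(
      glContext, cameraTexture, transformMatrix, [](SkCanvas* canvas) {
        SkPaint paint;
        paint.setColor(SkColors::kRed);
        canvas->drawRect(SkRect::MakeXYWH(150, 250, 300, 300), paint);
      });

  // Pass 2: render the drawn-to texture onto the on-screen preview surface.
  renderer.renderTextureToSurface(glContext, drawn, previewSurface);
}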


@@ -0,0 +1,8 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#include "VisionCameraSkiaContext.h"

namespace vision {
} // namespace vision


@@ -0,0 +1,52 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#pragma once
#include <jsi/jsi.h>
#include <RNSkPlatformContext.h>
namespace vision {
using namespace facebook;
class VisionCameraSkiaContext: public RNSkia::RNSkPlatformContext {
public:
VisionCameraSkiaContext(jsi::Runtime* runtime,
std::shared_ptr<react::CallInvoker> callInvoker,
float pixelDensity)
: RNSkia::RNSkPlatformContext(runtime, callInvoker, pixelDensity) { }
void raiseError(const std::exception &err) override {
throw std::runtime_error("VisionCameraSkiaContext Error: " + std::string(err.what()));
}
void performStreamOperation(
const std::string &sourceUri,
const std::function<void(std::unique_ptr<SkStreamAsset>)> &op) override {
throw std::runtime_error("VisionCameraSkiaContext::performStreamOperation is not yet implemented!");
}
sk_sp<SkSurface> makeOffscreenSurface(int width, int height) override {
throw std::runtime_error("VisionCameraSkiaContext::makeOffscreenSurface is not yet implemented!");
}
void runOnMainThread(std::function<void()> task) override {
throw std::runtime_error("VisionCameraSkiaContext::runOnMainThread is not yet implemented!");
}
sk_sp<SkImage> takeScreenshotFromViewTag(size_t tag) override {
throw std::runtime_error("VisionCameraSkiaContext::takeScreenshotFromViewTag is not yet implemented!");
}
void startDrawLoop() override {
throw std::runtime_error("VisionCameraSkiaContext::startDrawLoop is not yet implemented!");
}
void stopDrawLoop() override {
throw std::runtime_error("VisionCameraSkiaContext::stopDrawLoop is not yet implemented!");
}
};
} // namespace vision


@@ -200,8 +200,8 @@ class CameraSession(private val context: Context,
   private fun updateVideoOutputs() {
     val videoPipeline = outputs?.videoOutput?.videoPipeline ?: return
     val previewOutput = outputs?.previewOutput
-    videoPipeline.setRecordingSessionOutput(this.recording)
-    videoPipeline.setFrameProcessorOutput(this.frameProcessor)
+    videoPipeline.setRecordingSessionOutput(recording)
+    videoPipeline.setFrameProcessorOutput(frameProcessor)
     videoPipeline.setPreviewOutput(previewOutput?.surface)
   }


@@ -19,16 +19,12 @@ import com.mrousavy.camera.extensions.installHierarchyFitter
 import com.mrousavy.camera.frameprocessor.FrameProcessor
 import com.mrousavy.camera.parsers.Orientation
 import com.mrousavy.camera.parsers.PixelFormat
-import com.mrousavy.camera.parsers.PreviewType
 import com.mrousavy.camera.parsers.Torch
 import com.mrousavy.camera.parsers.VideoStabilizationMode
-import com.mrousavy.camera.skia.SkiaPreviewView
-import com.mrousavy.camera.skia.SkiaRenderer
 import com.mrousavy.camera.utils.outputs.CameraOutputs
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.launch
-import java.io.Closeable

 //
 // TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
@@ -52,7 +48,7 @@ class CameraView(context: Context) : FrameLayout(context) {
   companion object {
     const val TAG = "CameraView"
-    private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId", "previewType")
+    private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId")
     private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "photo", "video", "enableFrameProcessor", "pixelFormat")
     private val propsThatRequireFormatReconfiguration = arrayListOf("fps", "hdr", "videoStabilizationMode", "lowLightBoost")
   }
@@ -75,7 +71,6 @@ class CameraView(context: Context) : FrameLayout(context) {
   var videoStabilizationMode: VideoStabilizationMode? = null
   var hdr: Boolean? = null // nullable bool
   var lowLightBoost: Boolean? = null // nullable bool
-  var previewType: PreviewType = PreviewType.NONE
   // other props
   var isActive = false
   var torch: Torch = Torch.OFF
@@ -92,11 +87,10 @@ class CameraView(context: Context) : FrameLayout(context) {
   private var previewView: View? = null
   private var previewSurface: Surface? = null
-  private var skiaRenderer: SkiaRenderer? = null

   internal var frameProcessor: FrameProcessor? = null
     set(value) {
       field = value
-      cameraSession.frameProcessor = frameProcessor
+      cameraSession.frameProcessor = value
     }

   private val inputOrientation: Orientation
@@ -130,34 +124,17 @@ class CameraView(context: Context) : FrameLayout(context) {
   }

   private fun setupPreviewView() {
-    this.previewView?.let { previewView ->
-      removeView(previewView)
-      if (previewView is Closeable) previewView.close()
-    }
+    removeView(previewView)
     this.previewSurface = null

-    when (previewType) {
-      PreviewType.NONE -> {
-        // Do nothing.
-      }
-      PreviewType.NATIVE -> {
-        val cameraId = cameraId ?: throw NoCameraDeviceError()
-        this.previewView = NativePreviewView(context, cameraManager, cameraId) { surface ->
-          previewSurface = surface
-          configureSession()
-        }
-      }
-      PreviewType.SKIA -> {
-        if (skiaRenderer == null) skiaRenderer = SkiaRenderer()
-        this.previewView = SkiaPreviewView(context, skiaRenderer!!)
-        configureSession()
-      }
-    }
-
-    this.previewView?.let { previewView ->
-      previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
-      addView(previewView)
-    }
+    val cameraId = cameraId ?: return
+    val previewView = NativePreviewView(context, cameraManager, cameraId) { surface ->
+      previewSurface = surface
+      configureSession()
+    }
+    previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
+    addView(previewView)
+    this.previewView = previewView
   }

   fun update(changedProps: ArrayList<String>) {
@@ -218,8 +195,6 @@ class CameraView(context: Context) : FrameLayout(context) {
     // TODO: Allow previewSurface to be null/none
     val previewSurface = previewSurface ?: return

-    if (targetVideoSize != null) skiaRenderer?.setInputSurfaceSize(targetVideoSize.width, targetVideoSize.height)
-
     val previewOutput = CameraOutputs.PreviewOutput(previewSurface)
     val photoOutput = if (photo == true) {
       CameraOutputs.PhotoOutput(targetPhotoSize)


@@ -7,7 +7,6 @@ import com.facebook.react.uimanager.ViewGroupManager
 import com.facebook.react.uimanager.annotations.ReactProp
 import com.mrousavy.camera.parsers.Orientation
 import com.mrousavy.camera.parsers.PixelFormat
-import com.mrousavy.camera.parsers.PreviewType
 import com.mrousavy.camera.parsers.Torch
 import com.mrousavy.camera.parsers.VideoStabilizationMode
@@ -102,14 +101,6 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
     view.videoStabilizationMode = newMode
   }

-  @ReactProp(name = "previewType")
-  fun setPreviewType(view: CameraView, previewType: String) {
-    val newMode = PreviewType.fromUnionValue(previewType)
-    if (view.previewType != newMode)
-      addChangedPropToTransaction(view, "previewType")
-    view.previewType = newMode
-  }
-
   @ReactProp(name = "enableHighQualityPhotos")
   fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) {
     if (view.enableHighQualityPhotos != enableHighQualityPhotos)


@@ -1,147 +1,66 @@
 package com.mrousavy.camera.frameprocessor;

-import android.graphics.ImageFormat;
-import android.media.Image;
-import com.facebook.proguard.annotations.DoNotStrip;
-import com.mrousavy.camera.parsers.PixelFormat;
-import com.mrousavy.camera.parsers.Orientation;
+import com.facebook.jni.HybridData;
 import java.nio.ByteBuffer;

+/** @noinspection JavaJniMissingFunction*/
 public class Frame {
-    private final Image image;
-    private final boolean isMirrored;
-    private final long timestamp;
-    private final Orientation orientation;
-    private int refCount = 0;
+    private final HybridData mHybridData;

-    public Frame(Image image, long timestamp, Orientation orientation, boolean isMirrored) {
-        this.image = image;
-        this.timestamp = timestamp;
-        this.orientation = orientation;
-        this.isMirrored = isMirrored;
+    private Frame(HybridData hybridData) {
+        mHybridData = hybridData;
     }

-    public Image getImage() {
-        return image;
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public int getWidth() {
-        return image.getWidth();
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public int getHeight() {
-        return image.getHeight();
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public boolean getIsValid() {
-        try {
-            // will throw an exception if the image is already closed
-            image.getCropRect();
-            // no exception thrown, image must still be valid.
-            return true;
-        } catch (Exception e) {
-            // exception thrown, image has already been closed.
-            return false;
-        }
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public boolean getIsMirrored() {
-        return isMirrored;
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public long getTimestamp() {
-        return timestamp;
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public String getOrientation() {
-        return orientation.getUnionValue();
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public String getPixelFormat() {
-        PixelFormat format = PixelFormat.Companion.fromImageFormat(image.getFormat());
-        return format.getUnionValue();
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public int getPlanesCount() {
-        return image.getPlanes().length;
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public int getBytesPerRow() {
-        return image.getPlanes()[0].getRowStride();
-    }
-
-    private static ByteBuffer byteArrayCache;
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public ByteBuffer toByteBuffer() {
-        switch (image.getFormat()) {
-            case ImageFormat.YUV_420_888:
-                ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
-                ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
-                ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
-                int ySize = yBuffer.remaining();
-                int uSize = uBuffer.remaining();
-                int vSize = vBuffer.remaining();
-                int totalSize = ySize + uSize + vSize;
-                if (byteArrayCache != null) byteArrayCache.rewind();
-                if (byteArrayCache == null || byteArrayCache.remaining() != totalSize) {
-                    byteArrayCache = ByteBuffer.allocateDirect(totalSize);
-                }
-                byteArrayCache.put(yBuffer).put(uBuffer).put(vBuffer);
-                return byteArrayCache;
-            case ImageFormat.JPEG:
-                return image.getPlanes()[0].getBuffer();
-            default:
-                throw new RuntimeException("Cannot convert Frame with Format " + image.getFormat() + " to byte array!");
-        }
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public void incrementRefCount() {
-        synchronized (this) {
-            refCount++;
-        }
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    public void decrementRefCount() {
-        synchronized (this) {
-            refCount--;
-            if (refCount <= 0) {
-                // If no reference is held on this Image, close it.
-                image.close();
-            }
-        }
-    }
-
-    @SuppressWarnings("unused")
-    @DoNotStrip
-    private void close() {
-        image.close();
-    }
+    @Override
+    protected void finalize() throws Throwable {
+        super.finalize();
+        mHybridData.resetNative();
+    }
+
+    /**
+     * Get the width of the Frame, in its sensor orientation. (in pixels)
+     */
+    public native int getWidth();
+
+    /**
+     * Get the height of the Frame, in its sensor orientation. (in pixels)
+     */
+    public native int getHeight();
+
+    /**
+     * Get the number of bytes per row.
+     * * To get the number of components per pixel you can divide this by the Frame's width.
+     * * To get the total size of the byte buffer you can multiply this by the Frame's height.
+     */
+    public native int getBytesPerRow();
+
+    /**
+     * Get the local timestamp of this Frame. This is always monotonically increasing for each Frame.
+     */
+    public native long getTimestamp();
+
+    /**
+     * Get the Orientation of this Frame. The return value is the result of `Orientation.toUnionValue()`.
+     */
+    public native String getOrientation();
+
+    /**
+     * Return whether this Frame is mirrored or not. Frames from the front-facing Camera are often mirrored.
+     */
+    public native boolean getIsMirrored();
+
+    /**
+     * Get the pixel-format of this Frame. The return value is the result of `PixelFormat.toUnionValue()`.
+     */
+    public native String getPixelFormat();
+
+    /**
+     * Get the actual backing pixel data of this Frame using a zero-copy C++ ByteBuffer.
+     */
+    public native ByteBuffer getByteBuffer();
+
+    /**
+     * Get whether this Frame is still valid.
+     * A Frame is valid as long as it hasn't been closed by the Frame Processor Runtime Manager
+     * (either because it ran out of Frames in its queue and needs to close old ones, or because
+     * a Frame Processor finished executing and you're still trying to hold onto this Frame in native)
+     */
+    public native boolean getIsValid();
+
+    private native void incrementRefCount();
+    private native void decrementRefCount();
+    private native void close();
 }
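The getBytesPerRow() docs above carry two small formulas. A self-contained sketch of that arithmetic as it would look on the native side of the zero-copy ByteBuffer - FrameInfo and the example field values are stand-ins for illustration, not part of this commit:

#include <cstddef>
#include <cstdint>

struct FrameInfo {
  int width;        // e.g. 1920
  int height;       // e.g. 1080
  int bytesPerRow;  // row stride as reported by Frame.getBytesPerRow()
};

// components per pixel = bytesPerRow / width (e.g. 4 for RGBA)
inline int componentsPerPixel(const FrameInfo& info) {
  return info.bytesPerRow / info.width;
}

// total buffer size = bytesPerRow * height
inline size_t totalBufferSize(const FrameInfo& info) {
  return static_cast<size_t>(info.bytesPerRow) * static_cast<size_t>(info.height);
}

// Address of the pixel at (x, y) inside the Frame's backing memory.
inline const uint8_t* pixelAt(const uint8_t* base, const FrameInfo& info, int x, int y) {
  return base + static_cast<size_t>(y) * info.bytesPerRow
              + static_cast<size_t>(x) * componentsPerPixel(info);
}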


@@ -8,15 +8,9 @@ import com.facebook.jni.HybridData;
 import com.facebook.proguard.annotations.DoNotStrip;

 /**
- * Represents a JS Frame Processor
+ * Represents a JS Frame Processor. Its actual implementation is in NDK/C++.
  */
-@SuppressWarnings("JavaJniMissingFunction") // we're using fbjni.
-public final class FrameProcessor {
-    /**
-     * Call the JS Frame Processor function with the given Frame
-     */
-    public native void call(Frame frame);
-
+public class FrameProcessor {
     @DoNotStrip
     @Keep
     private final HybridData mHybridData;


@@ -0,0 +1,11 @@
package com.mrousavy.camera.skia;
import com.facebook.jni.HybridData;
import com.mrousavy.camera.frameprocessor.FrameProcessor;
public class SkiaFrameProcessor extends FrameProcessor {
// Implementation is in JSkiaFrameProcessor.cpp
public SkiaFrameProcessor(HybridData hybridData) {
super(hybridData);
}
}


@@ -1,75 +0,0 @@
package com.mrousavy.camera.skia
import android.annotation.SuppressLint
import android.content.Context
import android.util.Log
import android.view.Choreographer
import android.view.SurfaceHolder
import android.view.SurfaceView
import com.mrousavy.camera.extensions.postAndWait
@SuppressLint("ViewConstructor")
class SkiaPreviewView(context: Context,
private val skiaRenderer: SkiaRenderer): SurfaceView(context), SurfaceHolder.Callback {
companion object {
private const val TAG = "SkiaPreviewView"
}
private var isAlive = true
init {
holder.addCallback(this)
}
private fun startLooping(choreographer: Choreographer) {
choreographer.postFrameCallback {
synchronized(this) {
if (!isAlive) return@synchronized
Log.i(TAG, "tick..")
// Refresh UI (60 FPS)
skiaRenderer.onPreviewFrame()
startLooping(choreographer)
}
}
}
override fun surfaceCreated(holder: SurfaceHolder) {
synchronized(this) {
Log.i(TAG, "onSurfaceCreated(..)")
skiaRenderer.thread.postAndWait {
// Create C++ part (OpenGL/Skia context)
skiaRenderer.setPreviewSurface(holder.surface)
isAlive = true
// Start updating the Preview View (~60 FPS)
startLooping(Choreographer.getInstance())
}
}
}
override fun surfaceChanged(holder: SurfaceHolder, format: Int, w: Int, h: Int) {
synchronized(this) {
Log.i(TAG, "surfaceChanged($w, $h)")
skiaRenderer.thread.postAndWait {
// Update C++ OpenGL Surface size
skiaRenderer.setPreviewSurfaceSize(w, h)
}
}
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
synchronized(this) {
isAlive = false
Log.i(TAG, "surfaceDestroyed(..)")
skiaRenderer.thread.postAndWait {
// Clean up C++ part (OpenGL/Skia context)
skiaRenderer.destroyPreviewSurface()
}
}
}
}


@@ -1,98 +0,0 @@
package com.mrousavy.camera.skia
import android.graphics.ImageFormat
import android.view.Surface
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.frameprocessor.Frame
import java.io.Closeable
import java.nio.ByteBuffer
@Suppress("KotlinJniMissingFunction")
class SkiaRenderer: Closeable {
@DoNotStrip
private var mHybridData: HybridData
private var hasNewFrame = false
private var hasOutputSurface = false
val thread = CameraQueues.previewQueue.handler
init {
mHybridData = initHybrid()
}
override fun close() {
hasNewFrame = false
thread.post {
synchronized(this) {
destroyOutputSurface()
mHybridData.resetNative()
}
}
}
fun setPreviewSurface(surface: Surface) {
synchronized(this) {
setOutputSurface(surface)
hasOutputSurface = true
}
}
fun setPreviewSurfaceSize(width: Int, height: Int) {
synchronized(this) {
setOutputSurfaceSize(width, height)
}
}
fun destroyPreviewSurface() {
synchronized(this) {
destroyOutputSurface()
hasOutputSurface = false
}
}
fun setInputSurfaceSize(width: Int, height: Int) {
synchronized(this) {
setInputTextureSize(width, height)
}
}
/**
* Called on every Camera Frame (1..240 FPS)
*/
fun onCameraFrame(frame: Frame) {
synchronized(this) {
if (!hasOutputSurface) return
if (frame.image.format != ImageFormat.YUV_420_888) {
throw Error("Failed to render Camera Frame! Expected Image format #${ImageFormat.YUV_420_888} (ImageFormat.YUV_420_888), received #${frame.image.format}.")
}
val (y, u, v) = frame.image.planes
renderCameraFrameToOffscreenCanvas(y.buffer, u.buffer, v.buffer)
hasNewFrame = true
}
}
/**
* Called on every UI Frame (60 FPS)
*/
fun onPreviewFrame() {
synchronized(this) {
if (!hasOutputSurface) return
if (!hasNewFrame) return
renderLatestFrameToPreview()
hasNewFrame = false
}
}
private external fun initHybrid(): HybridData
private external fun renderCameraFrameToOffscreenCanvas(yBuffer: ByteBuffer,
uBuffer: ByteBuffer,
vBuffer: ByteBuffer)
private external fun renderLatestFrameToPreview()
private external fun setInputTextureSize(width: Int, height: Int)
private external fun setOutputSurface(surface: Any)
private external fun setOutputSurfaceSize(width: Int, height: Int)
private external fun destroyOutputSurface()
}


@@ -0,0 +1,32 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.media.Image
import android.media.ImageReader
import android.media.ImageWriter
import java.io.Closeable
class ImageCreator(private val width: Int,
private val height: Int,
private val format: Int = ImageFormat.PRIVATE,
private val maxImages: Int = 3): Closeable {
private var imageReader: ImageReader? = null
private var imageWriter: ImageWriter? = null
override fun close() {
imageWriter?.close()
imageReader?.close()
}
fun createImage(): Image {
if (imageReader == null || imageWriter == null) {
imageWriter?.close()
imageReader?.close()
imageReader = ImageReader.newInstance(width, height, format, maxImages)
imageWriter = ImageWriter.newInstance(imageReader!!.surface, maxImages)
}
return imageWriter!!.dequeueInputImage()
}
}


@@ -41,7 +41,6 @@ class RecordingSession(context: Context,
   val surface: Surface = MediaCodec.createPersistentInputSurface()

   init {
     outputFile = File.createTempFile("mrousavy", fileType.toExtension(), context.cacheDir)
-
     Log.i(TAG, "Creating RecordingSession for ${outputFile.absolutePath}")
@@ -54,7 +53,7 @@ class RecordingSession(context: Context,
     recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
     recorder.setOutputFile(outputFile.absolutePath)
     recorder.setVideoEncodingBitRate(VIDEO_BIT_RATE)
-    recorder.setVideoSize(size.height, size.width)
+    recorder.setVideoSize(size.width, size.height)
     if (fps != null) recorder.setVideoFrameRate(fps)

     Log.i(TAG, "Using $codec Video Codec..")
@@ -67,7 +66,7 @@ class RecordingSession(context: Context,
       recorder.setAudioChannels(AUDIO_CHANNELS)
     }
     recorder.setInputSurface(surface)
-    //recorder.setOrientationHint(orientation.toDegrees())
+    recorder.setOrientationHint(orientation.toDegrees())

     recorder.setOnErrorListener { _, what, extra ->
       Log.e(TAG, "MediaRecorder Error: $what ($extra)")


@@ -2,15 +2,12 @@ package com.mrousavy.camera.utils

 import android.graphics.ImageFormat
 import android.graphics.SurfaceTexture
-import android.media.ImageReader
 import android.media.ImageWriter
 import android.media.MediaRecorder
 import android.util.Log
 import android.view.Surface
 import com.facebook.jni.HybridData
-import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
-import com.mrousavy.camera.parsers.Orientation
 import java.io.Closeable

 /**
@@ -26,23 +23,24 @@ class VideoPipeline(val width: Int,
                     val height: Int,
                     val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
   companion object {
-    private const val MAX_IMAGES = 5
+    private const val MAX_IMAGES = 3
     private const val TAG = "VideoPipeline"
   }

   private val mHybridData: HybridData
-  private var openGLTextureId: Int? = null
-  private var transformMatrix = FloatArray(16)
   private var isActive = true

-  // Output 1
-  private var frameProcessor: FrameProcessor? = null
-  private var imageReader: ImageReader? = null
+  // Input Texture
+  private var openGLTextureId: Int? = null
+  private var transformMatrix = FloatArray(16)

-  // Output 2
+  // Processing input texture
+  private var frameProcessor: FrameProcessor? = null
+
+  // Output 1
   private var recordingSession: RecordingSession? = null

-  // Output 3
+  // Output 2
   private var previewSurface: Surface? = null

   // Input
@@ -60,8 +58,6 @@ class VideoPipeline(val width: Int,
   override fun close() {
     synchronized(this) {
       isActive = false
-      imageReader?.close()
-      imageReader = null
       frameProcessor = null
       recordingSession = null
       surfaceTexture.release()
@@ -94,21 +90,6 @@ class VideoPipeline(val width: Int,
     }
   }

-  private fun getImageReader(): ImageReader {
-    val imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
-    imageReader.setOnImageAvailableListener({ reader ->
-      Log.i("VideoPipeline", "ImageReader::onImageAvailable!")
-      val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
-      // TODO: Get correct orientation and isMirrored
-      val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
-      frame.incrementRefCount()
-      frameProcessor?.call(frame)
-      frame.decrementRefCount()
-    }, null)
-    return imageReader
-  }
-
   /**
    * Configures the Pipeline to also call the given [FrameProcessor].
    * * If the [frameProcessor] is `null`, this output channel will be removed.
@@ -121,20 +102,11 @@ class VideoPipeline(val width: Int,
       this.frameProcessor = frameProcessor

       if (frameProcessor != null) {
-        if (this.imageReader == null) {
-          // 1. Create new ImageReader that just calls the Frame Processor
-          this.imageReader = getImageReader()
-        }
-
-        // 2. Configure OpenGL pipeline to stream Frames into the ImageReader's surface
-        setFrameProcessorOutputSurface(imageReader!!.surface)
+        // Configure OpenGL pipeline to stream Frames into the Frame Processor (CPU pixel access)
+        setFrameProcessor(frameProcessor)
       } else {
-        // 1. Configure OpenGL pipeline to stop streaming Frames into the ImageReader's surface
-        removeFrameProcessorOutputSurface()
-
-        // 2. Close the ImageReader
-        this.imageReader?.close()
-        this.imageReader = null
+        // Configure OpenGL pipeline to stop streaming Frames into a Frame Processor
+        removeFrameProcessor()
       }
     }
   }
@@ -175,8 +147,8 @@ class VideoPipeline(val width: Int,
   private external fun getInputTextureId(): Int
   private external fun onBeforeFrame()
   private external fun onFrame(transformMatrix: FloatArray)
-  private external fun setFrameProcessorOutputSurface(surface: Any)
-  private external fun removeFrameProcessorOutputSurface()
+  private external fun setFrameProcessor(frameProcessor: FrameProcessor)
+  private external fun removeFrameProcessor()
   private external fun setRecordingSessionOutputSurface(surface: Any)
   private external fun removeRecordingSessionOutputSurface()
   private external fun setPreviewOutputSurface(surface: Any)
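The externals above now hand the FrameProcessor hybrid object itself across JNI instead of an ImageReader Surface. A sketch of what the C++ side of setFrameProcessor()/removeFrameProcessor() plausibly looks like - VideoPipeline.cpp is not shown in this excerpt and _frameProcessor is an assumed member, but jni::make_global is the standard fbjni way to keep a Java object alive across calls:

#include <fbjni/fbjni.h>

void VideoPipeline::setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor) {
  // Keep a global reference so the Frame Processor outlives this JNI call.
  _frameProcessor = jni::make_global(frameProcessor);
}

void VideoPipeline::removeFrameProcessor() {
  // Drop the reference; subsequent frames skip the Frame Processor entirely.
  _frameProcessor = nullptr;
}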


@@ -17,9 +17,8 @@ public class ExampleFrameProcessorPlugin extends FrameProcessorPlugin {
   @Override
   public Object callback(@NotNull Frame frame, @Nullable Map<String, Object> params) {
     if (params == null) return null;
-    Image image = frame.getImage();

-    Log.d("ExamplePlugin", image.getWidth() + " x " + image.getHeight() + " Image with format #" + image.getFormat() + ". Logging " + params.size() + " parameters:");
+    Log.d("ExamplePlugin", frame.getWidth() + " x " + frame.getHeight() + " Image with format #" + frame.getPixelFormat() + ". Logging " + params.size() + " parameters:");

     for (String key : params.keySet()) {
       Object value = params.get(key);


@@ -9,6 +9,7 @@ import {
   sortFormats,
   useCameraDevices,
   useFrameProcessor,
+  useSkiaFrameProcessor,
   VideoFile,
 } from 'react-native-vision-camera';
 import { Camera } from 'react-native-vision-camera';
@@ -218,11 +219,15 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
   const paint = Skia.Paint();
   paint.setImageFilter(imageFilter);

-  const frameProcessor = useFrameProcessor((frame) => {
+  const frameProcessor = useSkiaFrameProcessor((frame) => {
     'worklet';
+    const rect = Skia.XYWHRect(150, 150, 300, 300);
+    const paint = Skia.Paint();
+    paint.setColor(Skia.Color('red'));
+    frame.drawRect(rect, paint);
     console.log(frame.timestamp, frame.toString(), frame.pixelFormat);
-    examplePlugin(frame);
   }, []);

   return (


@@ -18,7 +18,6 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
-  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
@@ -176,11 +175,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
     auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
     return jsi::Value((double) bytesPerRow);
   }
-  if (name == "planesCount") {
-    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
-    auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
-    return jsi::Value((double) planesCount);
-  }

   // fallback to base implementation
   return HostObject::get(runtime, propName);


@@ -39,15 +39,15 @@ using namespace facebook;
 }

 - (void)call:(Frame*)frame {
-  [_skiaRenderer renderCameraFrameToOffscreenCanvas:frame.buffer
+  [_skiaRenderer renderCameraFrameToOffscreenSurface:frame.buffer
                                    withDrawCallback:^(SkiaCanvas _Nonnull canvas) {
     // Create the Frame Host Object wrapping the internal Frame and Skia Canvas
     self->_skiaCanvas->setCanvas(static_cast<SkCanvas*>(canvas));
     auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, self->_skiaCanvas);

     // Call JS Frame Processor
     [self callWithFrameHostObject:frameHostObject];

     // Remove Skia Canvas from Host Object because it is no longer valid
     frameHostObject->invalidateCanvas();
   }];


@@ -30,7 +30,7 @@ typedef void(^draw_callback_t)(SkiaCanvas _Nonnull);
 The given callback will be executed with a reference to the Skia Canvas
 for the user to perform draw operations on (in this case, through a JS Frame Processor)
 */
-- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
+- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;

 /**
 Renders the latest Frame to the onscreen Layer.
 This should be called every time you want the UI to update, e.g. for 60 FPS; every 16.66ms.
 */

View File

@@ -35,7 +35,7 @@
   std::unique_ptr<RenderContext> _layerContext;
   // The texture holding the drawn-to Frame
   id<MTLTexture> _texture;
   // For synchronization between the two Threads/Contexts
   std::mutex _textureMutex;
   std::atomic<bool> _hasNewFrame;
@@ -70,7 +70,7 @@
   return _texture;
 }

-- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
+- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
   // Wrap in auto release pool since we want the system to clean up after rendering
   @autoreleasepool {
     // Get the Frame's PixelBuffer
@@ -87,7 +87,7 @@
                                        height:CVPixelBufferGetHeight(pixelBuffer)];
     // Get & Lock the writeable Texture from the Metal Drawable
     GrMtlTextureInfo textureInfo;
     textureInfo.fTexture.retain((__bridge void*)texture);
     GrBackendRenderTarget backendRenderTarget((int)texture.width,
@@ -122,7 +122,7 @@
     // The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
     // but that'll just overwrite the existing one - no need to worry.
     canvas->drawImage(image, 0, 0);
     // Call the draw callback - probably a JS Frame Processor.
     callback(static_cast<void*>(canvas));
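
That pre-draw/overwrite contract is exactly what `frame.render(paint)` relies on from JS: the renderer has already drawn the camera image, and a JS-side re-draw simply replaces it. A hedged sketch of passing a custom paint, assuming `@shopify/react-native-skia`'s `ColorFilter` and `BlendMode` APIs:

import { BlendMode, Skia } from '@shopify/react-native-skia'

// Inside a component: re-draw the camera image with a red tint. This simply
// overwrites the drawImage() the renderer already performed, as noted above.
const frameProcessor = useSkiaFrameProcessor((frame) => {
  'worklet'
  const paint = Skia.Paint()
  paint.setColorFilter(
    Skia.ColorFilter.MakeBlend(Skia.Color('rgba(255, 0, 0, 0.4)'), BlendMode.SrcOver)
  )
  frame.render(paint)
}, [])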
@@ -145,7 +145,7 @@
   @autoreleasepool {
     auto context = _layerContext->skiaContext.get();
     // Create a Skia Surface from the CAMetalLayer (use to draw to the View)
     GrMTLHandle drawableHandle;
     auto surface = SkSurfaces::WrapCAMetalLayer(context,
@@ -161,14 +161,14 @@
     }
     auto canvas = surface->getCanvas();
     // Lock the Mutex so we can operate on the Texture atomically without
     // renderFrameToCanvas() overwriting in between from a different thread
     std::unique_lock lock(_textureMutex);
     auto texture = _texture;
     if (texture == nil) return;
     // Calculate Center Crop (aspectRatio: cover) transform
     auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
     auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
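
The center-crop here is the usual "cover" mapping: scale the source uniformly until it fills the destination, then center it so the overflow is cropped evenly on both sides. A self-contained sketch of that math (TypeScript for brevity; the renderer itself expresses this with SkRect/SkMatrix):

interface Size { width: number; height: number }

// "Cover": scale uniformly to fill, then center; the overflow gets cropped.
function coverTransform(source: Size, destination: Size) {
  const scale = Math.max(
    destination.width / source.width,
    destination.height / source.height
  )
  const translateX = (destination.width - source.width * scale) / 2
  const translateY = (destination.height - source.height * scale) / 2
  return { scale, translateX, translateY }
}

// e.g. a 1920x1080 texture into a 390x844 surface: scale ≈ 0.78,
// translateX ≈ -555 (the sides are cropped), translateY ≈ 0.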
@@ -202,7 +202,7 @@
     id<MTLCommandBuffer> commandBuffer([_layerContext->commandQueue commandBuffer]);
     [commandBuffer presentDrawable:drawable];
     [commandBuffer commit];
     // Set flag back to false
     _hasNewFrame = false;
     lock.unlock();

View File

@@ -22,10 +22,6 @@ export interface Frame {
    * Returns the number of bytes per row.
    */
   bytesPerRow: number;
-  /**
-   * Returns the number of planes this frame contains.
-   */
-  planesCount: number;
   /**
    * Returns whether the Frame is mirrored (selfie camera) or not.
    */
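
One practical note on the surviving `bytesPerRow` property: rows can be padded beyond `width * bytesPerPixel`, so any pixel access must index by the row stride. A hedged sketch - `toArrayBuffer()` is an assumption based on this PR's CPU `ByteBuffer`-backed Frame, and the exact accessor name is not shown in this diff:

const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  const pixels = new Uint8Array(frame.toArrayBuffer()) // assumed accessor
  // Rows may be padded: index with bytesPerRow, never width * 4.
  const x = 20
  const y = 10
  const offset = y * frame.bytesPerRow + x * 4 // assuming RGBA_8888
  const [r, g, b, a] = pixels.subarray(offset, offset + 4)
  console.log(`pixel(${x},${y}) = rgba(${r}, ${g}, ${b}, ${a})`)
}, [])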