feat: Skia for Android (#1731)
* feat: Call Skia Renderer
* Use default NativePreviewView for Skia
* Render to separate FBO
* It appears once
* Refactor a lot lol
* Pass width/height
* Read width/heights
* Update SkiaRenderer.cpp
* Read stencil/samples
* Use switch for target
* Clear full red
* Update VideoPipeline.cpp
* fix: Use `BorrowTextureFrom` instead of `AdoptTextureFrom`
* Get it to work
* Draw Camera Frame again (only works for first frame)
* glDisable(GL_BLEND)
* Use Frame Buffer again
* Simplify Skia offscreen surface creation
* fix: Get it to kinda work?
* fix: Remove `sampler2D` shader. Only the EXTERNAL_OES one kinda works
* Revert "fix: Remove `sampler2D` shader". This reverts commit bf241a82f440f5a442f23a2b10329b813e7cdb3e.
* Revert "fix: Get it to kinda work?". This reverts commit ea6a8784ad8dc7d05e8076591874f021b51dd84a.
* fix: Use Skia for rendering
* Simplify drawing code a lot
* Clean up drawing loop a bit more
* Some docs
* Update SkiaRenderer.cpp
* Surface
* try to use Matrix
* Use BottomLeft as a surface origin again
* Get actual surface dimensions
* Use 1x1 pbuffer instead
* Update SkiaRenderer.cpp
* Update SkiaRenderer.cpp
* feat: Implement Skia Frame Processor (#1735)
* feat: Implement JS Skia Frame Processor
* Update SkiaRenderer.cpp
* push
* Create Frame from C++
* compile
* Compile
* Update VideoPipeline.cpp
* Fix JNI local ref
* Use `HardwareBuffer` for implementation
* feat: Custom `Frame` implementation that uses CPU `ByteBuffer` (#1736)
* feat: Implement JS Skia Frame Processor
* Update SkiaRenderer.cpp
* push
* Create Frame from C++
* compile
* Compile
* Update VideoPipeline.cpp
* Fix JNI local ref
* Use `HardwareBuffer` for implementation
* try: Try to just create a CPU based ByteBuffer
* fix: Fix Java Type
* fix remaining errors
* try fixing FrameFactory
* Use `free`
* fix: Fix scene mode crash on some emulators
* fix: Fix scene mode crash on some emulators
* Fix getting pixels
* fix: Fix buffer not being freed
* Add some docs to `Frame`
* Test Skia again
* Use `getCurrentPresentationTime()`
* Remove `FrameFactory.cpp`
* Update VideoPipeline.h
* Update VideoPipeline.cpp
parent 6bbb44d541
commit a7c137da07
android/CMakeLists.txt

@@ -12,7 +12,7 @@ find_package(ReactAndroid REQUIRED CONFIG)
find_package(fbjni REQUIRED CONFIG)
find_library(LOG_LIB log)

add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS} -DVISION_CAMERA_ENABLE_SKIA=${ENABLE_SKIA})
add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS} -DVISION_CAMERA_ENABLE_SKIA=${ENABLE_SKIA} -DEGL_EGLEXT_PROTOTYPES=1)

# Add react-native-vision-camera sources
@@ -37,6 +37,9 @@ add_library(
        src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
        # Skia Frame Processor
        src/main/cpp/skia/SkiaRenderer.cpp
        src/main/cpp/skia/JSkiaFrameProcessor.cpp
        src/main/cpp/skia/DrawableFrameHostObject.cpp
        src/main/cpp/skia/VisionCameraSkiaContext.cpp
)

# Header Search Paths (includes)
@@ -48,6 +51,7 @@ target_include_directories(
        "src/main/cpp/frameprocessor"
        "src/main/cpp/frameprocessor/java-bindings"
        "src/main/cpp/skia"
        "src/main/cpp/skia/java-bindings"
        "${NODE_MODULES_DIR}/react-native/ReactCommon"
        "${NODE_MODULES_DIR}/react-native/ReactCommon/callinvoker"
        "${NODE_MODULES_DIR}/react-native/ReactAndroid/src/main/jni/react/turbomodule" # <-- CallInvokerHolder JNI wrapper
@@ -97,16 +101,18 @@ if(ENABLE_FRAME_PROCESSORS)
    target_include_directories(
            ${PACKAGE_NAME}
            PRIVATE
            "${RNSKIA_PATH}/cpp/api/"
            "${RNSKIA_PATH}/cpp/jsi/"
            "${RNSKIA_PATH}/cpp/rnskia/"
            "${RNSKIA_PATH}/cpp/skia"
            "${RNSKIA_PATH}/cpp/skia/include/"
            "${RNSKIA_PATH}/cpp/skia/include/config/"
            "${RNSKIA_PATH}/cpp/skia/include/core/"
            "${RNSKIA_PATH}/cpp/skia/include/effects/"
            "${RNSKIA_PATH}/cpp/skia/include/utils/"
            "${RNSKIA_PATH}/cpp/skia/include/pathops/"
            "${RNSKIA_PATH}/cpp/skia/modules/"
            # "${RNSKIA_PATH}/cpp/skia/modules/skparagraph/include/"
            "${RNSKIA_PATH}/cpp/skia/include/"
            "${RNSKIA_PATH}/cpp/skia"
            "${RNSKIA_PATH}/cpp/utils/"
    )

    target_link_libraries(
OpenGLContext.cpp

@@ -10,18 +10,14 @@

#include <android/native_window.h>
#include <android/log.h>
#include <chrono>

#include "OpenGLError.h"

namespace vision {

std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface(int width, int height) {
  return std::unique_ptr<OpenGLContext>(new OpenGLContext(width, height));
}

OpenGLContext::OpenGLContext(int width, int height) {
  _width = width;
  _height = height;
std::shared_ptr<OpenGLContext> OpenGLContext::CreateWithOffscreenSurface() {
  return std::unique_ptr<OpenGLContext>(new OpenGLContext());
}

OpenGLContext::~OpenGLContext() {

@@ -67,10 +63,10 @@ void OpenGLContext::ensureOpenGL() {
  __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
  EGLint attributes[] = {EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                         EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                         EGL_ALPHA_SIZE, 8,
                         EGL_BLUE_SIZE, 8,
                         EGL_GREEN_SIZE, 8,
                         EGL_RED_SIZE, 8,
                         EGL_GREEN_SIZE, 8,
                         EGL_BLUE_SIZE, 8,
                         EGL_ALPHA_SIZE, 8,
                         EGL_DEPTH_SIZE, 0,
                         EGL_STENCIL_SIZE, 0,
                         EGL_NONE};
@@ -90,9 +86,9 @@ void OpenGLContext::ensureOpenGL() {
  // EGLSurface
  if (offscreenSurface == EGL_NO_SURFACE) {
    // If we don't have a surface at all
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing %i x %i offscreen pbuffer EGLSurface..", _width, _height);
    EGLint attributes[] = {EGL_WIDTH, _width,
                           EGL_HEIGHT, _height,
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing 1x1 offscreen pbuffer EGLSurface..");
    EGLint attributes[] = {EGL_WIDTH, 1,
                           EGL_HEIGHT, 1,
                           EGL_NONE};
    offscreenSurface = eglCreatePbufferSurface(display, config, attributes);
    if (offscreenSurface == EGL_NO_SURFACE) throw OpenGLError("Failed to create OpenGL Surface!");
@@ -116,7 +112,12 @@ void OpenGLContext::use(EGLSurface surface) {
  // 3. Caller can now render to this surface
}

GLuint OpenGLContext::createTexture() {
void OpenGLContext::flush() const {
  bool successful = eglSwapBuffers(display, eglGetCurrentSurface(EGL_DRAW));
  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to swap OpenGL buffers!");
}

OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width, int height) {
  // 1. Make sure the OpenGL context is initialized
  this->ensureOpenGL();

@@ -127,7 +128,42 @@ GLuint OpenGLContext::createTexture() {
  GLuint textureId;
  glGenTextures(1, &textureId);

  return textureId;
  GLenum target;
  switch (type) {
    case OpenGLTexture::Type::ExternalOES:
      target = GL_TEXTURE_EXTERNAL_OES;
      break;
    case OpenGLTexture::Type::Texture2D:
      target = GL_TEXTURE_2D;
      break;
    default:
      throw std::runtime_error("Invalid OpenGL Texture Type!");
  }
  glBindTexture(target, textureId);
  glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

  return {
    .id = textureId,
    .target = target,
    .width = width,
    .height = height
  };
}

void OpenGLContext::getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels) {
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(texture.target, texture.id);
  glReadPixels(0, 0, texture.width, texture.height, GL_RGBA, GL_UNSIGNED_BYTE, *outPixels);
  // height * width * components per pixel (4 for RGBA) * size of one number (byte)
  *outSize = texture.height * texture.width * 4 * sizeof(uint8_t);
}

long OpenGLContext::getCurrentPresentationTime() {
  auto now = std::chrono::steady_clock::now();
  auto duration = now.time_since_epoch();
  long long milliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
  return static_cast<long>(milliseconds);
}

} // namespace vision
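Note: after this change the context no longer sizes its pbuffer to the video dimensions. The 1x1 pbuffer exists only so the context can be made current; all real rendering targets window surfaces or textures. A minimal sketch of that pattern follows (helper name is illustrative, not the library's API; assumes an EGL 1.4 / GLES2 device). Per the EGL spec, pbuffer creation is only guaranteed when the config's EGL_SURFACE_TYPE includes EGL_PBUFFER_BIT; the config in this diff requests EGL_WINDOW_BIT, which Android drivers appear to tolerate in practice.

// Minimal sketch of the offscreen-context pattern above. The 1x1 pbuffer is
// never drawn to; it exists only so eglMakeCurrent() has a valid surface.
#include <EGL/egl.h>
#include <stdexcept>

EGLContext makeOffscreenContext(EGLDisplay display, EGLConfig config) {
  EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
  EGLContext context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttributes);
  if (context == EGL_NO_CONTEXT) throw std::runtime_error("eglCreateContext failed!");

  // Throwaway 1x1 surface, just to satisfy eglMakeCurrent():
  EGLint pbufferAttributes[] = {EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE};
  EGLSurface pbuffer = eglCreatePbufferSurface(display, config, pbufferAttributes);
  if (pbuffer == EGL_NO_SURFACE) throw std::runtime_error("eglCreatePbufferSurface failed!");

  if (!eglMakeCurrent(display, pbuffer, pbuffer, context)) {
    throw std::runtime_error("eglMakeCurrent failed!");
  }
  return context;
}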
OpenGLContext.h

@@ -6,9 +6,11 @@

#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include "OpenGLTexture.h"

#include <memory>
#include <functional>
#include <chrono>

#include "PassThroughShader.h"

@@ -24,7 +26,7 @@ class OpenGLContext {
   * Create a new instance of the OpenGLContext that draws to an off-screen PixelBuffer surface.
   * This will not perform any OpenGL operations yet, and is therefore safe to call from any Thread.
   */
  static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface(int width, int height);
  static std::shared_ptr<OpenGLContext> CreateWithOffscreenSurface();
  /**
   * Destroy the OpenGL Context. This needs to be called on the same thread that `use()` was called.
   */
@@ -41,10 +43,25 @@ class OpenGLContext {
   */
  void use();

  /**
   * Flushes all drawing operations by swapping the buffers and submitting the Frame to the GPU
   */
  void flush() const;

  /**
   * Create a new texture on this context
   */
  GLuint createTexture();
  OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);

  /**
   * Gets the pixels as CPU accessible memory of the given input texture
   */
  void getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels);

  /**
   * Gets the current presentation time for this OpenGL surface.
   */
  long getCurrentPresentationTime();

 public:
  EGLDisplay display = EGL_NO_DISPLAY;
@@ -53,13 +70,13 @@ class OpenGLContext {
  EGLConfig config = nullptr;

 private:
  int _width = 0, _height = 0;
  explicit OpenGLContext(int width, int height);
  explicit OpenGLContext() = default;
  void destroy();
  void ensureOpenGL();

 private:
  PassThroughShader _passThroughShader;
  std::chrono::time_point<std::chrono::system_clock> _startTime;

 private:
  static constexpr auto TAG = "OpenGLContext";
OpenGLError.h

@@ -4,8 +4,6 @@

#pragma once

#if VISION_CAMERA_ENABLE_SKIA

#include <string>
#include <stdexcept>
#include <GLES2/gl2.h>
@@ -23,8 +21,11 @@ inline std::string getEglErrorIfAny() {
class OpenGLError: public std::runtime_error {
 public:
  explicit OpenGLError(const std::string&& message): std::runtime_error(message + getEglErrorIfAny()) {}

  static inline void checkIfError(const std::string&& message) {
    auto error = getEglErrorIfAny();
    if (error.length() > 0) throw std::runtime_error(message + error);
  }
};

} // namespace vision

#endif
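Note: the new checkIfError helper consults the pending EGL error code and throws only if one is set, so it can be sprinkled after EGL/GL calls without changing the happy path. A hypothetical call site, to show the intended pattern:

// Illustrative fragment (display/surface are assumed to exist): run an EGL
// call, then surface any pending EGL error as a C++ exception.
eglSwapBuffers(display, surface);
vision::OpenGLError::checkIfError("Failed to swap buffers!");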
OpenGLRenderer.cpp

@@ -43,32 +43,35 @@ void OpenGLRenderer::destroy() {
  }
}

void OpenGLRenderer::renderTextureToSurface(GLuint textureId, float* transformMatrix) {
EGLSurface OpenGLRenderer::getEGLSurface() {
  if (_surface == EGL_NO_SURFACE) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
    _context->use();
    _surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
  }
  return _surface;
}

  // 1. Activate the OpenGL context for this surface
  _context->use(_surface);
void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
  // 1. Get (or create) the OpenGL EGLSurface which is the window render target (Android Surface)
  EGLSurface surface = getEGLSurface();

  // 2. Set the viewport for rendering
  // 2. Activate the OpenGL context for this surface
  _context->use(surface);
  OpenGLError::checkIfError("Failed to use context!");

  // 3. Set the viewport for rendering
  glViewport(0, 0, _width, _height);
  glDisable(GL_BLEND);
  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  glClear(GL_COLOR_BUFFER_BIT);

  // 3. Bind the input texture
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  // 4. Draw it using the pass-through shader which binds the texture and applies transforms
  _passThroughShader.draw(texture, transformMatrix);

  // 4. Draw it using the pass-through shader which also applies transforms
  _passThroughShader.draw(textureId, transformMatrix);

  // 5. Swap buffers to pass it to the window surface
  eglSwapBuffers(_context->display, _surface);
  // 5. Swap buffers to pass it to the window surface
  _context->flush();
  OpenGLError::checkIfError("Failed to render Frame to Surface!");
}

} // namespace vision
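Note: the window EGLSurface is now created lazily inside getEGLSurface(), on the first render, presumably so every EGL call stays on the render thread. The ANativeWindow it wraps typically comes from a Java Surface; a sketch of that acquisition (standard NDK calls, the helper itself is hypothetical):

// Sketch: how a Java Surface typically becomes a native render target for
// this renderer.
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <jni.h>

ANativeWindow* acquireWindow(JNIEnv* env, jobject javaSurface) {
  // Acquires a reference on the window; it must be balanced with
  // ANativeWindow_release() when the output surface is removed.
  ANativeWindow* window = ANativeWindow_fromSurface(env, javaSurface);
  return window; // later: eglCreateWindowSurface(display, config, window, nullptr)
}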
OpenGLRenderer.h

@@ -29,9 +29,9 @@ class OpenGLRenderer {
  ~OpenGLRenderer();

  /**
   * Renders the given Texture ID to the Surface
   * Renders the given Texture to the Surface
   */
  void renderTextureToSurface(GLuint textureId, float* transformMatrix);
  void renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix);

  /**
   * Destroys the OpenGL context. This needs to be called on the same thread that `use()` was called.
@@ -39,6 +39,11 @@ class OpenGLRenderer {
   */
  void destroy();

  /**
   * Gets the EGLSurface (window surface) that this OpenGL renderer is configured to render to.
   */
  EGLSurface getEGLSurface();

 private:
  explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
android/src/main/cpp/OpenGLTexture.h (new file)

@@ -0,0 +1,22 @@
//
// Created by Marc Rousavy on 30.08.23.
//

#pragma once

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdexcept>

struct OpenGLTexture {
  enum Type { Texture2D, ExternalOES };

  // The ID of the texture as returned in glGenTextures(..)
  GLuint id;
  // GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
  GLenum target;

  // Width and height of the texture
  int width = 0;
  int height = 0;
};
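Note: OpenGLTexture is a plain aggregate, so call sites can build it with designated initializers the same way OpenGLContext::createTexture() builds its return value. A hypothetical example (values are illustrative; GL_TEXTURE_EXTERNAL_OES comes from <GLES2/gl2ext.h>):

// Hypothetical usage of the struct above:
OpenGLTexture cameraTexture{
  .id = 1,                           // normally from glGenTextures(..)
  .target = GL_TEXTURE_EXTERNAL_OES, // camera frames arrive as external OES
  .width = 1920,
  .height = 1080,
};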
PassThroughShader.cpp

@@ -10,29 +10,29 @@
#include "OpenGLError.h"
#include <string>

#include <android/log.h>

namespace vision {

PassThroughShader::~PassThroughShader() {
  if (_programId != NO_SHADER) {
    glDeleteProgram(_programId);
    _programId = NO_SHADER;
  }

  if (_vertexBuffer != NO_BUFFER) {
    glDeleteBuffers(1, &_vertexBuffer);
    _vertexBuffer = NO_BUFFER;
  }
  if (_programId != NO_SHADER) {
    glDeleteProgram(_programId);
    _programId = NO_SHADER;
  }
}

void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {
void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
  // 1. Set up Shader Program
  if (_programId == NO_SHADER) {
    _programId = createProgram();
  }

  glUseProgram(_programId);

  if (_vertexParameters.aPosition == NO_POSITION) {
  if (_programId == NO_SHADER || _shaderTarget != texture.target) {
    if (_programId != NO_SHADER) {
      glDeleteProgram(_programId);
    }
    _programId = createProgram(texture.target);
    glUseProgram(_programId);
    _vertexParameters = {
      .aPosition = glGetAttribLocation(_programId, "aPosition"),
      .aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
@@ -41,15 +41,19 @@ void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {
    _fragmentParameters = {
      .uTexture = glGetUniformLocation(_programId, "uTexture"),
    };
    _shaderTarget = texture.target;
  }

  glUseProgram(_programId);

  // 2. Set up Vertices Buffer
  if (_vertexBuffer == NO_BUFFER) {
    glGenBuffers(1, &_vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
  }

  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
  glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);

  // 3. Pass all uniforms/attributes for vertex shader
  glEnableVertexAttribArray(_vertexParameters.aPosition);
  glVertexAttribPointer(_vertexParameters.aPosition,
@@ -71,7 +75,7 @@ void PassThroughShader::draw(GLuint textureId, float* transformMatrix) {

  // 4. Pass texture to fragment shader
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
  glBindTexture(texture.target, texture.id);
  glUniform1i(_fragmentParameters.uTexture, 0);

  // 5. Draw!
@@ -93,18 +97,19 @@ GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode)
  return shader;
}

GLuint PassThroughShader::createProgram() {
GLuint PassThroughShader::createProgram(GLenum textureTarget) {
  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
  GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
  auto fragmentShaderCode = textureTarget == GL_TEXTURE_EXTERNAL_OES ? FRAGMENT_SHADER_EXTERNAL_TEXTURE : FRAGMENT_SHADER;
  GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);

  GLuint program = glCreateProgram();
  if (program == 0) throw OpenGLError("Failed to create pass-through program!");

  glAttachShader(program, vertexShader);
  if (glGetError() != GL_NO_ERROR) throw OpenGLError("Failed to attach Vertex Shader!");
  OpenGLError::checkIfError("Failed to attach Vertex Shader!");

  glAttachShader(program, fragmentShader);
  if (glGetError() != GL_NO_ERROR) throw OpenGLError("Failed to attach Fragment Shader!");
  OpenGLError::checkIfError("Failed to attach Fragment Shader!");

  glLinkProgram(program);
  GLint linkStatus = GL_FALSE;
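Note: draw() now keys the compiled program on the texture target, because samplerExternalOES and sampler2D need different fragment shaders. A sketch of the resulting caller behavior (values are illustrative and a current GL context is assumed):

// Sketch of the target-keyed shader cache: the same PassThroughShader
// instance can draw both texture kinds and swaps programs transparently.
PassThroughShader shader;
OpenGLTexture cameraFrame = {.id = 1, .target = GL_TEXTURE_EXTERNAL_OES, .width = 1920, .height = 1080};
OpenGLTexture offscreen = {.id = 2, .target = GL_TEXTURE_2D, .width = 1920, .height = 1080};
float identity[16] = {1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1};

shader.draw(cameraFrame, identity); // compiles FRAGMENT_SHADER_EXTERNAL_TEXTURE
shader.draw(offscreen, identity);   // target changed -> program rebuilt with FRAGMENT_SHADER
shader.draw(offscreen, identity);   // same target -> cached program reused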
PassThroughShader.h

@@ -7,11 +7,14 @@
#include <EGL/egl.h>
#include <GLES2/gl2.h>

#include "OpenGLTexture.h"

namespace vision {

#define NO_SHADER 0
#define NO_POSITION 0
#define NO_BUFFER 0
#define NO_SHADER_TARGET 0

struct Vertex {
  GLfloat position[2];
@@ -26,15 +29,16 @@ class PassThroughShader {
  /**
   * Draw the texture using this shader.
   */
  void draw(GLuint textureId, float* transformMatrix);
  void draw(const OpenGLTexture& texture, float* transformMatrix);

 private:
  // Loading
  static GLuint loadShader(GLenum shaderType, const char* shaderCode);
  static GLuint createProgram();
  static GLuint createProgram(GLenum textureTarget);

 private:
  // Parameters
  // Shader program in memory
  GLenum _shaderTarget = NO_SHADER_TARGET;
  GLuint _programId = NO_SHADER;
  GLuint _vertexBuffer = NO_BUFFER;
  struct VertexParameters {
@@ -67,7 +71,17 @@ class PassThroughShader {
    }
  )";
  static constexpr char FRAGMENT_SHADER[] = R"(
    precision mediump float;
    varying vec2 vTexCoord;
    uniform sampler2D uTexture;

    void main() {
      gl_FragColor = texture2D(uTexture, vTexCoord);
    }
  )";
  static constexpr char FRAGMENT_SHADER_EXTERNAL_TEXTURE[] = R"(
    #extension GL_OES_EGL_image_external : require

    precision mediump float;
    varying vec2 vTexCoord;
    uniform samplerExternalOES uTexture;
VideoPipeline.cpp

@@ -9,6 +9,14 @@
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>

#include <chrono>

#include "OpenGLTexture.h"
#include "JFrameProcessor.h"
#include "JSkiaFrameProcessor.h"

namespace vision {

@@ -19,39 +27,28 @@ jni::local_ref<VideoPipeline::jhybriddata> VideoPipeline::initHybrid(jni::alias_
VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height): _javaPart(jni::make_global(jThis)) {
  _width = width;
  _height = height;
  _context = OpenGLContext::CreateWithOffscreenSurface(width, height);
  _context = OpenGLContext::CreateWithOffscreenSurface();
}

VideoPipeline::~VideoPipeline() {
  // 1. Remove output surfaces
  removeFrameProcessorOutputSurface();
  removeFrameProcessor();
  removeRecordingSessionOutputSurface();
  removePreviewOutputSurface();
  // 2. Delete the input textures
  if (_inputTextureId != NO_TEXTURE) {
    glDeleteTextures(1, &_inputTextureId);
    _inputTextureId = NO_TEXTURE;
  if (_inputTexture != std::nullopt) {
    glDeleteTextures(1, &_inputTexture->id);
  }
  // 4. Destroy all surfaces
  _previewOutput = nullptr;
  _frameProcessorOutput = nullptr;
  _recordingSessionOutput = nullptr;
  // 5. Destroy the OpenGL context
  // 3. Destroy the OpenGL context
  _context = nullptr;
}

void VideoPipeline::removeFrameProcessorOutputSurface() {
  if (_frameProcessorOutput) _frameProcessorOutput->destroy();
  _frameProcessorOutput = nullptr;
void VideoPipeline::removeFrameProcessor() {
  _frameProcessor = nullptr;
}

void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
  // 1. Delete existing output surface
  removeFrameProcessorOutputSurface();

  // 2. Set new output surface if it is not null
  ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
  _frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
void VideoPipeline::setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor) {
  _frameProcessor = jni::make_global(frameProcessor);
}

void VideoPipeline::removeRecordingSessionOutputSurface() {
@@ -73,6 +70,11 @@ void VideoPipeline::removePreviewOutputSurface() {
  _previewOutput = nullptr;
}

jni::local_ref<JFrame> VideoPipeline::createFrame() {
  static const auto createFrameMethod = javaClassLocal()->getMethod<JFrame()>("createFrame");
  return createFrameMethod(_javaPart);
}

void VideoPipeline::setPreviewOutputSurface(jobject surface) {
  // 1. Delete existing output surface
  removePreviewOutputSurface();
@@ -83,48 +85,119 @@ void VideoPipeline::setPreviewOutputSurface(jobject surface) {
}

int VideoPipeline::getInputTextureId() {
  if (_inputTextureId != NO_TEXTURE) return static_cast<int>(_inputTextureId);

  _inputTextureId = _context->createTexture();

  return static_cast<int>(_inputTextureId);
  if (_inputTexture == std::nullopt) {
    _inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
  }
  return static_cast<int>(_inputTexture->id);
}

void VideoPipeline::onBeforeFrame() {
  // 1. Activate the offscreen context
  _context->use();

  glBindTexture(GL_TEXTURE_EXTERNAL_OES, _inputTextureId);
  // 2. Prepare the external texture so the Camera can render into it
  OpenGLTexture& texture = _inputTexture.value();
  glBindTexture(texture.target, texture.id);
}

void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
  // Get the OpenGL transform Matrix (transforms, scales, rotations)
  // 1. Activate the offscreen context
  _context->use();

  // 2. Get the OpenGL transform Matrix (transforms, scales, rotations)
  float transformMatrix[16];
  transformMatrixParam->getRegion(0, 16, transformMatrix);

  if (_previewOutput) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
    _previewOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
  }
  if (_frameProcessorOutput) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
    _frameProcessorOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
  }
  if (_recordingSessionOutput) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
    _recordingSessionOutput->renderTextureToSurface(_inputTextureId, transformMatrix);
  // 3. Prepare the texture we are going to render
  OpenGLTexture& texture = _inputTexture.value();

  // 4. Render to all outputs!
  auto isSkiaFrameProcessor = _frameProcessor != nullptr && _frameProcessor->isInstanceOf(JSkiaFrameProcessor::javaClassStatic());
  if (isSkiaFrameProcessor) {
    // 4.1. If we have a Skia Frame Processor, prepare to render to an offscreen surface using Skia
    jni::global_ref<JSkiaFrameProcessor::javaobject> skiaFrameProcessor = jni::static_ref_cast<JSkiaFrameProcessor::javaobject>(_frameProcessor);
    SkiaRenderer& skiaRenderer = skiaFrameProcessor->cthis()->getSkiaRenderer();
    auto drawCallback = [=](SkCanvas* canvas) {
      // Create a JFrame instance (this uses queues/recycling)
      auto frame = JFrame::create(texture.width,
                                  texture.height,
                                  texture.width * 4,
                                  _context->getCurrentPresentationTime(),
                                  "portrait",
                                  false);

      // Fill the Frame with the contents of the GL surface
      _context->getPixelsOfTexture(texture,
                                   &frame->cthis()->pixelsSize,
                                   &frame->cthis()->pixels);

      // Call the Frame processor with the Frame
      frame->cthis()->incrementRefCount();
      skiaFrameProcessor->cthis()->call(frame, canvas);
      frame->cthis()->decrementRefCount();
    };

    // 4.2. Render to the offscreen surface using Skia
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using Skia..");
    OpenGLTexture offscreenTexture = skiaRenderer.renderTextureToOffscreenSurface(*_context,
                                                                                  texture,
                                                                                  transformMatrix,
                                                                                  drawCallback);

    // 4.3. Now render the result of the offscreen surface to all output surfaces!
    if (_previewOutput) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
      skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _previewOutput->getEGLSurface());
    }
    if (_recordingSessionOutput) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
      skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _recordingSessionOutput->getEGLSurface());
    }
  } else {
    // 4.1. If we have a Frame Processor, call it
    if (_frameProcessor != nullptr) {
      // Create a JFrame instance (this uses queues/recycling)
      auto frame = JFrame::create(texture.width,
                                  texture.height,
                                  texture.width * 4,
                                  _context->getCurrentPresentationTime(),
                                  "portrait",
                                  false);

      // Fill the Frame with the contents of the GL surface
      _context->getPixelsOfTexture(texture,
                                   &frame->cthis()->pixelsSize,
                                   &frame->cthis()->pixels);

      // Call the Frame processor with the Frame
      frame->cthis()->incrementRefCount();
      _frameProcessor->cthis()->call(frame);
      frame->cthis()->decrementRefCount();
    }

    // 4.2. Simply pass-through shader to render the texture to all output EGLSurfaces
    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using pass-through OpenGL Shader..");
    if (_previewOutput) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
      _previewOutput->renderTextureToSurface(texture, transformMatrix);
    }
    if (_recordingSessionOutput) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
      _recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
    }
  }
}

void VideoPipeline::registerNatives() {
  registerHybrid({
    makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
    makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
    makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
    makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
    makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
    makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
    makeNativeMethod("setFrameProcessor", VideoPipeline::setFrameProcessor),
    makeNativeMethod("removeFrameProcessor", VideoPipeline::removeFrameProcessor),
    makeNativeMethod("setPreviewOutputSurface", VideoPipeline::setPreviewOutputSurface),
    makeNativeMethod("removePreviewOutputSurface", VideoPipeline::removePreviewOutputSurface),
    makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
    makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
    makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
    makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
    makeNativeMethod("onFrame", VideoPipeline::onFrame),
  });
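Note: each frame-processor invocation above is bracketed by incrementRefCount()/decrementRefCount(), so JS can extend a Frame's lifetime while the pipeline still reclaims the CPU pixel buffer deterministically. A condensed sketch of that contract (variable values are illustrative):

// Condensed sketch of the Frame lifetime contract used above. JS may call
// incrementRefCount() inside the processor to keep the Frame alive longer.
auto frame = JFrame::create(width, height, width * 4, timestampMs, "portrait", false);

frame->cthis()->incrementRefCount();  // pipeline owns the Frame (refCount = 1)
frameProcessor->cthis()->call(frame); // runs the JS worklet with a FrameHostObject
frame->cthis()->decrementRefCount();  // refCount reaches 0 -> close() frees `pixels`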
VideoPipeline.h

@@ -8,14 +8,15 @@
#include <fbjni/fbjni.h>
#include <EGL/egl.h>
#include <android/native_window.h>
#include "PassThroughShader.h"
#include "OpenGLRenderer.h"
#include "OpenGLContext.h"
#include <memory>

namespace vision {
#include "OpenGLRenderer.h"
#include "OpenGLContext.h"

#define NO_TEXTURE 0
#include "OpenGLTexture.h"
#include "JFrameProcessor.h"

namespace vision {

using namespace facebook;

@@ -32,8 +33,8 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
  int getInputTextureId();

  // <- Frame Processor output
  void setFrameProcessorOutputSurface(jobject surface);
  void removeFrameProcessorOutputSurface();
  void setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor);
  void removeFrameProcessor();

  // <- MediaRecorder output
  void setRecordingSessionOutputSurface(jobject surface);
@@ -50,16 +51,20 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
 private:
  // Private constructor. Use `create(..)` to create new instances.
  explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
  // Creates a new Frame instance which should be filled with data.
  jni::local_ref<JFrame> createFrame();

 private:
  // Input Surface Texture
  GLuint _inputTextureId = NO_TEXTURE;
  std::optional<OpenGLTexture> _inputTexture;
  int _width = 0;
  int _height = 0;

  // (Optional) Frame Processor that processes frames before they go into output
  jni::global_ref<JFrameProcessor::javaobject> _frameProcessor = nullptr;

  // Output Contexts
  std::shared_ptr<OpenGLContext> _context = nullptr;
  std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
  std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
  std::unique_ptr<OpenGLRenderer> _previewOutput = nullptr;
VisionCamera.cpp

@@ -4,7 +4,7 @@
#include "JFrameProcessor.h"
#include "JVisionCameraProxy.h"
#include "VisionCameraProxy.h"
#include "SkiaRenderer.h"
#include "JSkiaFrameProcessor.h"
#include "VideoPipeline.h"

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
@@ -17,7 +17,7 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
    vision::JFrameProcessor::registerNatives();
#endif
#if VISION_CAMERA_ENABLE_SKIA
    vision::SkiaRenderer::registerNatives();
    vision::JSkiaFrameProcessor::registerNatives();
#endif
  });
}
FrameHostObject.cpp

@@ -31,7 +31,6 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
@@ -55,7 +54,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                const jsi::Value* args,
                size_t count) -> jsi::Value {
      // Increment retain count by one.
      this->frame->incrementRefCount();
      this->frame->cthis()->incrementRefCount();
      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(runtime,
@@ -69,7 +68,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                const jsi::Value* args,
                size_t count) -> jsi::Value {
      // Decrement retain count by one. If the retain count is zero, the Frame gets closed.
      this->frame->decrementRefCount();
      this->frame->cthis()->decrementRefCount();
      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(runtime,
@@ -85,8 +84,8 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
      if (!this->frame) {
        return jsi::String::createFromUtf8(runtime, "[closed frame]");
      }
      auto width = this->frame->getWidth();
      auto height = this->frame->getHeight();
      auto width = this->frame->cthis()->getWidth();
      auto height = this->frame->cthis()->getHeight();
      auto str = std::to_string(width) + " x " + std::to_string(height) + " Frame";
      return jsi::String::createFromUtf8(runtime, str);
    };
@@ -97,11 +96,8 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                const jsi::Value& thisArg,
                const jsi::Value* args,
                size_t count) -> jsi::Value {
      auto buffer = this->frame->toByteBuffer();
      if (!buffer->isDirect()) {
        throw std::runtime_error("Failed to get byte content of Frame - array is not direct ByteBuffer!");
      }
      auto size = buffer->getDirectSize();
      size_t size = frame->cthis()->pixelsSize;
      uint8_t* pixels = frame->cthis()->pixels;

      static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
      if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
@@ -119,7 +115,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr

      // directly write to C++ JSI ArrayBuffer
      auto destinationBuffer = arrayBuffer.data(runtime);
      memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);
      memcpy(destinationBuffer, pixels, sizeof(uint8_t) * size);

      return arrayBuffer;
    };
@@ -127,33 +123,30 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
  }

  if (name == "isValid") {
    return jsi::Value(this->frame && this->frame->getIsValid());
    return jsi::Value(this->frame && this->frame->cthis()->getIsValid());
  }
  if (name == "width") {
    return jsi::Value(this->frame->getWidth());
    return jsi::Value(this->frame->cthis()->getWidth());
  }
  if (name == "height") {
    return jsi::Value(this->frame->getHeight());
    return jsi::Value(this->frame->cthis()->getHeight());
  }
  if (name == "isMirrored") {
    return jsi::Value(this->frame->getIsMirrored());
    return jsi::Value(this->frame->cthis()->getIsMirrored());
  }
  if (name == "orientation") {
    auto string = this->frame->getOrientation();
    auto string = this->frame->cthis()->getOrientation();
    return jsi::String::createFromUtf8(runtime, string->toStdString());
  }
  if (name == "pixelFormat") {
    auto string = this->frame->getPixelFormat();
    auto string = this->frame->cthis()->getPixelFormat();
    return jsi::String::createFromUtf8(runtime, string->toStdString());
  }
  if (name == "timestamp") {
    return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
    return jsi::Value(static_cast<double>(this->frame->cthis()->getTimestamp()));
  }
  if (name == "bytesPerRow") {
    return jsi::Value(this->frame->getBytesPerRow());
  }
  if (name == "planesCount") {
    return jsi::Value(this->frame->getPlanesCount());
    return jsi::Value(this->frame->cthis()->getBytesPerRow());
  }

  // fallback to base implementation
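Note: toArrayBuffer() reuses a single ArrayBuffer stored on the JS global object instead of allocating one per frame. A sketch of that caching pattern using a jsi::MutableBuffer-backed allocation (helper names and the resizing policy are assumptions, not the library's exact code; newer JSI versions expose jsi::MutableBuffer):

#include <jsi/jsi.h>
#include <memory>
#include <vector>

using namespace facebook;

// Backing storage for a jsi::ArrayBuffer.
class VectorBuffer : public jsi::MutableBuffer {
 public:
  explicit VectorBuffer(size_t size) : _data(size) {}
  uint8_t* data() override { return _data.data(); }
  size_t size() const override { return _data.size(); }
 private:
  std::vector<uint8_t> _data;
};

jsi::ArrayBuffer getCachedArrayBuffer(jsi::Runtime& runtime, size_t size) {
  static constexpr auto kCacheProp = "__frameArrayBufferCache";
  auto global = runtime.global();
  // (Re)allocate the cached buffer if it is missing or too small for this frame.
  if (!global.hasProperty(runtime, kCacheProp) ||
      global.getPropertyAsObject(runtime, kCacheProp).getArrayBuffer(runtime).size(runtime) < size) {
    auto backing = std::make_shared<VectorBuffer>(size);
    global.setProperty(runtime, kCacheProp, jsi::ArrayBuffer(runtime, backing));
  }
  return global.getPropertyAsObject(runtime, kCacheProp).getArrayBuffer(runtime);
}

// Usage (mirroring toArrayBuffer() above):
//   auto buffer = getCachedArrayBuffer(runtime, size);
//   memcpy(buffer.data(runtime), pixels, size);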
FrameHostObject.h

@@ -26,7 +26,7 @@ class JSI_EXPORT FrameHostObject : public jsi::HostObject {
  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) override;

 public:
  jni::global_ref<JFrame> frame;
  jni::global_ref<JFrame::javaobject> frame;
};

} // namespace vision
JSIJNIConversion.cpp

@@ -111,10 +111,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c

    return jsi::String::createFromUtf8(runtime, object->toString());

  } else if (object->isInstanceOf(JList<jobject>::javaClassStatic())) {
  } else if (object->isInstanceOf(jni::JList<jobject>::javaClassStatic())) {
    // List<E>

    auto arrayList = static_ref_cast<JList<jobject>>(object);
    auto arrayList = jni::static_ref_cast<jni::JList<jobject>>(object);
    auto size = arrayList->size();

    auto result = jsi::Array(runtime, size);
@@ -125,10 +125,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
    }
    return result;

  } else if (object->isInstanceOf(JMap<jstring, jobject>::javaClassStatic())) {
  } else if (object->isInstanceOf(jni::JMap<jstring, jobject>::javaClassStatic())) {
    // Map<K, V>

    auto map = static_ref_cast<JMap<jstring, jobject>>(object);
    auto map = jni::static_ref_cast<jni::JMap<jstring, jobject>>(object);

    auto result = jsi::Object(runtime);
    for (const auto& entry : *map) {
@@ -140,7 +140,7 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
    return result;
  } else if (object->isInstanceOf(JFrame::javaClassStatic())) {
    // Frame
    auto frame = static_ref_cast<JFrame>(object);
    auto frame = jni::static_ref_cast<JFrame::javaobject>(object);

    // box into HostObject
    auto hostObject = std::make_shared<FrameHostObject>(frame);
JFrame.cpp

@@ -11,71 +11,85 @@
namespace vision {

using namespace facebook;
using namespace jni;

int JFrame::getWidth() const {
  static const auto getWidthMethod = getClass()->getMethod<jint()>("getWidth");
  return getWidthMethod(self());
void JFrame::registerNatives() {
  registerHybrid({
    makeNativeMethod("getWidth", JFrame::getWidth),
    makeNativeMethod("getHeight", JFrame::getHeight),
    makeNativeMethod("getBytesPerRow", JFrame::getBytesPerRow),
    makeNativeMethod("getTimestamp", JFrame::getTimestamp),
    makeNativeMethod("getOrientation", JFrame::getOrientation),
    makeNativeMethod("getIsMirrored", JFrame::getIsMirrored),
    makeNativeMethod("getPixelFormat", JFrame::getPixelFormat),
    makeNativeMethod("getByteBuffer", JFrame::getByteBuffer),
    makeNativeMethod("getIsValid", JFrame::getIsValid),
  });
}

int JFrame::getHeight() const {
  static const auto getWidthMethod = getClass()->getMethod<jint()>("getHeight");
  return getWidthMethod(self());
jni::local_ref<JFrame::javaobject> JFrame::create(int width,
                                                  int height,
                                                  int bytesPerRow,
                                                  long timestamp,
                                                  const std::string& orientation,
                                                  bool isMirrored) {
  return newObjectCxxArgs(width,
                          height,
                          bytesPerRow,
                          timestamp,
                          orientation,
                          isMirrored);
}

bool JFrame::getIsValid() const {
  static const auto getIsValidMethod = getClass()->getMethod<jboolean()>("getIsValid");
  return getIsValidMethod(self());
JFrame::JFrame(int width,
               int height,
               int bytesPerRow,
               long timestamp,
               const std::string& orientation,
               bool isMirrored) {
  _width = width;
  _height = height;
  _bytesPerRow = bytesPerRow;
  _timestamp = timestamp;
  _orientation = orientation;
  _isMirrored = isMirrored;
  _refCount = 0;
  pixelsSize = height * bytesPerRow;
  pixels = (uint8_t*) malloc(pixelsSize);
}

bool JFrame::getIsMirrored() const {
  static const auto getIsMirroredMethod = getClass()->getMethod<jboolean()>("getIsMirrored");
  return getIsMirroredMethod(self());
JFrame::~JFrame() noexcept {
  close();
}

jlong JFrame::getTimestamp() const {
  static const auto getTimestampMethod = getClass()->getMethod<jlong()>("getTimestamp");
  return getTimestampMethod(self());
bool JFrame::getIsValid() {
  return _refCount > 0 && !_isClosed;
}

local_ref<JString> JFrame::getOrientation() const {
  static const auto getOrientationMethod = getClass()->getMethod<JString()>("getOrientation");
  return getOrientationMethod(self());
}

local_ref<JString> JFrame::getPixelFormat() const {
  static const auto getPixelFormatMethod = getClass()->getMethod<JString()>("getPixelFormat");
  return getPixelFormatMethod(self());
}

int JFrame::getPlanesCount() const {
  static const auto getPlanesCountMethod = getClass()->getMethod<jint()>("getPlanesCount");
  return getPlanesCountMethod(self());
}

int JFrame::getBytesPerRow() const {
  static const auto getBytesPerRowMethod = getClass()->getMethod<jint()>("getBytesPerRow");
  return getBytesPerRowMethod(self());
}

local_ref<JByteBuffer> JFrame::toByteBuffer() const {
  static const auto toByteBufferMethod = getClass()->getMethod<JByteBuffer()>("toByteBuffer");
  return toByteBufferMethod(self());
jni::local_ref<jni::JByteBuffer> JFrame::getByteBuffer() {
  if (!getIsValid()) {
    [[unlikely]]
    throw std::runtime_error("Frame is no longer valid, cannot access getByteBuffer!");
  }
  return jni::JByteBuffer::wrapBytes(pixels, pixelsSize);
}

void JFrame::incrementRefCount() {
  static const auto incrementRefCountMethod = getClass()->getMethod<void()>("incrementRefCount");
  incrementRefCountMethod(self());
  std::unique_lock lock(_mutex);
  _refCount++;
}

void JFrame::decrementRefCount() {
  static const auto decrementRefCountMethod = getClass()->getMethod<void()>("decrementRefCount");
  decrementRefCountMethod(self());
  std::unique_lock lock(_mutex);
  _refCount--;
  if (_refCount <= 0) {
    this->close();
  }
}

void JFrame::close() {
  static const auto closeMethod = getClass()->getMethod<void()>("close");
  closeMethod(self());
  _isClosed = true;
  free(pixels);
  pixels = nullptr;
}

} // namespace vision
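Note: getByteBuffer() uses JByteBuffer::wrapBytes, which creates a direct ByteBuffer over the native allocation without copying; the buffer therefore aliases `pixels` and is only safe while the ref-count keeps the Frame alive. An illustrative sketch of that contract (values are hypothetical):

// The returned ByteBuffer aliases frame->pixels; it does not own the memory.
auto frame = JFrame::create(640, 480, 640 * 4, 0, "portrait", false);
frame->cthis()->incrementRefCount();
auto byteBuffer = frame->cthis()->getByteBuffer(); // zero-copy view of `pixels`
// ... read pixel data through byteBuffer here ...
frame->cthis()->decrementRefCount(); // refCount hits 0 -> close() frees pixels;
                                     // byteBuffer is now dangling and must not be used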
JFrame.h

@@ -7,29 +7,70 @@
#include <jni.h>
#include <fbjni/fbjni.h>
#include <fbjni/ByteBuffer.h>
#include <android/hardware_buffer.h>
#include <android/hardware_buffer_jni.h>
#include <mutex>

namespace vision {

using namespace facebook;
using namespace jni;

struct JFrame : public JavaClass<JFrame> {
class JFrame : public jni::HybridClass<JFrame> {
 public:
  static constexpr auto kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/Frame;";
  static void registerNatives();
  static jni::local_ref<JFrame::javaobject> create(int width,
                                                   int height,
                                                   int bytesPerRow,
                                                   long timestamp,
                                                   const std::string& orientation,
                                                   bool isMirrored);

  ~JFrame() noexcept;

 protected:
  friend HybridBase;
  explicit JFrame(int width,
                  int height,
                  int bytesPerRow,
                  long timestamp,
                  const std::string& orientation,
                  bool isMirrored);

 public:
  int getWidth() const;
  int getHeight() const;
  bool getIsValid() const;
  bool getIsMirrored() const;
  int getPlanesCount() const;
  int getBytesPerRow() const;
  jlong getTimestamp() const;
  local_ref<JString> getOrientation() const;
  local_ref<JString> getPixelFormat() const;
  local_ref<JByteBuffer> toByteBuffer() const;
  int getWidth() { return _width; }
  int getHeight() { return _height; }
  int getBytesPerRow() { return _bytesPerRow; }
  jlong getTimestamp() { return _timestamp; }
  jni::local_ref<jni::JString> getOrientation() { return jni::make_jstring(_orientation); }
  bool getIsMirrored() { return _isMirrored; }

  // TODO: Can this be something other than RGB?
  jni::local_ref<jni::JString> getPixelFormat() { return jni::make_jstring("rgb"); }

  bool getIsValid();
  jni::local_ref<jni::JByteBuffer> getByteBuffer();
  void incrementRefCount();
  void decrementRefCount();
  void close();

  // Backing byte data
  uint8_t* pixels = nullptr;
  size_t pixelsSize = 0;

 private:
  // Frame info
  int _width = 0;
  int _height = 0;
  int _bytesPerRow = 0;
  long _timestamp = 0;
  std::string _orientation = {};
  bool _isMirrored = false;

  // Ref-counting
  int _refCount = 0;
  bool _isClosed = false;
  std::mutex _mutex;
};

} // namespace vision
JFrameProcessor.cpp

@@ -17,9 +17,6 @@ using namespace facebook;
using namespace jni;

void JFrameProcessor::registerNatives() {
  registerHybrid({
    makeNativeMethod("call", JFrameProcessor::call)
  });
}

using TSelf = jni::local_ref<JFrameProcessor::javaobject>;
JFrameProcessor.h

@@ -21,7 +21,7 @@ namespace vision {

using namespace facebook;

struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
class JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
 public:
  static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/FrameProcessor;";
  static void registerNatives();
@@ -30,20 +30,25 @@ struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> {

 public:
  /**
   * Call the JS Frame Processor.
   * Wrap the Frame in a HostObject and call the Frame Processor.
   */
  void call(alias_ref<JFrame::javaobject> frame);
  void call(jni::alias_ref<JFrame::javaobject> frame);

 private:
  // Private constructor. Use `create(..)` to create new instances.
 protected:
  friend HybridBase;
  // C++ only constructor. Use `create(..)` to create new instances.
  explicit JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet,
                           std::shared_ptr<RNWorklet::JsiWorkletContext> context);
  JFrameProcessor(const JFrameProcessor &) = delete;
  JFrameProcessor &operator=(const JFrameProcessor &) = delete;

 private:
 protected:
  /**
   * Call the JS Frame Processor with the given Frame Host Object.
   */
  void callWithFrameHostObject(const std::shared_ptr<FrameHostObject>& frameHostObject) const;

 private:
  friend HybridBase;
  std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
  std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
};
JVisionCameraProxy.cpp

@@ -18,6 +18,10 @@
#include <react-native-worklets-core/WKTJsiWorkletContext.h>
#endif

#if VISION_CAMERA_ENABLE_SKIA
#include "JSkiaFrameProcessor.h"
#endif

namespace vision {

using TSelf = local_ref<HybridClass<JVisionCameraProxy>::jhybriddata>;
@@ -31,6 +35,7 @@ JVisionCameraProxy::JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::
                                       const jni::global_ref<JVisionCameraScheduler::javaobject>& scheduler) {
  _javaPart = make_global(javaThis);
  _runtime = runtime;
  _callInvoker = callInvoker;

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
  __android_log_write(ANDROID_LOG_INFO, TAG, "Creating Worklet Context...");
@@ -84,7 +89,7 @@ void JVisionCameraProxy::setFrameProcessor(int viewTag,
    frameProcessor = JFrameProcessor::create(worklet, _workletContext);
  } else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
    throw std::runtime_error("system/skia-unavailable: Skia is not yet implemented on Android!");
    frameProcessor = JSkiaFrameProcessor::create(worklet, _workletContext, _callInvoker);
#else
    throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
JVisionCameraProxy.h

@@ -36,11 +36,13 @@ class JVisionCameraProxy : public jni::HybridClass<JVisionCameraProxy> {
                         jni::local_ref<JMap<jstring, jobject>> options);

  jsi::Runtime* getJSRuntime() { return _runtime; }
  std::shared_ptr<react::CallInvoker> getCallInvoker() { return _callInvoker; }

 private:
  friend HybridBase;
  jni::global_ref<JVisionCameraProxy::javaobject> _javaPart;
  jsi::Runtime* _runtime;
  std::shared_ptr<react::CallInvoker> _callInvoker;
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
  std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
#endif
android/src/main/cpp/skia/DrawableFrameHostObject.cpp (new file)

@@ -0,0 +1,72 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#include "DrawableFrameHostObject.h"
#include <SkCanvas.h>
#include "FrameHostObject.h"

namespace vision {

std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
  auto result = FrameHostObject::getPropertyNames(rt);

  // Skia - Render Frame
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));

  if (_canvas != nullptr) {
    auto canvasPropNames = _canvas->getPropertyNames(rt);
    for (auto& prop : canvasPropNames) {
      result.push_back(std::move(prop));
    }
  }

  return result;
}

SkRect inscribe(SkSize size, SkRect rect) {
  auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
  auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
  return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
                          rect.y() + halfHeightDelta, size.width(),
                          size.height());
}

jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
  auto name = propName.utf8(runtime);

  if (name == "render") {
    auto render = JSI_HOST_FUNCTION_LAMBDA {
      if (_canvas == nullptr) {
        throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
      }

      throw std::runtime_error("render() is not yet implemented!");

      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
  }
  if (name == "isDrawable") {
    return jsi::Value(_canvas != nullptr);
  }

  if (_canvas != nullptr) {
    // If we have a Canvas, try to access the property on there.
    auto result = _canvas->get(runtime, propName);
    if (!result.isUndefined()) {
      return result;
    }
  }

  // fallback to base implementation
  return FrameHostObject::get(runtime, propName);
}

void DrawableFrameHostObject::invalidateCanvas() {
  _canvas = nullptr;
}

} // namespace vision
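Note: inscribe() centers a given size inside a rectangle. A worked example of the arithmetic (values are hypothetical):

// Centering a 100x50 size inside a 200x100 rect at the origin:
// halfWidthDelta = (200 - 100) / 2 = 50, halfHeightDelta = (100 - 50) / 2 = 25
SkRect rect = SkRect::MakeXYWH(0, 0, 200, 100);
SkSize size = SkSize::Make(100, 50);
SkRect centered = inscribe(size, rect);
// centered == SkRect::MakeXYWH(50, 25, 100, 50)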
android/src/main/cpp/skia/DrawableFrameHostObject.h (new file)

@@ -0,0 +1,33 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#pragma once

#include <jsi/jsi.h>
#include "FrameHostObject.h"
#include "JFrame.h"

#include <SkCanvas.h>
#include <JsiSkCanvas.h>

namespace vision {

using namespace facebook;

class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
 public:
  explicit DrawableFrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame,
                                   std::shared_ptr<RNSkia::JsiSkCanvas> canvas): FrameHostObject(frame), _canvas(canvas) {}

 public:
  jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;

  void invalidateCanvas();

 private:
  std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};

} // namespace vision
61
android/src/main/cpp/skia/JSkiaFrameProcessor.cpp
Normal file
61
android/src/main/cpp/skia/JSkiaFrameProcessor.cpp
Normal file
@ -0,0 +1,61 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA

#include "JSkiaFrameProcessor.h"
#include <jni.h>
#include <fbjni/fbjni.h>

#include <utility>
#include "JFrame.h"
#include "DrawableFrameHostObject.h"

#include <RNSkPlatformContext.h>
#include "VisionCameraSkiaContext.h"

namespace vision {

using namespace facebook;
using namespace jni;

void JSkiaFrameProcessor::registerNatives() {
}

using TSelf = jni::local_ref<JSkiaFrameProcessor::javaobject>;

JSkiaFrameProcessor::JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
                                         const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
                                         const std::shared_ptr<react::CallInvoker>& callInvoker)
    : JSkiaFrameProcessor::HybridBase(worklet, context) {
  // TODO: Can I use the Android Platform Context from react-native-skia here?
  auto skiaPlatformContext = std::make_shared<VisionCameraSkiaContext>(context->getJsRuntime(),
                                                                       callInvoker,
                                                                       1.0f);
  _jsiCanvas = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
  _skiaRenderer = std::make_shared<SkiaRenderer>();
}

TSelf JSkiaFrameProcessor::create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
                                  const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
                                  const std::shared_ptr<react::CallInvoker>& callInvoker) {
  return JSkiaFrameProcessor::newObjectCxxArgs(worklet, context, callInvoker);
}

void JSkiaFrameProcessor::call(alias_ref<JFrame::javaobject> frame,
                               SkCanvas* canvas) {
  // Create the Frame Host Object wrapping the internal Frame and Skia Canvas
  _jsiCanvas->setCanvas(canvas);
  auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, _jsiCanvas);

  // Call the base function in JFrameProcessor
  callWithFrameHostObject(frameHostObject);

  // Remove Skia Canvas from Host Object because it is no longer valid
  frameHostObject->invalidateCanvas();
}

} // namespace vision

#endif
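The setCanvas/invalidateCanvas pairing in call() is easy to get wrong if an exception escapes the worklet. A small RAII guard — a sketch, not part of this commit — would make the invalidation exception-safe:

// Sketch only: exception-safe canvas invalidation for JSkiaFrameProcessor::call().
// CanvasGuard is a hypothetical helper, not part of this commit.
class CanvasGuard {
 public:
  explicit CanvasGuard(std::shared_ptr<DrawableFrameHostObject> hostObject)
      : _hostObject(std::move(hostObject)) {}
  ~CanvasGuard() {
    // Runs even if callWithFrameHostObject(..) throws.
    _hostObject->invalidateCanvas();
  }
 private:
  std::shared_ptr<DrawableFrameHostObject> _hostObject;
};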

59  android/src/main/cpp/skia/JSkiaFrameProcessor.h  Normal file
@@ -0,0 +1,59 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#pragma once

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA

#include <string>
#include <memory>
#include <jni.h>
#include <fbjni/fbjni.h>

#include <react-native-worklets-core/WKTJsiWorklet.h>
#include <react-native-worklets-core/WKTJsiHostObject.h>

#include "JFrame.h"
#include "FrameHostObject.h"
#include "SkiaRenderer.h"
#include "JFrameProcessor.h"

#include <JsiSkCanvas.h>
#include <RNSkPlatformContext.h>

namespace vision {

using namespace facebook;

class JSkiaFrameProcessor : public jni::HybridClass<JSkiaFrameProcessor, JFrameProcessor> {
 public:
  static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/skia/SkiaFrameProcessor;";
  static void registerNatives();
  static jni::local_ref<JSkiaFrameProcessor::javaobject> create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
                                                                const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
                                                                const std::shared_ptr<react::CallInvoker>& callInvoker);

 public:
  /**
   * Call the JS Frame Processor with the given valid Canvas to draw on.
   */
  void call(jni::alias_ref<JFrame::javaobject> frame,
            SkCanvas* canvas);

  SkiaRenderer& getSkiaRenderer() { return *_skiaRenderer; }

 protected:
  friend HybridBase;
  // Private constructor. Use `create(..)` to create new instances.
  explicit JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
                               const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
                               const std::shared_ptr<react::CallInvoker>& callInvoker);

 private:
  std::shared_ptr<RNSkia::JsiSkCanvas> _jsiCanvas;
  std::shared_ptr<SkiaRenderer> _skiaRenderer;
};

} // namespace vision

#endif
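registerNatives() is empty here because the subclass adds no extra Java-callable methods; if it ever needs one, the usual fbjni pattern would apply. A sketch — the method name is hypothetical and does not exist in this commit:

// Hypothetical sketch: registering a native method on the hybrid class.
// `getDebugName` is illustrative only; it is not part of this commit.
void JSkiaFrameProcessor::registerNatives() {
  registerHybrid({
    makeNativeMethod("getDebugName", JSkiaFrameProcessor::getDebugName),
  });
}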

@@ -8,6 +8,8 @@
#include <android/log.h>
#include "OpenGLError.h"

#include <GLES2/gl2ext.h>

#include <core/SkColorSpace.h>
#include <core/SkCanvas.h>
#include <core/SkYUVAPixmaps.h>
@@ -22,308 +24,209 @@
#include <android/surface_texture_jni.h>

// from <gpu/ganesh/gl/GrGLDefines.h>
#define GR_GL_TEXTURE_EXTERNAL 0x8D65
#define GR_GL_RGBA8 0x8058
#define ACTIVE_SURFACE_ID 0
#define DEFAULT_FBO 0

namespace vision {

jni::local_ref<SkiaRenderer::jhybriddata> SkiaRenderer::initHybrid(jni::alias_ref<jhybridobject> javaPart) {
  return makeCxxInstance(javaPart);
}

SkiaRenderer::SkiaRenderer(const jni::alias_ref<jhybridobject>& javaPart) {
  _javaPart = jni::make_global(javaPart);

  __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing SkiaRenderer...");

  _previewSurface = nullptr;
  _previewWidth = 0;
  _previewHeight = 0;
  _inputSurfaceTextureId = NO_INPUT_TEXTURE;
}

SkiaRenderer::~SkiaRenderer() {
  if (_glDisplay != EGL_NO_DISPLAY) {
    eglMakeCurrent(_glDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    if (_glSurface != EGL_NO_SURFACE) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Surface...");
      eglDestroySurface(_glDisplay, _glSurface);
      _glSurface = EGL_NO_SURFACE;
    }
    if (_glContext != EGL_NO_CONTEXT) {
      __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Context...");
      eglDestroyContext(_glDisplay, _glContext);
      _glContext = EGL_NO_CONTEXT;
    }
    __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying OpenGL Display...");
    eglTerminate(_glDisplay);
    _glDisplay = EGL_NO_DISPLAY;
  }
  _offscreenSurface = nullptr;
  _offscreenSurfaceTextureId = NO_TEXTURE;

  // 3. Delete the Skia context
  if (_skiaContext != nullptr) {
    _skiaContext->abandonContext();
    _skiaContext = nullptr;
  }
  destroyOutputSurface();
}
void SkiaRenderer::ensureOpenGL(ANativeWindow* surface) {
  bool successful;
  // EGLDisplay
  if (_glDisplay == EGL_NO_DISPLAY) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLDisplay..");
    _glDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (_glDisplay == EGL_NO_DISPLAY) throw OpenGLError("Failed to get default OpenGL Display!");

    EGLint major;
    EGLint minor;
    successful = eglInitialize(_glDisplay, &major, &minor);
    if (!successful) throw OpenGLError("Failed to initialize OpenGL!");
  }

  // EGLConfig
  if (_glConfig == nullptr) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLConfig..");
    EGLint attributes[] = {EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                           EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                           EGL_ALPHA_SIZE, 8,
                           EGL_BLUE_SIZE, 8,
                           EGL_GREEN_SIZE, 8,
                           EGL_RED_SIZE, 8,
                           EGL_DEPTH_SIZE, 0,
                           EGL_STENCIL_SIZE, 0,
                           EGL_NONE};
    EGLint numConfigs;
    successful = eglChooseConfig(_glDisplay, attributes, &_glConfig, 1, &numConfigs);
    if (!successful || numConfigs == 0) throw OpenGLError("Failed to choose OpenGL config!");
  }

  // EGLContext
  if (_glContext == EGL_NO_CONTEXT) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLContext..");
    EGLint contextAttributes[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
    _glContext = eglCreateContext(_glDisplay, _glConfig, nullptr, contextAttributes);
    if (_glContext == EGL_NO_CONTEXT) throw OpenGLError("Failed to create OpenGL context!");
  }

  // EGLSurface
  if (_glSurface == EGL_NO_SURFACE) {
    __android_log_print(ANDROID_LOG_INFO, TAG, "Initializing EGLSurface..");
    _glSurface = eglCreateWindowSurface(_glDisplay, _glConfig, surface, nullptr);

sk_sp<GrDirectContext> SkiaRenderer::getSkiaContext() {
  if (_skiaContext == nullptr) {
    _skiaContext = GrDirectContext::MakeGL();
  }

    successful = eglMakeCurrent(_glDisplay, _glSurface, _glSurface, _glContext);
    if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to use current OpenGL context!");
  return _skiaContext;
}
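// Note (sketch, not part of this commit): GrDirectContext::MakeGL() binds to the
// EGL context that is current on the calling thread, so getSkiaContext() must only
// run after eglMakeCurrent(..). An explicit precondition check could look like:
//   if (eglGetCurrentContext() == EGL_NO_CONTEXT)
//     throw OpenGLError("Cannot create Skia context without a current OpenGL context!");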
void SkiaRenderer::setOutputSurface(jobject previewSurface) {
  __android_log_print(ANDROID_LOG_INFO, TAG, "Setting Output Surface..");
  destroyOutputSurface();

  _previewSurface = ANativeWindow_fromSurface(jni::Environment::current(), previewSurface);
  _glSurface = EGL_NO_SURFACE;

sk_sp<SkImage> SkiaRenderer::wrapTextureAsImage(OpenGLTexture& texture) {
  GrGLTextureInfo textureInfo {
    // OpenGL will automatically convert YUV -> RGB - if it's an EXTERNAL texture
    .fTarget = texture.target,
    .fID = texture.id,
    .fFormat = GR_GL_RGBA8,
  };
  GrBackendTexture skiaTexture(texture.width,
                               texture.height,
                               GrMipMapped::kNo,
                               textureInfo);
  sk_sp<SkImage> image = SkImages::BorrowTextureFrom(_skiaContext.get(),
                                                     skiaTexture,
                                                     kBottomLeft_GrSurfaceOrigin,
                                                     kN32_SkColorType,
                                                     kOpaque_SkAlphaType,
                                                     nullptr,
                                                     nullptr);
  if (image == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to create Skia Image! Cannot wrap input texture (frame) using Skia.");
  }
  return image;
}

void SkiaRenderer::destroyOutputSurface() {
  __android_log_print(ANDROID_LOG_INFO, TAG, "Destroying Output Surface..");
  if (_glSurface != EGL_NO_SURFACE) {
    eglDestroySurface(_glDisplay, _glSurface);
    _glSurface = EGL_NO_SURFACE;
  if (_skiaContext != nullptr) {
    _skiaContext->abandonContext();
    _skiaContext = nullptr;

sk_sp<SkSurface> SkiaRenderer::wrapEglSurfaceAsSurface(EGLSurface eglSurface) {
  GLint sampleCnt;
  glGetIntegerv(GL_SAMPLES, &sampleCnt);
  GLint stencilBits;
  glGetIntegerv(GL_STENCIL_BITS, &stencilBits);
  GrGLFramebufferInfo fboInfo {
    // DEFAULT_FBO is FBO0, meaning the default on-screen FBO for that given surface
    .fFBOID = DEFAULT_FBO,
    .fFormat = GR_GL_RGBA8
  };
  EGLint width = 0, height = 0;
  eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_WIDTH, &width);
  eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_HEIGHT, &height);
  GrBackendRenderTarget renderTarget(width,
                                     height,
                                     sampleCnt,
                                     stencilBits,
                                     fboInfo);
  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(),
                                                                 renderTarget,
                                                                 kBottomLeft_GrSurfaceOrigin,
                                                                 kN32_SkColorType,
                                                                 nullptr,
                                                                 &props,
                                                                 nullptr,
                                                                 nullptr);
  if (surface == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to create Skia Surface! Cannot wrap EGLSurface/FrameBuffer using Skia.");
  }
  return surface;
}

sk_sp<SkSurface> SkiaRenderer::getOffscreenSurface(int width, int height) {
  if (_offscreenSurface == nullptr || _offscreenSurface->width() != width || _offscreenSurface->height() != height) {
    // 1. Get Skia Context
    sk_sp<GrDirectContext> skiaContext = getSkiaContext();

    // 2. Create a backend texture (TEXTURE_2D + Frame Buffer)
    GrBackendTexture backendTexture = skiaContext->createBackendTexture(width,
                                                                        height,
                                                                        SkColorType::kN32_SkColorType,
                                                                        GrMipMapped::kNo,
                                                                        GrRenderable::kYes);

    // 3. Get its Texture ID
    GrGLTextureInfo info;
    backendTexture.getGLTextureInfo(&info);
    _offscreenSurfaceTextureId = info.fID;

    struct ReleaseContext {
      GrDirectContext* context;
      GrBackendTexture texture;
    };
    auto releaseCtx = new ReleaseContext(
        {skiaContext.get(), backendTexture});
    SkSurfaces::TextureReleaseProc releaseProc = [] (void* address) {
      // 5. Once done using, delete the backend OpenGL texture.
      auto releaseCtx = reinterpret_cast<ReleaseContext*>(address);
      releaseCtx->context->deleteBackendTexture(releaseCtx->texture);
    };

    // 4. Wrap the newly created texture as an SkSurface
    SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
    _offscreenSurface = SkSurfaces::WrapBackendTexture(skiaContext.get(),
                                                       backendTexture,
                                                       kBottomLeft_GrSurfaceOrigin,
                                                       0,
                                                       SkColorType::kN32_SkColorType,
                                                       nullptr,
                                                       &props,
                                                       releaseProc,
                                                       releaseCtx);
    if (_offscreenSurface == nullptr) [[unlikely]] {
      throw std::runtime_error("Failed to create offscreen Skia Surface!");
    }
  }
  if (_previewSurface != nullptr) {
    ANativeWindow_release(_previewSurface);
    _previewSurface = nullptr;
  }

  return _offscreenSurface;
}
void SkiaRenderer::setOutputSurfaceSize(int width, int height) {
  _previewWidth = width;
  _previewHeight = height;
}

OpenGLTexture SkiaRenderer::renderTextureToOffscreenSurface(OpenGLContext& glContext,
                                                            OpenGLTexture& texture,
                                                            float* transformMatrix,
                                                            const DrawCallback& drawCallback) {
  // 1. Activate the OpenGL context (eglMakeCurrent)
  glContext.use();

void SkiaRenderer::setInputTextureSize(int width, int height) {
  _inputWidth = width;
  _inputHeight = height;
}

  // 2. Initialize Skia
  sk_sp<GrDirectContext> skiaContext = getSkiaContext();

void SkiaRenderer::renderLatestFrameToPreview() {
  __android_log_print(ANDROID_LOG_INFO, TAG, "renderLatestFrameToPreview()");
  if (_previewSurface == nullptr) {
    throw std::runtime_error("Cannot render latest frame to preview without a preview surface! "
                             "renderLatestFrameToPreview() needs to be called after setPreviewSurface().");

  // 3. Create the offscreen Skia Surface
  sk_sp<SkSurface> surface = getOffscreenSurface(texture.width, texture.height);

  // 4. Wrap the input texture as an image so we can draw it to the surface
  sk_sp<SkImage> frame = wrapTextureAsImage(texture);

  // 5. Prepare the Canvas
  SkCanvas* canvas = _offscreenSurface->getCanvas();
  if (canvas == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to get Skia Canvas!");
  }
  return;
  if (_inputSurfaceTextureId == NO_INPUT_TEXTURE) {
    throw std::runtime_error("Cannot render latest frame to preview without an input texture! "
                             "renderLatestFrameToPreview() needs to be called after prepareInputTexture().");
  }
  ensureOpenGL(_previewSurface);

  if (_skiaContext == nullptr) {
    _skiaContext = GrDirectContext::MakeGL();
  }
  _skiaContext->resetContext();

  GrGLTextureInfo textureInfo {
    // OpenGL will automatically convert YUV -> RGB because it's an EXTERNAL texture
    .fTarget = GR_GL_TEXTURE_EXTERNAL,
    .fID = _inputSurfaceTextureId,
    .fFormat = GR_GL_RGBA8,
    .fProtected = skgpu::Protected::kNo,
  };
  GrBackendTexture texture(_inputWidth,
                           _inputHeight,
                           GrMipMapped::kNo,
                           textureInfo);
  sk_sp<SkImage> frame = SkImages::AdoptTextureFrom(_skiaContext.get(),
                                                    texture,
                                                    kTopLeft_GrSurfaceOrigin,
                                                    kN32_SkColorType,
                                                    kOpaque_SkAlphaType);

  GrGLFramebufferInfo fboInfo {
    // FBO #0 is the currently active OpenGL Surface (eglMakeCurrent)
    .fFBOID = ACTIVE_SURFACE_ID,
    .fFormat = GR_GL_RGBA8,
    .fProtected = skgpu::Protected::kNo,
  };
  GrBackendRenderTarget renderTarget(_previewWidth,
                                     _previewHeight,
                                     0,
                                     8,
                                     fboInfo);
  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(),
                                                                 renderTarget,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 kN32_SkColorType,
                                                                 nullptr,
                                                                 &props);

  __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering %ix%i Frame to %ix%i Preview..", frame->width(), frame->height(), surface->width(), surface->height());

  auto canvas = surface->getCanvas();
  // TODO: Apply Matrix. No idea how though.
  SkM44 matrix = SkM44::ColMajor(transformMatrix);

  // 6. Render it!
  canvas->clear(SkColors::kBlack);

  auto duration = std::chrono::system_clock::now().time_since_epoch();
  auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();

  canvas->drawImage(frame, 0, 0);

  // TODO: Run Skia Frame Processor
  auto rect = SkRect::MakeXYWH(150, 250, millis % 3000 / 10, millis % 3000 / 10);
  auto paint = SkPaint();
  paint.setColor(SkColors::kRed);
  canvas->drawRect(rect, paint);
  drawCallback(canvas);

  // Flush
  canvas->flush();
  // 8. Flush all Skia operations to OpenGL
  _offscreenSurface->flushAndSubmit();

  bool successful = eglSwapBuffers(_glDisplay, _glSurface);
  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to swap OpenGL buffers!");
  return OpenGLTexture {
    .id = _offscreenSurfaceTextureId,
    .target = GL_TEXTURE_2D,
    .width = texture.width,
    .height = texture.height,
  };
}

void SkiaRenderer::renderTextureToSurface(OpenGLContext& glContext, OpenGLTexture& texture, EGLSurface surface) {
  // 1. Activate the OpenGL context (eglMakeCurrent)
  glContext.use(surface);

void SkiaRenderer::renderCameraFrameToOffscreenCanvas(jni::JByteBuffer yBuffer,
                                                      jni::JByteBuffer uBuffer,
                                                      jni::JByteBuffer vBuffer) {
  __android_log_print(ANDROID_LOG_INFO, TAG, "Begin render...");
  ensureOpenGL(_previewSurface);
  if (_skiaContext == nullptr) {
    _skiaContext = GrDirectContext::MakeGL();

  // 2. Initialize Skia
  sk_sp<GrDirectContext> skiaContext = getSkiaContext();

  // 3. Wrap the output EGLSurface in a Skia SkSurface
  sk_sp<SkSurface> skSurface = wrapEglSurfaceAsSurface(surface);

  // 4. Wrap the input texture in a Skia SkImage
  sk_sp<SkImage> frame = wrapTextureAsImage(texture);

  // 5. Prepare the Canvas!
  SkCanvas* canvas = skSurface->getCanvas();
  if (canvas == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to get Skia Canvas!");
  }
  _skiaContext->resetContext();

  // See https://en.wikipedia.org/wiki/Chroma_subsampling - we're in 4:2:0
  size_t bytesPerRow = sizeof(uint8_t) * _inputWidth;

  SkImageInfo yInfo = SkImageInfo::MakeA8(_inputWidth, _inputHeight);
  SkPixmap yPixmap(yInfo, yBuffer.getDirectAddress(), bytesPerRow);

  SkImageInfo uInfo = SkImageInfo::MakeA8(_inputWidth / 2, _inputHeight / 2);
  SkPixmap uPixmap(uInfo, uBuffer.getDirectAddress(), bytesPerRow / 2);

  SkImageInfo vInfo = SkImageInfo::MakeA8(_inputWidth / 2, _inputHeight / 2);
  SkPixmap vPixmap(vInfo, vBuffer.getDirectAddress(), bytesPerRow / 2);

  SkYUVAInfo info(SkISize::Make(_inputWidth, _inputHeight),
                  SkYUVAInfo::PlaneConfig::kY_U_V,
                  SkYUVAInfo::Subsampling::k420,
                  SkYUVColorSpace::kRec709_Limited_SkYUVColorSpace);
  SkPixmap externalPixmaps[3] = { yPixmap, uPixmap, vPixmap };
  SkYUVAPixmaps pixmaps = SkYUVAPixmaps::FromExternalPixmaps(info, externalPixmaps);

  sk_sp<SkImage> image = SkImages::TextureFromYUVAPixmaps(_skiaContext.get(), pixmaps);
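  // Worked example (illustrative numbers, not from this commit): plane sizes for a
  // 1920x1080 frame in 4:2:0. Y is full resolution, U and V are quarter size:
  //   constexpr int width = 1920, height = 1080;
  //   constexpr size_t ySize = width * height;              // 2,073,600 bytes
  //   constexpr size_t uSize = (width / 2) * (height / 2);  //   518,400 bytes
  //   constexpr size_t vSize = uSize;                       //   518,400 bytes
  //   static_assert(ySize + uSize + vSize == width * height * 3 / 2); // 1.5 bytes/pixel on average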

  GrGLFramebufferInfo fboInfo {
    // FBO #0 is the currently active OpenGL Surface (eglMakeCurrent)
    .fFBOID = ACTIVE_SURFACE_ID,
    .fFormat = GR_GL_RGBA8,
    .fProtected = skgpu::Protected::kNo,
  };
  GrBackendRenderTarget renderTarget(_previewWidth,
                                     _previewHeight,
                                     0,
                                     8,
                                     fboInfo);
  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(),
                                                                 renderTarget,
                                                                 kTopLeft_GrSurfaceOrigin,
                                                                 kN32_SkColorType,
                                                                 nullptr,
                                                                 &props);

  auto canvas = surface->getCanvas();

  // 6. Render it!
  canvas->clear(SkColors::kBlack);
  canvas->drawImage(frame, 0, 0);

  auto duration = std::chrono::system_clock::now().time_since_epoch();
  auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
  // 7. Flush all Skia operations to OpenGL
  skSurface->flushAndSubmit();

  canvas->drawImage(image, 0, 0);

  // TODO: Run Skia Frame Processor
  auto rect = SkRect::MakeXYWH(150, 250, millis % 3000 / 10, millis % 3000 / 10);
  auto paint = SkPaint();
  paint.setColor(SkColors::kRed);
  canvas->drawRect(rect, paint);

  // Flush
  canvas->flush();

  bool successful = eglSwapBuffers(_glDisplay, _glSurface);
  if (!successful || eglGetError() != EGL_SUCCESS) throw OpenGLError("Failed to swap OpenGL buffers!");

  __android_log_print(ANDROID_LOG_INFO, TAG, "Rendered!");
}

void SkiaRenderer::registerNatives() {
  registerHybrid({
    makeNativeMethod("initHybrid", SkiaRenderer::initHybrid),
    makeNativeMethod("setInputTextureSize", SkiaRenderer::setInputTextureSize),
    makeNativeMethod("setOutputSurface", SkiaRenderer::setOutputSurface),
    makeNativeMethod("destroyOutputSurface", SkiaRenderer::destroyOutputSurface),
    makeNativeMethod("setOutputSurfaceSize", SkiaRenderer::setOutputSurfaceSize),
    makeNativeMethod("renderLatestFrameToPreview", SkiaRenderer::renderLatestFrameToPreview),
    makeNativeMethod("renderCameraFrameToOffscreenCanvas", SkiaRenderer::renderCameraFrameToOffscreenCanvas),
  });
  // 8. Swap the buffers so the onscreen surface gets updated.
  glContext.flush();
}

} // namespace vision
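Putting the new API together, the caller presumably drives the renderer roughly like this per frame — a sketch under assumptions; the actual call sites live in VideoPipeline.cpp, which this section does not show, and all names below are illustrative:

// Sketch: one frame through the new SkiaRenderer API (illustrative only).
void renderFrame(SkiaRenderer& renderer,
                 OpenGLContext& glContext,
                 OpenGLTexture& cameraTexture,
                 float* transformMatrix,
                 EGLSurface previewSurface,
                 JSkiaFrameProcessor* frameProcessor) {
  // 1. Draw the camera frame offscreen and let the JS Frame Processor draw on top.
  OpenGLTexture drawn = renderer.renderTextureToOffscreenSurface(
      glContext, cameraTexture, transformMatrix,
      [&](SkCanvas* canvas) {
        if (frameProcessor != nullptr) {
          // `frame` would be the JFrame for this camera image (not shown here):
          // frameProcessor->call(frame, canvas);
        }
      });
  // 2. Blit the drawn-on texture to the preview surface.
  renderer.renderTextureToSurface(glContext, drawn, previewSurface);
}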

@@ -6,72 +6,63 @@

#if VISION_CAMERA_ENABLE_SKIA

#include <jni.h>
#include <fbjni/fbjni.h>
#include <fbjni/ByteBuffer.h>

#include <GLES2/gl2.h>
#include <EGL/egl.h>
#include <include/core/SkSurface.h>
#include <android/native_window.h>

#include <include/core/SkSurface.h>
#include <include/gpu/GrDirectContext.h>

#include "OpenGLContext.h"
#include "OpenGLTexture.h"

namespace vision {

using namespace facebook;
#define NO_TEXTURE 0

#define NO_INPUT_TEXTURE 7654321
using DrawCallback = std::function<void(SkCanvas*)>;

class SkiaRenderer: public jni::HybridClass<SkiaRenderer> {
  // JNI Stuff
class SkiaRenderer {
 public:
  static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/skia/SkiaRenderer;";
  static void registerNatives();

 private:
  friend HybridBase;
  jni::global_ref<SkiaRenderer::javaobject> _javaPart;
  explicit SkiaRenderer(const jni::alias_ref<jhybridobject>& javaPart);

 public:
  static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> javaPart);
  /**
   * Create a new Skia renderer. The OpenGL context needs to be set up outside of this class
   * so that the Skia renderer can use the global OpenGL context.
   */
  explicit SkiaRenderer() {}
  ~SkiaRenderer();

 private:
  // Input Texture (Camera)
  void setInputTextureSize(int width, int height);
  // Output Surface (Preview)
  void setOutputSurface(jobject previewSurface);
  void destroyOutputSurface();
  void setOutputSurfaceSize(int width, int height);
  /**
   * Renders the given Texture (might be a Camera Frame) to a cached offscreen Texture using Skia.
   *
   * @returns The texture that was rendered to.
   */
  OpenGLTexture renderTextureToOffscreenSurface(OpenGLContext& glContext,
                                                OpenGLTexture& texture,
                                                float* transformMatrix,
                                                const DrawCallback& drawCallback);

  /**
   * Renders the latest Camera Frame from the Input Texture onto the Preview Surface. (60 FPS)
   * Renders the given texture to the target output surface using Skia.
   */
  void renderLatestFrameToPreview();
  /**
   * Renders the latest Camera Frame into its Input Texture and runs the Skia Frame Processor (1..240 FPS)
   */
  void renderCameraFrameToOffscreenCanvas(jni::JByteBuffer yBuffer,
                                          jni::JByteBuffer uBuffer,
                                          jni::JByteBuffer vBuffer);
  void renderTextureToSurface(OpenGLContext& glContext,
                              OpenGLTexture& texture,
                              EGLSurface surface);

 private:
  // Gets or creates the Skia context.
  sk_sp<GrDirectContext> getSkiaContext();
  // Wraps a Texture as an SkImage allowing you to draw it
  sk_sp<SkImage> wrapTextureAsImage(OpenGLTexture& texture);
  // Wraps an EGLSurface as an SkSurface allowing you to draw into it
  sk_sp<SkSurface> wrapEglSurfaceAsSurface(EGLSurface eglSurface);
  // Gets or creates an off-screen surface that you can draw into
  sk_sp<SkSurface> getOffscreenSurface(int width, int height);

 private:
  // OpenGL Context
  EGLContext _glContext = EGL_NO_CONTEXT;
  EGLDisplay _glDisplay = EGL_NO_DISPLAY;
  EGLSurface _glSurface = EGL_NO_SURFACE;
  EGLConfig _glConfig = nullptr;
  // Skia Context
  sk_sp<GrDirectContext> _skiaContext;

  // Input Texture (Camera/Offscreen)
  GLuint _inputSurfaceTextureId = NO_INPUT_TEXTURE;
  int _inputWidth, _inputHeight;
  // Output Texture (Surface/Preview)
  ANativeWindow* _previewSurface;
  int _previewWidth, _previewHeight;

  void ensureOpenGL(ANativeWindow* surface);
  sk_sp<GrDirectContext> _skiaContext = nullptr;
  sk_sp<SkSurface> _offscreenSurface = nullptr;
  GLuint _offscreenSurfaceTextureId = NO_TEXTURE;

  static auto constexpr TAG = "SkiaRenderer";
};

8  android/src/main/cpp/skia/VisionCameraSkiaContext.cpp  Normal file
@@ -0,0 +1,8 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#include "VisionCameraSkiaContext.h"

namespace vision {
} // namespace vision
52  android/src/main/cpp/skia/VisionCameraSkiaContext.h  Normal file
@@ -0,0 +1,52 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#pragma once

#include <jsi/jsi.h>
#include <RNSkPlatformContext.h>

namespace vision {

using namespace facebook;

class VisionCameraSkiaContext: public RNSkia::RNSkPlatformContext {
 public:
  VisionCameraSkiaContext(jsi::Runtime* runtime,
                          std::shared_ptr<react::CallInvoker> callInvoker,
                          float pixelDensity)
    : RNSkia::RNSkPlatformContext(runtime, callInvoker, pixelDensity) { }

  void raiseError(const std::exception& err) override {
    throw std::runtime_error("VisionCameraSkiaContext Error: " + std::string(err.what()));
  }

  void performStreamOperation(
      const std::string& sourceUri,
      const std::function<void(std::unique_ptr<SkStreamAsset>)>& op) override {
    throw std::runtime_error("VisionCameraSkiaContext::performStreamOperation is not yet implemented!");
  }

  sk_sp<SkSurface> makeOffscreenSurface(int width, int height) override {
    throw std::runtime_error("VisionCameraSkiaContext::makeOffscreenSurface is not yet implemented!");
  }

  void runOnMainThread(std::function<void()> task) override {
    throw std::runtime_error("VisionCameraSkiaContext::runOnMainThread is not yet implemented!");
  }

  sk_sp<SkImage> takeScreenshotFromViewTag(size_t tag) override {
    throw std::runtime_error("VisionCameraSkiaContext::takeScreenshotFromViewTag is not yet implemented!");
  }

  void startDrawLoop() override {
    throw std::runtime_error("VisionCameraSkiaContext::startDrawLoop is not yet implemented!");
  }

  void stopDrawLoop() override {
    throw std::runtime_error("VisionCameraSkiaContext::stopDrawLoop is not yet implemented!");
  }
};

} // namespace vision
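Most overrides here are stubs; runOnMainThread is one that could be filled in with pieces this class already receives. A sketch, assuming the class keeps its own copy of the CallInvoker passed into the constructor (_callInvoker is a hypothetical member, not part of this commit):

// Sketch: a possible runOnMainThread implementation (assumed, not in this commit).
void runOnMainThread(std::function<void()> task) override {
  // CallInvoker::invokeAsync schedules the task on the JS/main call queue.
  _callInvoker->invokeAsync(std::move(task));
}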

@@ -200,8 +200,8 @@ class CameraSession(private val context: Context,
  private fun updateVideoOutputs() {
    val videoPipeline = outputs?.videoOutput?.videoPipeline ?: return
    val previewOutput = outputs?.previewOutput
    videoPipeline.setRecordingSessionOutput(this.recording)
    videoPipeline.setFrameProcessorOutput(this.frameProcessor)
    videoPipeline.setRecordingSessionOutput(recording)
    videoPipeline.setFrameProcessorOutput(frameProcessor)
    videoPipeline.setPreviewOutput(previewOutput?.surface)
  }

@@ -19,16 +19,12 @@ import com.mrousavy.camera.extensions.installHierarchyFitter
import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.PreviewType
import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoStabilizationMode
import com.mrousavy.camera.skia.SkiaPreviewView
import com.mrousavy.camera.skia.SkiaRenderer
import com.mrousavy.camera.utils.outputs.CameraOutputs
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import java.io.Closeable

//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.

@@ -52,7 +48,7 @@ class CameraView(context: Context) : FrameLayout(context) {
  companion object {
    const val TAG = "CameraView"

    private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId", "previewType")
    private val propsThatRequirePreviewReconfiguration = arrayListOf("cameraId")
    private val propsThatRequireSessionReconfiguration = arrayListOf("cameraId", "format", "photo", "video", "enableFrameProcessor", "pixelFormat")
    private val propsThatRequireFormatReconfiguration = arrayListOf("fps", "hdr", "videoStabilizationMode", "lowLightBoost")
  }

@@ -75,7 +71,6 @@ class CameraView(context: Context) : FrameLayout(context) {
  var videoStabilizationMode: VideoStabilizationMode? = null
  var hdr: Boolean? = null // nullable bool
  var lowLightBoost: Boolean? = null // nullable bool
  var previewType: PreviewType = PreviewType.NONE
  // other props
  var isActive = false
  var torch: Torch = Torch.OFF

@@ -92,11 +87,10 @@ class CameraView(context: Context) : FrameLayout(context) {
  private var previewView: View? = null
  private var previewSurface: Surface? = null

  private var skiaRenderer: SkiaRenderer? = null
  internal var frameProcessor: FrameProcessor? = null
    set(value) {
      field = value
      cameraSession.frameProcessor = frameProcessor
      cameraSession.frameProcessor = value
    }

  private val inputOrientation: Orientation

@@ -130,34 +124,17 @@ class CameraView(context: Context) : FrameLayout(context) {
  }

  private fun setupPreviewView() {
    this.previewView?.let { previewView ->
      removeView(previewView)
      if (previewView is Closeable) previewView.close()
    }
    removeView(previewView)
    this.previewSurface = null

    when (previewType) {
      PreviewType.NONE -> {
        // Do nothing.
      }
      PreviewType.NATIVE -> {
        val cameraId = cameraId ?: throw NoCameraDeviceError()
        this.previewView = NativePreviewView(context, cameraManager, cameraId) { surface ->
          previewSurface = surface
          configureSession()
        }
      }
      PreviewType.SKIA -> {
        if (skiaRenderer == null) skiaRenderer = SkiaRenderer()
        this.previewView = SkiaPreviewView(context, skiaRenderer!!)
        configureSession()
      }
    }

    this.previewView?.let { previewView ->
      previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
      addView(previewView)
    val cameraId = cameraId ?: return
    val previewView = NativePreviewView(context, cameraManager, cameraId) { surface ->
      previewSurface = surface
      configureSession()
    }
    previewView.layoutParams = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
    addView(previewView)
    this.previewView = previewView
  }

  fun update(changedProps: ArrayList<String>) {

@@ -218,8 +195,6 @@ class CameraView(context: Context) : FrameLayout(context) {
    // TODO: Allow previewSurface to be null/none
    val previewSurface = previewSurface ?: return

    if (targetVideoSize != null) skiaRenderer?.setInputSurfaceSize(targetVideoSize.width, targetVideoSize.height)

    val previewOutput = CameraOutputs.PreviewOutput(previewSurface)
    val photoOutput = if (photo == true) {
      CameraOutputs.PhotoOutput(targetPhotoSize)

@@ -7,7 +7,6 @@ import com.facebook.react.uimanager.ViewGroupManager
import com.facebook.react.uimanager.annotations.ReactProp
import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.PreviewType
import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoStabilizationMode

@@ -102,14 +101,6 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
    view.videoStabilizationMode = newMode
  }

  @ReactProp(name = "previewType")
  fun setPreviewType(view: CameraView, previewType: String) {
    val newMode = PreviewType.fromUnionValue(previewType)
    if (view.previewType != newMode)
      addChangedPropToTransaction(view, "previewType")
    view.previewType = newMode
  }

  @ReactProp(name = "enableHighQualityPhotos")
  fun setEnableHighQualityPhotos(view: CameraView, enableHighQualityPhotos: Boolean?) {
    if (view.enableHighQualityPhotos != enableHighQualityPhotos)
@@ -1,147 +1,66 @@
package com.mrousavy.camera.frameprocessor;

import android.graphics.ImageFormat;
import android.media.Image;
import com.facebook.proguard.annotations.DoNotStrip;
import com.mrousavy.camera.parsers.PixelFormat;
import com.mrousavy.camera.parsers.Orientation;
import com.facebook.jni.HybridData;

import java.nio.ByteBuffer;

/** @noinspection JavaJniMissingFunction*/
public class Frame {
  private final Image image;
  private final boolean isMirrored;
  private final long timestamp;
  private final Orientation orientation;
  private int refCount = 0;
  private final HybridData mHybridData;

  public Frame(Image image, long timestamp, Orientation orientation, boolean isMirrored) {
    this.image = image;
    this.timestamp = timestamp;
    this.orientation = orientation;
    this.isMirrored = isMirrored;
  private Frame(HybridData hybridData) {
    mHybridData = hybridData;
  }

  public Image getImage() {
    return image;
  @Override
  protected void finalize() throws Throwable {
    super.finalize();
    mHybridData.resetNative();
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public int getWidth() {
    return image.getWidth();
  }
  /**
   * Get the width of the Frame, in its sensor orientation. (in pixels)
   */
  public native int getWidth();
  /**
   * Get the height of the Frame, in its sensor orientation. (in pixels)
   */
  public native int getHeight();
  /**
   * Get the number of bytes per row.
   * * To get the number of components per pixel you can divide this by the Frame's width.
   * * To get the total size of the byte buffer you can multiply this by the Frame's height.
   */
  public native int getBytesPerRow();
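  // For example, a 1920-pixel-wide RGBA frame would typically report
  // getBytesPerRow() = 1920 * 4 = 7680, and the full buffer is 7680 * height bytes
  // (about 8.3 MB at 1080p). Rows may be padded for alignment, so bytesPerRow can
  // exceed width * bytes-per-pixel. (Illustrative numbers, not values from this commit.)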
  /**
   * Get the local timestamp of this Frame. This is always monotonically increasing for each Frame.
   */
  public native long getTimestamp();
  /**
   * Get the Orientation of this Frame. The return value is the result of `Orientation.toUnionValue()`.
   */
  public native String getOrientation();
  /**
   * Return whether this Frame is mirrored or not. Frames from the front-facing Camera are often mirrored.
   */
  public native boolean getIsMirrored();
  /**
   * Get the pixel-format of this Frame. The return value is the result of `PixelFormat.toUnionValue()`.
   */
  public native String getPixelFormat();
  /**
   * Get the actual backing pixel data of this Frame using a zero-copy C++ ByteBuffer.
   */
  public native ByteBuffer getByteBuffer();
  /**
   * Get whether this Frame is still valid.
   * A Frame is valid as long as it hasn't been closed by the Frame Processor Runtime Manager
   * (either because it ran out of Frames in its queue and needs to close old ones, or because
   * a Frame Processor finished executing and you're still trying to hold onto this Frame in native)
   */
  public native boolean getIsValid();

  @SuppressWarnings("unused")
  @DoNotStrip
  public int getHeight() {
    return image.getHeight();
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public boolean getIsValid() {
    try {
      // will throw an exception if the image is already closed
      image.getCropRect();
      // no exception thrown, image must still be valid.
      return true;
    } catch (Exception e) {
      // exception thrown, image has already been closed.
      return false;
    }
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public boolean getIsMirrored() {
    return isMirrored;
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public long getTimestamp() {
    return timestamp;
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public String getOrientation() {
    return orientation.getUnionValue();
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public String getPixelFormat() {
    PixelFormat format = PixelFormat.Companion.fromImageFormat(image.getFormat());
    return format.getUnionValue();
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public int getPlanesCount() {
    return image.getPlanes().length;
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public int getBytesPerRow() {
    return image.getPlanes()[0].getRowStride();
  }

  private static ByteBuffer byteArrayCache;

  @SuppressWarnings("unused")
  @DoNotStrip
  public ByteBuffer toByteBuffer() {
    switch (image.getFormat()) {
      case ImageFormat.YUV_420_888:
        ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
        ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
        ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
        int ySize = yBuffer.remaining();
        int uSize = uBuffer.remaining();
        int vSize = vBuffer.remaining();
        int totalSize = ySize + uSize + vSize;

        if (byteArrayCache != null) byteArrayCache.rewind();
        if (byteArrayCache == null || byteArrayCache.remaining() != totalSize) {
          byteArrayCache = ByteBuffer.allocateDirect(totalSize);
        }

        byteArrayCache.put(yBuffer).put(uBuffer).put(vBuffer);

        return byteArrayCache;
      case ImageFormat.JPEG:
        return image.getPlanes()[0].getBuffer();
      default:
        throw new RuntimeException("Cannot convert Frame with Format " + image.getFormat() + " to byte array!");
    }
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public void incrementRefCount() {
    synchronized (this) {
      refCount++;
    }
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  public void decrementRefCount() {
    synchronized (this) {
      refCount--;
      if (refCount <= 0) {
        // If no reference is held on this Image, close it.
        image.close();
      }
    }
  }

  @SuppressWarnings("unused")
  @DoNotStrip
  private void close() {
    image.close();
  }
  private native void incrementRefCount();
  private native void decrementRefCount();
  private native void close();
}
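getByteBuffer() is documented as zero-copy; on the C++ side that is typically done by wrapping the Frame's existing pixel memory rather than copying it. A sketch of the assumed approach — JFrame's actual implementation is not shown in this diff:

#include <fbjni/ByteBuffer.h>

// Sketch (assumed approach): return a direct ByteBuffer that wraps the Frame's
// existing pixel memory - no copy. The buffer is only valid while the Frame is.
jni::local_ref<jni::JByteBuffer> wrapPixelsAsByteBuffer(uint8_t* pixelData, size_t size) {
  return jni::JByteBuffer::wrapBytes(pixelData, size);
}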

@@ -8,15 +8,9 @@ import com.facebook.jni.HybridData;
import com.facebook.proguard.annotations.DoNotStrip;

/**
 * Represents a JS Frame Processor
 * Represents a JS Frame Processor. Its actual implementation is in NDK/C++.
 */
@SuppressWarnings("JavaJniMissingFunction") // we're using fbjni.
public final class FrameProcessor {
  /**
   * Call the JS Frame Processor function with the given Frame
   */
  public native void call(Frame frame);

public class FrameProcessor {
  @DoNotStrip
  @Keep
  private final HybridData mHybridData;
@@ -0,0 +1,11 @@
package com.mrousavy.camera.skia;

import com.facebook.jni.HybridData;
import com.mrousavy.camera.frameprocessor.FrameProcessor;

public class SkiaFrameProcessor extends FrameProcessor {
  // Implementation is in JSkiaFrameProcessor.cpp
  public SkiaFrameProcessor(HybridData hybridData) {
    super(hybridData);
  }
}

@@ -1,75 +0,0 @@
package com.mrousavy.camera.skia

import android.annotation.SuppressLint
import android.content.Context
import android.util.Log
import android.view.Choreographer
import android.view.SurfaceHolder
import android.view.SurfaceView
import com.mrousavy.camera.extensions.postAndWait

@SuppressLint("ViewConstructor")
class SkiaPreviewView(context: Context,
                      private val skiaRenderer: SkiaRenderer): SurfaceView(context), SurfaceHolder.Callback {
  companion object {
    private const val TAG = "SkiaPreviewView"
  }

  private var isAlive = true

  init {
    holder.addCallback(this)
  }

  private fun startLooping(choreographer: Choreographer) {
    choreographer.postFrameCallback {
      synchronized(this) {
        if (!isAlive) return@synchronized

        Log.i(TAG, "tick..")

        // Refresh UI (60 FPS)
        skiaRenderer.onPreviewFrame()
        startLooping(choreographer)
      }
    }
  }

  override fun surfaceCreated(holder: SurfaceHolder) {
    synchronized(this) {
      Log.i(TAG, "onSurfaceCreated(..)")

      skiaRenderer.thread.postAndWait {
        // Create C++ part (OpenGL/Skia context)
        skiaRenderer.setPreviewSurface(holder.surface)
        isAlive = true

        // Start updating the Preview View (~60 FPS)
        startLooping(Choreographer.getInstance())
      }
    }
  }

  override fun surfaceChanged(holder: SurfaceHolder, format: Int, w: Int, h: Int) {
    synchronized(this) {
      Log.i(TAG, "surfaceChanged($w, $h)")

      skiaRenderer.thread.postAndWait {
        // Update C++ OpenGL Surface size
        skiaRenderer.setPreviewSurfaceSize(w, h)
      }
    }
  }

  override fun surfaceDestroyed(holder: SurfaceHolder) {
    synchronized(this) {
      isAlive = false
      Log.i(TAG, "surfaceDestroyed(..)")

      skiaRenderer.thread.postAndWait {
        // Clean up C++ part (OpenGL/Skia context)
        skiaRenderer.destroyPreviewSurface()
      }
    }
  }
}

@@ -1,98 +0,0 @@
package com.mrousavy.camera.skia

import android.graphics.ImageFormat
import android.view.Surface
import com.facebook.jni.HybridData
import com.facebook.proguard.annotations.DoNotStrip
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.frameprocessor.Frame
import java.io.Closeable
import java.nio.ByteBuffer

@Suppress("KotlinJniMissingFunction")
class SkiaRenderer: Closeable {
  @DoNotStrip
  private var mHybridData: HybridData
  private var hasNewFrame = false
  private var hasOutputSurface = false

  val thread = CameraQueues.previewQueue.handler

  init {
    mHybridData = initHybrid()
  }

  override fun close() {
    hasNewFrame = false
    thread.post {
      synchronized(this) {
        destroyOutputSurface()
        mHybridData.resetNative()
      }
    }
  }

  fun setPreviewSurface(surface: Surface) {
    synchronized(this) {
      setOutputSurface(surface)
      hasOutputSurface = true
    }
  }

  fun setPreviewSurfaceSize(width: Int, height: Int) {
    synchronized(this) {
      setOutputSurfaceSize(width, height)
    }
  }

  fun destroyPreviewSurface() {
    synchronized(this) {
      destroyOutputSurface()
      hasOutputSurface = false
    }
  }

  fun setInputSurfaceSize(width: Int, height: Int) {
    synchronized(this) {
      setInputTextureSize(width, height)
    }
  }

  /**
   * Called on every Camera Frame (1..240 FPS)
   */
  fun onCameraFrame(frame: Frame) {
    synchronized(this) {
      if (!hasOutputSurface) return
      if (frame.image.format != ImageFormat.YUV_420_888) {
        throw Error("Failed to render Camera Frame! Expected Image format #${ImageFormat.YUV_420_888} (ImageFormat.YUV_420_888), received #${frame.image.format}.")
      }
      val (y, u, v) = frame.image.planes
      renderCameraFrameToOffscreenCanvas(y.buffer, u.buffer, v.buffer)
      hasNewFrame = true
    }
  }

  /**
   * Called on every UI Frame (60 FPS)
   */
  fun onPreviewFrame() {
    synchronized(this) {
      if (!hasOutputSurface) return
      if (!hasNewFrame) return
      renderLatestFrameToPreview()
      hasNewFrame = false
    }
  }

  private external fun initHybrid(): HybridData

  private external fun renderCameraFrameToOffscreenCanvas(yBuffer: ByteBuffer,
                                                          uBuffer: ByteBuffer,
                                                          vBuffer: ByteBuffer)
  private external fun renderLatestFrameToPreview()
  private external fun setInputTextureSize(width: Int, height: Int)
  private external fun setOutputSurface(surface: Any)
  private external fun setOutputSurfaceSize(width: Int, height: Int)
  private external fun destroyOutputSurface()
}

@@ -0,0 +1,32 @@
package com.mrousavy.camera.utils

import android.graphics.ImageFormat
import android.media.Image
import android.media.ImageReader
import android.media.ImageWriter
import java.io.Closeable

class ImageCreator(private val width: Int,
                   private val height: Int,
                   private val format: Int = ImageFormat.PRIVATE,
                   private val maxImages: Int = 3): Closeable {
  private var imageReader: ImageReader? = null
  private var imageWriter: ImageWriter? = null

  override fun close() {
    imageWriter?.close()
    imageReader?.close()
  }

  fun createImage(): Image {
    if (imageReader == null || imageWriter == null) {
      imageWriter?.close()
      imageReader?.close()

      imageReader = ImageReader.newInstance(width, height, format, maxImages)
      imageWriter = ImageWriter.newInstance(imageReader!!.surface, maxImages)
    }

    return imageWriter!!.dequeueInputImage()
  }
}
@@ -41,7 +41,6 @@ class RecordingSession(context: Context,
  val surface: Surface = MediaCodec.createPersistentInputSurface()

  init {

    outputFile = File.createTempFile("mrousavy", fileType.toExtension(), context.cacheDir)

    Log.i(TAG, "Creating RecordingSession for ${outputFile.absolutePath}")

@@ -54,7 +53,7 @@
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
    recorder.setOutputFile(outputFile.absolutePath)
    recorder.setVideoEncodingBitRate(VIDEO_BIT_RATE)
    recorder.setVideoSize(size.height, size.width)
    recorder.setVideoSize(size.width, size.height)
    if (fps != null) recorder.setVideoFrameRate(fps)

    Log.i(TAG, "Using $codec Video Codec..")

@@ -67,7 +66,7 @@
      recorder.setAudioChannels(AUDIO_CHANNELS)
    }
    recorder.setInputSurface(surface)
    //recorder.setOrientationHint(orientation.toDegrees())
    recorder.setOrientationHint(orientation.toDegrees())

    recorder.setOnErrorListener { _, what, extra ->
      Log.e(TAG, "MediaRecorder Error: $what ($extra)")

@@ -2,15 +2,12 @@ package com.mrousavy.camera.utils

import android.graphics.ImageFormat
import android.graphics.SurfaceTexture
import android.media.ImageReader
import android.media.ImageWriter
import android.media.MediaRecorder
import android.util.Log
import android.view.Surface
import com.facebook.jni.HybridData
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.parsers.Orientation
import java.io.Closeable

/**

@@ -26,23 +23,24 @@ class VideoPipeline(val width: Int,
                    val height: Int,
                    val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
  companion object {
    private const val MAX_IMAGES = 5
    private const val MAX_IMAGES = 3
    private const val TAG = "VideoPipeline"
  }

  private val mHybridData: HybridData
  private var openGLTextureId: Int? = null
  private var transformMatrix = FloatArray(16)
  private var isActive = true

  // Output 1
  private var frameProcessor: FrameProcessor? = null
  private var imageReader: ImageReader? = null
  // Input Texture
  private var openGLTextureId: Int? = null
  private var transformMatrix = FloatArray(16)

  // Output 2
  // Processing input texture
  private var frameProcessor: FrameProcessor? = null

  // Output 1
  private var recordingSession: RecordingSession? = null

  // Output 3
  // Output 2
  private var previewSurface: Surface? = null

  // Input

@@ -60,8 +58,6 @@ class VideoPipeline(val width: Int,
  override fun close() {
    synchronized(this) {
      isActive = false
      imageReader?.close()
      imageReader = null
      frameProcessor = null
      recordingSession = null
      surfaceTexture.release()

@@ -94,21 +90,6 @@ class VideoPipeline(val width: Int,
    }
  }

  private fun getImageReader(): ImageReader {
    val imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
    imageReader.setOnImageAvailableListener({ reader ->
      Log.i("VideoPipeline", "ImageReader::onImageAvailable!")
      val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener

      // TODO: Get correct orientation and isMirrored
      val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
      frame.incrementRefCount()
      frameProcessor?.call(frame)
      frame.decrementRefCount()
    }, null)
    return imageReader
  }

  /**
   * Configures the Pipeline to also call the given [FrameProcessor].
   * * If the [frameProcessor] is `null`, this output channel will be removed.

@@ -121,20 +102,11 @@ class VideoPipeline(val width: Int,
      this.frameProcessor = frameProcessor

      if (frameProcessor != null) {
        if (this.imageReader == null) {
          // 1. Create new ImageReader that just calls the Frame Processor
          this.imageReader = getImageReader()
        }

        // 2. Configure OpenGL pipeline to stream Frames into the ImageReader's surface
        setFrameProcessorOutputSurface(imageReader!!.surface)
        // Configure OpenGL pipeline to stream Frames into the Frame Processor (CPU pixel access)
        setFrameProcessor(frameProcessor)
      } else {
        // 1. Configure OpenGL pipeline to stop streaming Frames into the ImageReader's surface
        removeFrameProcessorOutputSurface()

        // 2. Close the ImageReader
        this.imageReader?.close()
        this.imageReader = null
        // Configure OpenGL pipeline to stop streaming Frames into a Frame Processor
        removeFrameProcessor()
      }
    }
  }

@@ -175,8 +147,8 @@ class VideoPipeline(val width: Int,
  private external fun getInputTextureId(): Int
  private external fun onBeforeFrame()
  private external fun onFrame(transformMatrix: FloatArray)
  private external fun setFrameProcessorOutputSurface(surface: Any)
  private external fun removeFrameProcessorOutputSurface()
  private external fun setFrameProcessor(frameProcessor: FrameProcessor)
  private external fun removeFrameProcessor()
  private external fun setRecordingSessionOutputSurface(surface: Any)
  private external fun removeRecordingSessionOutputSurface()
  private external fun setPreviewOutputSurface(surface: Any)

@@ -17,9 +17,8 @@ public class ExampleFrameProcessorPlugin extends FrameProcessorPlugin {
  @Override
  public Object callback(@NotNull Frame frame, @Nullable Map<String, Object> params) {
    if (params == null) return null;
    Image image = frame.getImage();

    Log.d("ExamplePlugin", image.getWidth() + " x " + image.getHeight() + " Image with format #" + image.getFormat() + ". Logging " + params.size() + " parameters:");
    Log.d("ExamplePlugin", frame.getWidth() + " x " + frame.getHeight() + " Image with format #" + frame.getPixelFormat() + ". Logging " + params.size() + " parameters:");

    for (String key : params.keySet()) {
      Object value = params.get(key);

@@ -9,6 +9,7 @@ import {
  sortFormats,
  useCameraDevices,
  useFrameProcessor,
  useSkiaFrameProcessor,
  VideoFile,
} from 'react-native-vision-camera';
import { Camera } from 'react-native-vision-camera';

@@ -218,11 +219,15 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
  const paint = Skia.Paint();
  paint.setImageFilter(imageFilter);

  const frameProcessor = useFrameProcessor((frame) => {
  const frameProcessor = useSkiaFrameProcessor((frame) => {
    'worklet';

    const rect = Skia.XYWHRect(150, 150, 300, 300);
    const paint = Skia.Paint();
    paint.setColor(Skia.Color('red'));
    frame.drawRect(rect, paint);

    console.log(frame.timestamp, frame.toString(), frame.pixelFormat);
    examplePlugin(frame);
  }, []);

  return (
@ -18,7 +18,6 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
|
||||
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
|
||||
@@ -176,11 +175,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
     auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
     return jsi::Value((double) bytesPerRow);
   }
-  if (name == "planesCount") {
-    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
-    auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
-    return jsi::Value((double) planesCount);
-  }
 
   // fallback to base implementation
   return HostObject::get(runtime, propName);
@@ -39,15 +39,15 @@ using namespace facebook;
 }
 
 - (void)call:(Frame*)frame {
-  [_skiaRenderer renderCameraFrameToOffscreenCanvas:frame.buffer
+  [_skiaRenderer renderCameraFrameToOffscreenSurface:frame.buffer
                                    withDrawCallback:^(SkiaCanvas _Nonnull canvas) {
     // Create the Frame Host Object wrapping the internal Frame and Skia Canvas
     self->_skiaCanvas->setCanvas(static_cast<SkCanvas*>(canvas));
     auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, self->_skiaCanvas);
 
     // Call JS Frame Processor
     [self callWithFrameHostObject:frameHostObject];
 
     // Remove Skia Canvas from Host Object because it is no longer valid
     frameHostObject->invalidateCanvas();
   }];
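The invalidateCanvas() call above is the important contract here: the canvas injected into the Frame Host Object is only valid synchronously, for the duration of the draw callback. A hedged TypeScript sketch of what that implies for Frame Processor code:

const frameProcessor = useSkiaFrameProcessor((frame) => {
  'worklet';
  // OK: drawing happens synchronously while the renderer still owns the canvas.
  const paint = Skia.Paint();
  paint.setColor(Skia.Color('red'));
  frame.drawRect(Skia.XYWHRect(0, 0, 100, 100), paint);

  // Not OK (illustrative): after this worklet returns, invalidateCanvas()
  // has run and the SkCanvas pointer behind `frame` is gone.
  // runLater(() => frame.drawRect(...));  // hypothetical deferred call
}, []);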
@@ -30,7 +30,7 @@ typedef void(^draw_callback_t)(SkiaCanvas _Nonnull);
  The given callback will be executed with a reference to the Skia Canvas
  for the user to perform draw operations on (in this case, through a JS Frame Processor)
  */
-- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
+- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
 /**
  Renders the latest Frame to the onscreen Layer.
  This should be called everytime you want the UI to update, e.g. for 60 FPS; every 16.66ms.
@@ -35,7 +35,7 @@
   std::unique_ptr<RenderContext> _layerContext;
   // The texture holding the drawn-to Frame
   id<MTLTexture> _texture;
 
   // For synchronization between the two Threads/Contexts
   std::mutex _textureMutex;
   std::atomic<bool> _hasNewFrame;
@@ -70,7 +70,7 @@
   return _texture;
 }
 
-- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
+- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
   // Wrap in auto release pool since we want the system to clean up after rendering
   @autoreleasepool {
     // Get the Frame's PixelBuffer
@@ -87,7 +87,7 @@
                                                    height:CVPixelBufferGetHeight(pixelBuffer)];
 
     // Get & Lock the writeable Texture from the Metal Drawable
 
     GrMtlTextureInfo textureInfo;
     textureInfo.fTexture.retain((__bridge void*)texture);
     GrBackendRenderTarget backendRenderTarget((int)texture.width,
@@ -122,7 +122,7 @@
     // The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
     // but that'll just overwrite the existing one - no need to worry.
     canvas->drawImage(image, 0, 0);
 
     // Call the draw callback - probably a JS Frame Processor.
     callback(static_cast<void*>(canvas));
 
@@ -145,7 +145,7 @@
 
   @autoreleasepool {
     auto context = _layerContext->skiaContext.get();
 
     // Create a Skia Surface from the CAMetalLayer (use to draw to the View)
     GrMTLHandle drawableHandle;
     auto surface = SkSurfaces::WrapCAMetalLayer(context,
@@ -161,14 +161,14 @@
     }
 
     auto canvas = surface->getCanvas();
 
     // Lock the Mutex so we can operate on the Texture atomically without
     // renderFrameToCanvas() overwriting in between from a different thread
     std::unique_lock lock(_textureMutex);
 
     auto texture = _texture;
     if (texture == nil) return;
 
     // Calculate Center Crop (aspectRatio: cover) transform
     auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
     auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
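The "cover" transform computed from sourceRect/destinationRect above scales the texture so it fully covers the surface and centers the overflow. A minimal sketch of the same math in TypeScript (the function and names are mine, not from the source):

// Hedged sketch of a center-crop ("cover") mapping: scale so the source
// fully covers the destination, then center the overflowing dimension.
function centerCrop(
  src: { w: number; h: number },
  dst: { w: number; h: number },
) {
  const scale = Math.max(dst.w / src.w, dst.h / src.h);
  // One offset is always <= 0; the overflow is cropped equally on both sides.
  const offsetX = (dst.w - src.w * scale) / 2;
  const offsetY = (dst.h - src.h * scale) / 2;
  return { scale, offsetX, offsetY };
}

// e.g. a 1920x1080 texture on a 1080x1920 surface:
// scale ≈ 1.778, offsetX ≈ -1167, offsetY = 0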
@@ -202,7 +202,7 @@
     id<MTLCommandBuffer> commandBuffer([_layerContext->commandQueue commandBuffer]);
     [commandBuffer presentDrawable:drawable];
     [commandBuffer commit];
 
     // Set flag back to false
     _hasNewFrame = false;
     lock.unlock();
@@ -22,10 +22,6 @@ export interface Frame {
    * Returns the amount of bytes per row.
    */
   bytesPerRow: number;
-  /**
-   * Returns the number of planes this frame contains.
-   */
-  planesCount: number;
   /**
    * Returns whether the Frame is mirrored (selfie camera) or not.
    */
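With planesCount gone from the Frame interface (its iOS getter was removed above as well), code that inspected planes must rely on the remaining geometry properties. A hedged sketch:

const frameProcessor = useFrameProcessor((frame) => {
  'worklet';
  // planesCount no longer exists; the remaining properties still
  // describe the pixel buffer (rough single-plane size estimate):
  const approxBytes = frame.bytesPerRow * frame.height;
  console.log(`${frame.width}x${frame.height} ${frame.pixelFormat}, ~${approxBytes} bytes`);
}, []);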