chore: Remove Skia ❌🎨 (#1740)
* Revert "feat: Skia for Android (#1731)"
This reverts commit a7c137da07.
* Remove some skia
* Remove all the Skia stuff.
* Update useFrameProcessor.ts
* Update lockfiles
* fix: Use native Preview again
* Use `OpenGLTexture&` again
* Remove `PreviewOutput` (we use `SurfaceView` in parallel)
* fix: Log photo widths
* fix: Fix cpplint
@@ -12,7 +12,7 @@ find_package(ReactAndroid REQUIRED CONFIG)
find_package(fbjni REQUIRED CONFIG)
find_library(LOG_LIB log)

add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS} -DVISION_CAMERA_ENABLE_SKIA=${ENABLE_SKIA} -DEGL_EGLEXT_PROTOTYPES=1)
add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS})


# Add react-native-vision-camera sources
@@ -35,11 +35,6 @@ add_library(
src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
# Skia Frame Processor
src/main/cpp/skia/SkiaRenderer.cpp
src/main/cpp/skia/JSkiaFrameProcessor.cpp
src/main/cpp/skia/DrawableFrameHostObject.cpp
src/main/cpp/skia/VisionCameraSkiaContext.cpp
)

# Header Search Paths (includes)
@@ -50,8 +45,6 @@ target_include_directories(
"src/main/cpp"
"src/main/cpp/frameprocessor"
"src/main/cpp/frameprocessor/java-bindings"
"src/main/cpp/skia"
"src/main/cpp/skia/java-bindings"
"${NODE_MODULES_DIR}/react-native/ReactCommon"
"${NODE_MODULES_DIR}/react-native/ReactCommon/callinvoker"
"${NODE_MODULES_DIR}/react-native/ReactAndroid/src/main/jni/react/turbomodule" # <-- CallInvokerHolder JNI wrapper
@@ -65,6 +58,8 @@ target_link_libraries(
ReactAndroid::jsi # <-- RN: JSI
ReactAndroid::reactnativejni # <-- RN: React Native JNI bindings
fbjni::fbjni # <-- fbjni
GLESv2 # <-- OpenGL (for VideoPipeline)
EGL # <-- OpenGL (EGL) (for VideoPipeline)
)

# Optionally also add Frame Processors here
@@ -75,57 +70,4 @@ if(ENABLE_FRAME_PROCESSORS)
react-native-worklets-core::rnworklets
)
message("VisionCamera: Frame Processors enabled!")

# Optionally also add Skia Integration here
if(ENABLE_SKIA)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSK_GL -DSK_GANESH -DSK_BUILD_FOR_ANDROID")

find_package(shopify_react-native-skia REQUIRED CONFIG)

set(SKIA_PACKAGE shopify_react-native-skia::rnskia)
set(RNSKIA_PATH ${NODE_MODULES_DIR}/@shopify/react-native-skia)
set (SKIA_LIBS_PATH "${RNSKIA_PATH}/libs/android/${ANDROID_ABI}")
add_library(skia STATIC IMPORTED)
set_property(TARGET skia PROPERTY IMPORTED_LOCATION "${SKIA_LIBS_PATH}/libskia.a")
add_library(svg STATIC IMPORTED)
set_property(TARGET svg PROPERTY IMPORTED_LOCATION "${SKIA_LIBS_PATH}/libsvg.a")
add_library(skshaper STATIC IMPORTED)
set_property(TARGET skshaper PROPERTY IMPORTED_LOCATION "${SKIA_LIBS_PATH}/libskshaper.a")

# We need to include the headers from skia
# (Note: rnskia includes all their files without any relative path
# so for example "include/core/SkImage.h" becomes #include "SkImage.h".
# That's why for the prefab of rnskia, we flatten all cpp files into
# just one directory. HOWEVER, skia itself uses relative paths in
# their include statements, and so we have to include the path to skia)
target_include_directories(
${PACKAGE_NAME}
PRIVATE
"${RNSKIA_PATH}/cpp/api/"
"${RNSKIA_PATH}/cpp/jsi/"
"${RNSKIA_PATH}/cpp/rnskia/"
"${RNSKIA_PATH}/cpp/skia"
"${RNSKIA_PATH}/cpp/skia/include/"
"${RNSKIA_PATH}/cpp/skia/include/config/"
"${RNSKIA_PATH}/cpp/skia/include/core/"
"${RNSKIA_PATH}/cpp/skia/include/effects/"
"${RNSKIA_PATH}/cpp/skia/include/utils/"
"${RNSKIA_PATH}/cpp/skia/include/pathops/"
"${RNSKIA_PATH}/cpp/skia/modules/"
"${RNSKIA_PATH}/cpp/utils/"
)

target_link_libraries(
${PACKAGE_NAME}
GLESv2 # <-- Optional: OpenGL (for Skia)
EGL # <-- Optional: OpenGL (EGL) (for Skia)
${SKIA_PACKAGE} # <-- Optional: RN Skia
jnigraphics
skia
svg
skshaper
)

message("VisionCamera: Skia enabled!")
endif()
endif()
@@ -66,10 +66,7 @@ static def findNodeModules(baseDir) {
def nodeModules = findNodeModules(projectDir)

def hasWorklets = !safeExtGet("VisionCamera_disableFrameProcessors", false) && findProject(":react-native-worklets-core") != null
def hasSkia = !safeExtGet("VisionCamera_disableSkia", false) && findProject(":shopify_react-native-skia") != null

logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? "enabled" : "disabled"}!")
logger.warn("[VisionCamera] react-native-skia ${hasSkia ? "found" : "not found"}, Skia Frame Processors ${hasSkia ? "enabled" : "disabled"}!")

repositories {
google()
@@ -105,8 +102,7 @@ android {
cppFlags "-O2 -frtti -fexceptions -Wall -Wno-unused-variable -fstack-protector-all"
arguments "-DANDROID_STL=c++_shared",
"-DNODE_MODULES_DIR=${nodeModules}",
"-DENABLE_FRAME_PROCESSORS=${hasWorklets}",
"-DENABLE_SKIA=${hasWorklets && hasSkia}"
"-DENABLE_FRAME_PROCESSORS=${hasWorklets}"
abiFilters (*reactNativeArchitectures())
}
}
@@ -152,11 +148,6 @@ dependencies {
if (hasWorklets) {
// Frame Processor integration (optional)
implementation project(":react-native-worklets-core")

if (hasSkia) {
// Skia Frame Processor integration (optional)
implementation project(":shopify_react-native-skia")
}
}
}
@@ -10,7 +10,6 @@

#include <android/native_window.h>
#include <android/log.h>
#include <chrono>

#include "OpenGLError.h"

@@ -151,19 +150,4 @@ OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width,
};
}

void OpenGLContext::getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels) {
glActiveTexture(GL_TEXTURE0);
glBindTexture(texture.target, texture.id);
glReadPixels(0, 0, texture.width, texture.height, GL_RGBA, GL_UNSIGNED_BYTE, *outPixels);
// height * width * components per pixel (4 for RGBA) * size of one number (byte)
*outSize = texture.height * texture.width * 4 * sizeof(uint8_t);
}

long OpenGLContext::getCurrentPresentationTime() {
auto now = std::chrono::steady_clock::now();
auto duration = now.time_since_epoch();
long long milliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
return static_cast<long>(milliseconds);
}

} // namespace vision
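
A note on the removed getPixelsOfTexture(): glReadPixels reads from the currently bound framebuffer, not from the texture bound with glBindTexture, so the helper implicitly relied on the texture being attached to the active FBO. A hedged sketch of a more explicit CPU read-back, attaching the texture to a temporary framebuffer first (an illustration, not VisionCamera code; assumes a GL_TEXTURE_2D):

    #include <GLES2/gl2.h>
    #include <cstdint>
    #include <vector>

    // Hedged sketch: read back an RGBA texture via a temporary FBO.
    std::vector<uint8_t> readTexturePixels(GLuint tex, int width, int height) {
      GLuint fbo = 0;
      glGenFramebuffers(1, &fbo);
      glBindFramebuffer(GL_FRAMEBUFFER, fbo);
      glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex, 0);

      // 4 bytes per pixel for GL_RGBA / GL_UNSIGNED_BYTE — the same size math
      // as the removed getPixelsOfTexture() (width * height * 4).
      std::vector<uint8_t> pixels(static_cast<size_t>(width) * height * 4);
      glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());

      glBindFramebuffer(GL_FRAMEBUFFER, 0);
      glDeleteFramebuffers(1, &fbo);
      return pixels;
    }
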
@@ -6,13 +6,12 @@

#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include "OpenGLTexture.h"

#include <memory>
#include <functional>
#include <chrono>

#include "PassThroughShader.h"
#include "OpenGLTexture.h"

namespace vision {

@@ -53,16 +52,6 @@ class OpenGLContext {
*/
OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);

/**
* Gets the pixels as CPU accessible memory of the given input texture
*/
void getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels);

/**
* Gets the current presentation time for this OpenGL surface.
*/
long getCurrentPresentationTime();

public:
EGLDisplay display = EGL_NO_DISPLAY;
EGLContext context = EGL_NO_CONTEXT;
@@ -70,13 +59,12 @@ class OpenGLContext {
EGLConfig config = nullptr;

private:
explicit OpenGLContext() = default;
OpenGLContext() = default;
void destroy();
void ensureOpenGL();

private:
PassThroughShader _passThroughShader;
std::chrono::time_point<std::chrono::system_clock> _startTime;

private:
static constexpr auto TAG = "OpenGLContext";
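
One subtle change in this hunk: the defaulted constructor drops its explicit. For a zero-argument constructor, explicit only affects copy-list-initialization; a small illustrative snippet (not from the library) shows the difference:

    struct Explicit { explicit Explicit() = default; };
    struct Plain    { Plain() = default; };

    Explicit a;         // OK: direct-initialization
    // Explicit b = {}; // error: copy-list-init cannot use an explicit constructor
    Plain c;            // OK
    Plain d = {};       // OK
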
@@ -43,35 +43,32 @@ void OpenGLRenderer::destroy() {
}
}

EGLSurface OpenGLRenderer::getEGLSurface() {
void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
if (_surface == EGL_NO_SURFACE) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
_context->use();
_surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
}
return _surface;
}

void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
// 1. Get (or create) the OpenGL EGLSurface which is the window render target (Android Surface)
EGLSurface surface = getEGLSurface();
// 1. Activate the OpenGL context for this surface
_context->use(_surface);

// 2. Activate the OpenGL context for this surface
_context->use(surface);
OpenGLError::checkIfError("Failed to use context!");

// 3. Set the viewport for rendering
// 2. Set the viewport for rendering
glViewport(0, 0, _width, _height);
glDisable(GL_BLEND);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);

// 4. Draw it using the pass-through shader which binds the texture and applies transforms
// 3. Bind the input texture
glBindTexture(texture.target, texture.id);
glTexParameteri(texture.target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(texture.target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(texture.target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

// 4. Draw it using the pass-through shader which also applies transforms
_passThroughShader.draw(texture, transformMatrix);

// 5 Swap buffers to pass it to the window surface
_context->flush();
OpenGLError::checkIfError("Failed to render Frame to Surface!");
// 5. Swap buffers to pass it to the window surface
eglSwapBuffers(_context->display, _surface);
}

} // namespace vision
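
To follow the new renderTextureToSurface(): the window EGLSurface is created lazily on first use, and presentation now goes through eglSwapBuffers directly rather than the removed getEGLSurface()/flush() pair. A hedged, self-contained sketch of the same per-frame order (all handles are assumed to be created elsewhere):

    #include <EGL/egl.h>
    #include <GLES2/gl2.h>

    // Hedged sketch of the per-frame path renderTextureToSurface() follows.
    void renderOneFrame(EGLDisplay display, EGLContext context, EGLSurface surface,
                        int width, int height) {
      eglMakeCurrent(display, surface, surface, context); // 1. activate context for this surface
      glViewport(0, 0, width, height);                    // 2. set the render viewport
      glDisable(GL_BLEND);
      glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
      glClear(GL_COLOR_BUFFER_BIT);
      // 3./4. bind the input texture and draw with the pass-through shader (omitted)
      eglSwapBuffers(display, surface);                   // 5. present to the window
    }
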
@@ -11,6 +11,7 @@
#include "PassThroughShader.h"

#include "OpenGLContext.h"
#include "OpenGLTexture.h"

namespace vision {

@@ -39,11 +40,6 @@ class OpenGLRenderer {
*/
void destroy();

/**
* Gets the EGLSurface (window surface) that this OpenGL renderer is configured to render to.
*/
EGLSurface getEGLSurface();

private:
explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);

@@ -10,38 +10,33 @@
#include "OpenGLError.h"
#include <string>

#include <android/log.h>

namespace vision {

PassThroughShader::~PassThroughShader() {
if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
if (_programId != NO_SHADER) {
glDeleteProgram(_programId);
_programId = NO_SHADER;
}

if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
}

void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
// 1. Set up Shader Program
if (_programId == NO_SHADER || _shaderTarget != texture.target) {
if (_programId != NO_SHADER) {
glDeleteProgram(_programId);
}
_programId = createProgram(texture.target);
if (_programId == NO_SHADER) {
_programId = createProgram();
glUseProgram(_programId);
_vertexParameters = {
.aPosition = glGetAttribLocation(_programId, "aPosition"),
.aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
.uTransformMatrix = glGetUniformLocation(_programId, "uTransformMatrix"),
.aPosition = glGetAttribLocation(_programId, "aPosition"),
.aTexCoord = glGetAttribLocation(_programId, "aTexCoord"),
.uTransformMatrix = glGetUniformLocation(_programId, "uTransformMatrix"),
};
_fragmentParameters = {
.uTexture = glGetUniformLocation(_programId, "uTexture"),
.uTexture = glGetUniformLocation(_programId, "uTexture"),
};
_shaderTarget = texture.target;
}

glUseProgram(_programId);
@@ -49,11 +44,10 @@ void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatri
// 2. Set up Vertices Buffer
if (_vertexBuffer == NO_BUFFER) {
glGenBuffers(1, &_vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
}

glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);

// 3. Pass all uniforms/attributes for vertex shader
glEnableVertexAttribArray(_vertexParameters.aPosition);
glVertexAttribPointer(_vertexParameters.aPosition,
@@ -97,10 +91,9 @@ GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode)
return shader;
}

GLuint PassThroughShader::createProgram(GLenum textureTarget) {
GLuint PassThroughShader::createProgram() {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
auto fragmentShaderCode = textureTarget == GL_TEXTURE_EXTERNAL_OES ? FRAGMENT_SHADER_EXTERNAL_TEXTURE : FRAGMENT_SHADER;
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

GLuint program = glCreateProgram();
if (program == 0) throw OpenGLError("Failed to create pass-through program!");
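
createProgram() is back to compiling one fixed fragment shader instead of picking one per texture target. The compile-and-link sequence it relies on is standard OpenGL ES 2.0; a hedged stand-alone sketch with simplified error handling:

    #include <GLES2/gl2.h>
    #include <stdexcept>

    // Hedged sketch of the compile-and-link pattern createProgram() follows.
    static GLuint compileShader(GLenum type, const char* source) {
      GLuint shader = glCreateShader(type);
      glShaderSource(shader, 1, &source, nullptr);
      glCompileShader(shader);
      GLint ok = GL_FALSE;
      glGetShaderiv(shader, GL_COMPILE_STATUS, &ok);
      if (ok != GL_TRUE) throw std::runtime_error("Shader failed to compile!");
      return shader;
    }

    static GLuint linkProgram(GLuint vertexShader, GLuint fragmentShader) {
      GLuint program = glCreateProgram();
      glAttachShader(program, vertexShader);
      glAttachShader(program, fragmentShader);
      glLinkProgram(program);
      GLint ok = GL_FALSE;
      glGetProgramiv(program, GL_LINK_STATUS, &ok);
      if (ok != GL_TRUE) throw std::runtime_error("Program failed to link!");
      return program;
    }
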
@@ -14,7 +14,6 @@ namespace vision {
#define NO_SHADER 0
#define NO_POSITION 0
#define NO_BUFFER 0
#define NO_SHADER_TARGET 0

struct Vertex {
GLfloat position[2];
@@ -28,17 +27,17 @@ class PassThroughShader {

/**
* Draw the texture using this shader.
* Note: At the moment, only EXTERNAL textures are supported by the Shader.
*/
void draw(const OpenGLTexture& texture, float* transformMatrix);

private:
// Loading
static GLuint loadShader(GLenum shaderType, const char* shaderCode);
static GLuint createProgram(GLenum textureTarget);
static GLuint createProgram();

private:
// Shader program in memory
GLenum _shaderTarget = NO_SHADER_TARGET;
// Parameters
GLuint _programId = NO_SHADER;
GLuint _vertexBuffer = NO_BUFFER;
struct VertexParameters {
@@ -71,17 +70,7 @@ class PassThroughShader {
}
)";
static constexpr char FRAGMENT_SHADER[] = R"(
precision mediump float;
varying vec2 vTexCoord;
uniform sampler2D uTexture;

void main() {
gl_FragColor = texture2D(uTexture, vTexCoord);
}
)";
static constexpr char FRAGMENT_SHADER_EXTERNAL_TEXTURE[] = R"(
#extension GL_OES_EGL_image_external : require

precision mediump float;
varying vec2 vTexCoord;
uniform samplerExternalOES uTexture;

@@ -16,7 +16,6 @@

#include "OpenGLTexture.h"
#include "JFrameProcessor.h"
#include "JSkiaFrameProcessor.h"

namespace vision {

@@ -32,23 +31,29 @@ VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int

VideoPipeline::~VideoPipeline() {
// 1. Remove output surfaces
removeFrameProcessor();
removeFrameProcessorOutputSurface();
removeRecordingSessionOutputSurface();
removePreviewOutputSurface();
// 2. Delete the input textures
if (_inputTexture != std::nullopt) {
glDeleteTextures(1, &_inputTexture->id);
_inputTexture = std::nullopt;
}
// 3. Destroy the OpenGL context
_context = nullptr;
}

void VideoPipeline::removeFrameProcessor() {
_frameProcessor = nullptr;
void VideoPipeline::removeFrameProcessorOutputSurface() {
if (_frameProcessorOutput) _frameProcessorOutput->destroy();
_frameProcessorOutput = nullptr;
}

void VideoPipeline::setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor) {
_frameProcessor = jni::make_global(frameProcessor);
void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
// 1. Delete existing output surface
removeFrameProcessorOutputSurface();

// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}

void VideoPipeline::removeRecordingSessionOutputSurface() {
@@ -65,139 +70,45 @@ void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
_recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}

void VideoPipeline::removePreviewOutputSurface() {
if (_previewOutput) _previewOutput->destroy();
_previewOutput = nullptr;
}

jni::local_ref<JFrame> VideoPipeline::createFrame() {
static const auto createFrameMethod = javaClassLocal()->getMethod<JFrame()>("createFrame");
return createFrameMethod(_javaPart);
}

void VideoPipeline::setPreviewOutputSurface(jobject surface) {
// 1. Delete existing output surface
removePreviewOutputSurface();

// 2. Set new output surface if it is not null
ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
_previewOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
}

int VideoPipeline::getInputTextureId() {
if (_inputTexture == std::nullopt) {
_inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
}

return static_cast<int>(_inputTexture->id);
}

void VideoPipeline::onBeforeFrame() {
// 1. Activate the offscreen context
_context->use();

// 2. Prepare the external texture so the Camera can render into it
OpenGLTexture& texture = _inputTexture.value();
glBindTexture(texture.target, texture.id);
glBindTexture(_inputTexture->target, _inputTexture->id);
}

void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
// 1. Activate the offscreen context
_context->use();

// 2. Get the OpenGL transform Matrix (transforms, scales, rotations)
// Get the OpenGL transform Matrix (transforms, scales, rotations)
float transformMatrix[16];
transformMatrixParam->getRegion(0, 16, transformMatrix);

// 3. Prepare the texture we are going to render
OpenGLTexture& texture = _inputTexture.value();

// 4. Render to all outputs!
auto isSkiaFrameProcessor = _frameProcessor != nullptr && _frameProcessor->isInstanceOf(JSkiaFrameProcessor::javaClassStatic());
if (isSkiaFrameProcessor) {
// 4.1. If we have a Skia Frame Processor, prepare to render to an offscreen surface using Skia
jni::global_ref<JSkiaFrameProcessor::javaobject> skiaFrameProcessor = jni::static_ref_cast<JSkiaFrameProcessor::javaobject>(_frameProcessor);
SkiaRenderer& skiaRenderer = skiaFrameProcessor->cthis()->getSkiaRenderer();
auto drawCallback = [=](SkCanvas* canvas) {
// Create a JFrame instance (this uses queues/recycling)
auto frame = JFrame::create(texture.width,
texture.height,
texture.width * 4,
_context->getCurrentPresentationTime(),
"portrait",
false);

// Fill the Frame with the contents of the GL surface
_context->getPixelsOfTexture(texture,
&frame->cthis()->pixelsSize,
&frame->cthis()->pixels);

// Call the Frame processor with the Frame
frame->cthis()->incrementRefCount();
skiaFrameProcessor->cthis()->call(frame, canvas);
frame->cthis()->decrementRefCount();
};

// 4.2. Render to the offscreen surface using Skia
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using Skia..");
OpenGLTexture offscreenTexture = skiaRenderer.renderTextureToOffscreenSurface(*_context,
texture,
transformMatrix,
drawCallback);

// 4.3. Now render the result of the offscreen surface to all output surfaces!
if (_previewOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _previewOutput->getEGLSurface());
}
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _recordingSessionOutput->getEGLSurface());
}
} else {
// 4.1. If we have a Frame Processor, call it
if (_frameProcessor != nullptr) {
// Create a JFrame instance (this uses queues/recycling)
auto frame = JFrame::create(texture.width,
texture.height,
texture.width * 4,
_context->getCurrentPresentationTime(),
"portrait",
false);

// Fill the Frame with the contents of the GL surface
_context->getPixelsOfTexture(texture,
&frame->cthis()->pixelsSize,
&frame->cthis()->pixels);

// Call the Frame processor with the Frame
frame->cthis()->incrementRefCount();
_frameProcessor->cthis()->call(frame);
frame->cthis()->decrementRefCount();
}

// 4.2. Simply pass-through shader to render the texture to all output EGLSurfaces
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using pass-through OpenGL Shader..");
if (_previewOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
_previewOutput->renderTextureToSurface(texture, transformMatrix);
}
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
}
if (_frameProcessorOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
_frameProcessorOutput->renderTextureToSurface(texture, transformMatrix);
}
if (_recordingSessionOutput) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
_recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
}
}

void VideoPipeline::registerNatives() {
registerHybrid({
makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("setFrameProcessor", VideoPipeline::setFrameProcessor),
makeNativeMethod("removeFrameProcessor", VideoPipeline::removeFrameProcessor),
makeNativeMethod("setPreviewOutputSurface", VideoPipeline::setPreviewOutputSurface),
makeNativeMethod("removePreviewOutputSurface", VideoPipeline::removePreviewOutputSurface),
makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
makeNativeMethod("onFrame", VideoPipeline::onFrame),
});
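
The registerNatives() table shrinks to match: the setFrameProcessor/removeFrameProcessor and preview-surface bindings give way to the frame-processor output-surface bindings. For reference, a hedged sketch of the fbjni hybrid-class registration pattern used throughout these files (MyHybrid and its Java descriptor are hypothetical; initHybrid and the Java counterpart are omitted):

    #include <fbjni/fbjni.h>

    struct MyHybrid : public facebook::jni::HybridClass<MyHybrid> {
      static constexpr auto kJavaDescriptor = "Lcom/example/MyHybrid;";

      void onFrame() { /* per-frame work */ }

      static void registerNatives() {
        registerHybrid({
            makeNativeMethod("onFrame", MyHybrid::onFrame),
        });
      }
    };
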
@@ -8,13 +8,11 @@
#include <fbjni/fbjni.h>
#include <EGL/egl.h>
#include <android/native_window.h>
#include <memory>

#include "PassThroughShader.h"
#include "OpenGLRenderer.h"
#include "OpenGLContext.h"

#include "OpenGLTexture.h"
#include "JFrameProcessor.h"
#include <memory>
#include <optional>

namespace vision {

@@ -33,17 +31,13 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
int getInputTextureId();

// <- Frame Processor output
void setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor);
void removeFrameProcessor();
void setFrameProcessorOutputSurface(jobject surface);
void removeFrameProcessorOutputSurface();

// <- MediaRecorder output
void setRecordingSessionOutputSurface(jobject surface);
void removeRecordingSessionOutputSurface();

// <- Preview output
void setPreviewOutputSurface(jobject surface);
void removePreviewOutputSurface();

// Frame callbacks
void onBeforeFrame();
void onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrix);
@@ -51,22 +45,17 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
private:
// Private constructor. Use `create(..)` to create new instances.
explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
// Creates a new Frame instance which should be filled with data.
jni::local_ref<JFrame> createFrame();

private:
// Input Surface Texture
std::optional<OpenGLTexture> _inputTexture;
std::optional<OpenGLTexture> _inputTexture = std::nullopt;
int _width = 0;
int _height = 0;

// (Optional) Frame Processor that processes frames before they go into output
jni::global_ref<JFrameProcessor::javaobject> _frameProcessor = nullptr;

// Output Contexts
std::shared_ptr<OpenGLContext> _context = nullptr;
std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
std::unique_ptr<OpenGLRenderer> _previewOutput = nullptr;

private:
friend HybridBase;

@@ -4,7 +4,6 @@
#include "JFrameProcessor.h"
#include "JVisionCameraProxy.h"
#include "VisionCameraProxy.h"
#include "JSkiaFrameProcessor.h"
#include "VideoPipeline.h"

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
@@ -15,9 +14,6 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
vision::VideoPipeline::registerNatives();
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
vision::JFrameProcessor::registerNatives();
#endif
#if VISION_CAMERA_ENABLE_SKIA
vision::JSkiaFrameProcessor::registerNatives();
#endif
});
}

@@ -31,6 +31,7 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
@@ -54,7 +55,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
const jsi::Value* args,
size_t count) -> jsi::Value {
// Increment retain count by one.
this->frame->cthis()->incrementRefCount();
this->frame->incrementRefCount();
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime,
@@ -68,7 +69,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
const jsi::Value* args,
size_t count) -> jsi::Value {
// Decrement retain count by one. If the retain count is zero, the Frame gets closed.
this->frame->cthis()->decrementRefCount();
this->frame->decrementRefCount();
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime,
@@ -84,8 +85,8 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
if (!this->frame) {
return jsi::String::createFromUtf8(runtime, "[closed frame]");
}
auto width = this->frame->cthis()->getWidth();
auto height = this->frame->cthis()->getHeight();
auto width = this->frame->getWidth();
auto height = this->frame->getHeight();
auto str = std::to_string(width) + " x " + std::to_string(height) + " Frame";
return jsi::String::createFromUtf8(runtime, str);
};
@@ -96,8 +97,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
const jsi::Value& thisArg,
const jsi::Value* args,
size_t count) -> jsi::Value {
size_t size = frame->cthis()->pixelsSize;
uint8_t* pixels = frame->cthis()->pixels;
auto buffer = this->frame->toByteBuffer();
if (!buffer->isDirect()) {
throw std::runtime_error("Failed to get byte content of Frame - array is not direct ByteBuffer!");
}
auto size = buffer->getDirectSize();

static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
@@ -115,7 +119,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr

// directly write to C++ JSI ArrayBuffer
auto destinationBuffer = arrayBuffer.data(runtime);
memcpy(destinationBuffer, pixels, sizeof(uint8_t) * size);
memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);

return arrayBuffer;
};
@@ -123,30 +127,33 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
}

if (name == "isValid") {
return jsi::Value(this->frame && this->frame->cthis()->getIsValid());
return jsi::Value(this->frame && this->frame->getIsValid());
}
if (name == "width") {
return jsi::Value(this->frame->cthis()->getWidth());
return jsi::Value(this->frame->getWidth());
}
if (name == "height") {
return jsi::Value(this->frame->cthis()->getHeight());
return jsi::Value(this->frame->getHeight());
}
if (name == "isMirrored") {
return jsi::Value(this->frame->cthis()->getIsMirrored());
return jsi::Value(this->frame->getIsMirrored());
}
if (name == "orientation") {
auto string = this->frame->cthis()->getOrientation();
auto string = this->frame->getOrientation();
return jsi::String::createFromUtf8(runtime, string->toStdString());
}
if (name == "pixelFormat") {
auto string = this->frame->cthis()->getPixelFormat();
auto string = this->frame->getPixelFormat();
return jsi::String::createFromUtf8(runtime, string->toStdString());
}
if (name == "timestamp") {
return jsi::Value(static_cast<double>(this->frame->cthis()->getTimestamp()));
return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
}
if (name == "bytesPerRow") {
return jsi::Value(this->frame->cthis()->getBytesPerRow());
return jsi::Value(this->frame->getBytesPerRow());
}
if (name == "planesCount") {
return jsi::Value(this->frame->getPlanesCount());
}

// fallback to base implementation
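
FrameHostObject::get() is the standard JSI HostObject pattern: dispatch on the property name, return plain values for properties and host functions for methods. A hedged, minimal example of the same pattern (CounterHostObject is hypothetical):

    #include <jsi/jsi.h>

    using namespace facebook;

    class CounterHostObject : public jsi::HostObject {
     public:
      jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& propName) override {
        auto name = propName.utf8(runtime);
        if (name == "value") {
          return jsi::Value(_value); // plain property
        }
        if (name == "increment") {
          // method: return a host function closing over `this`
          auto increment = [this](jsi::Runtime&, const jsi::Value&,
                                  const jsi::Value*, size_t) -> jsi::Value {
            _value++;
            return jsi::Value::undefined();
          };
          return jsi::Function::createFromHostFunction(
              runtime, jsi::PropNameID::forUtf8(runtime, "increment"), 0, increment);
        }
        return jsi::Value::undefined(); // unknown property
      }

     private:
      int _value = 0;
    };
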
@@ -26,7 +26,7 @@ class JSI_EXPORT FrameHostObject : public jsi::HostObject {
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) override;

public:
jni::global_ref<JFrame::javaobject> frame;
jni::global_ref<JFrame> frame;
};

} // namespace vision

@@ -111,10 +111,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c

return jsi::String::createFromUtf8(runtime, object->toString());

} else if (object->isInstanceOf(jni::JList<jobject>::javaClassStatic())) {
} else if (object->isInstanceOf(JList<jobject>::javaClassStatic())) {
// List<E>

auto arrayList = jni::static_ref_cast<jni::JList<jobject>>(object);
auto arrayList = static_ref_cast<JList<jobject>>(object);
auto size = arrayList->size();

auto result = jsi::Array(runtime, size);
@@ -125,10 +125,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
}
return result;

} else if (object->isInstanceOf(jni::JMap<jstring, jobject>::javaClassStatic())) {
} else if (object->isInstanceOf(JMap<jstring, jobject>::javaClassStatic())) {
// Map<K, V>

auto map = jni::static_ref_cast<jni::JMap<jstring, jobject>>(object);
auto map = static_ref_cast<JMap<jstring, jobject>>(object);

auto result = jsi::Object(runtime);
for (const auto& entry : *map) {
@@ -140,7 +140,7 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
return result;
} else if (object->isInstanceOf(JFrame::javaClassStatic())) {
// Frame
auto frame = jni::static_ref_cast<JFrame::javaobject>(object);
auto frame = static_ref_cast<JFrame>(object);

// box into HostObject
auto hostObject = std::make_shared<FrameHostObject>(frame);

@@ -39,7 +39,6 @@ std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& r
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("isSkiaEnabled")));
return result;
}

@@ -65,13 +64,6 @@ jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime,
jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);

if (name == "isSkiaEnabled") {
#ifdef VISION_CAMERA_ENABLE_SKIA
return jsi::Value(true);
#else
return jsi::Value(false);
#endif
}
if (name == "setFrameProcessor") {
return jsi::Function::createFromHostFunction(runtime,
jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"),

@@ -11,85 +11,71 @@
namespace vision {

using namespace facebook;
using namespace jni;

void JFrame::registerNatives() {
registerHybrid({
makeNativeMethod("getWidth", JFrame::getWidth),
makeNativeMethod("getHeight", JFrame::getHeight),
makeNativeMethod("getBytesPerRow", JFrame::getBytesPerRow),
makeNativeMethod("getTimestamp", JFrame::getTimestamp),
makeNativeMethod("getOrientation", JFrame::getOrientation),
makeNativeMethod("getIsMirrored", JFrame::getIsMirrored),
makeNativeMethod("getPixelFormat", JFrame::getPixelFormat),
makeNativeMethod("getByteBuffer", JFrame::getByteBuffer),
makeNativeMethod("getIsValid", JFrame::getIsValid),
});
int JFrame::getWidth() const {
static const auto getWidthMethod = getClass()->getMethod<jint()>("getWidth");
return getWidthMethod(self());
}

jni::local_ref<JFrame::javaobject> JFrame::create(int width,
int height,
int bytesPerRow,
long timestamp,
const std::string& orientation,
bool isMirrored) {
return newObjectCxxArgs(width,
height,
bytesPerRow,
timestamp,
orientation,
isMirrored);
int JFrame::getHeight() const {
static const auto getWidthMethod = getClass()->getMethod<jint()>("getHeight");
return getWidthMethod(self());
}

JFrame::JFrame(int width,
int height,
int bytesPerRow,
long timestamp,
const std::string& orientation,
bool isMirrored) {
_width = width;
_height = height;
_bytesPerRow = bytesPerRow;
_timestamp = timestamp;
_orientation = orientation;
_isMirrored = isMirrored;
_refCount = 0;
pixelsSize = height * bytesPerRow;
pixels = (uint8_t*) malloc(pixelsSize);
bool JFrame::getIsValid() const {
static const auto getIsValidMethod = getClass()->getMethod<jboolean()>("getIsValid");
return getIsValidMethod(self());
}

JFrame::~JFrame() noexcept {
close();
bool JFrame::getIsMirrored() const {
static const auto getIsMirroredMethod = getClass()->getMethod<jboolean()>("getIsMirrored");
return getIsMirroredMethod(self());
}

bool JFrame::getIsValid() {
return _refCount > 0 && !_isClosed;
jlong JFrame::getTimestamp() const {
static const auto getTimestampMethod = getClass()->getMethod<jlong()>("getTimestamp");
return getTimestampMethod(self());
}

jni::local_ref<jni::JByteBuffer> JFrame::getByteBuffer() {
if (!getIsValid()) {
[[unlikely]]
throw std::runtime_error("Frame is no longer valid, cannot access getByteBuffer!");
}
return jni::JByteBuffer::wrapBytes(pixels, pixelsSize);
local_ref<JString> JFrame::getOrientation() const {
static const auto getOrientationMethod = getClass()->getMethod<JString()>("getOrientation");
return getOrientationMethod(self());
}

local_ref<JString> JFrame::getPixelFormat() const {
static const auto getPixelFormatMethod = getClass()->getMethod<JString()>("getPixelFormat");
return getPixelFormatMethod(self());
}

int JFrame::getPlanesCount() const {
static const auto getPlanesCountMethod = getClass()->getMethod<jint()>("getPlanesCount");
return getPlanesCountMethod(self());
}

int JFrame::getBytesPerRow() const {
static const auto getBytesPerRowMethod = getClass()->getMethod<jint()>("getBytesPerRow");
return getBytesPerRowMethod(self());
}

local_ref<JByteBuffer> JFrame::toByteBuffer() const {
static const auto toByteBufferMethod = getClass()->getMethod<JByteBuffer()>("toByteBuffer");
return toByteBufferMethod(self());
}

void JFrame::incrementRefCount() {
std::unique_lock lock(_mutex);
_refCount++;
static const auto incrementRefCountMethod = getClass()->getMethod<void()>("incrementRefCount");
incrementRefCountMethod(self());
}

void JFrame::decrementRefCount() {
std::unique_lock lock(_mutex);
_refCount--;
if (_refCount <= 0) {
this->close();
}
static const auto decrementRefCountMethod = getClass()->getMethod<void()>("decrementRefCount");
decrementRefCountMethod(self());
}

void JFrame::close() {
_isClosed = true;
free(pixels);
pixels = nullptr;
static const auto closeMethod = getClass()->getMethod<void()>("close");
closeMethod(self());
}

} // namespace vision
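
The new JFrame is no longer a HybridClass that owns a malloc'd pixel buffer; it is a plain fbjni JavaClass wrapper that forwards every getter to the Java-side Frame, resolving each method once in a function-local static. A hedged sketch of that pattern on a hypothetical class:

    #include <fbjni/fbjni.h>

    using namespace facebook::jni;

    // Hedged sketch: JExample / com.example.Example are hypothetical stand-ins.
    struct JExample : public JavaClass<JExample> {
      static constexpr auto kJavaDescriptor = "Lcom/example/Example;";

      int getWidth() const {
        // resolved once, then cached for all subsequent calls
        static const auto method = getClass()->getMethod<jint()>("getWidth");
        return method(self());
      }
    };
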
@@ -7,70 +7,29 @@
#include <jni.h>
#include <fbjni/fbjni.h>
#include <fbjni/ByteBuffer.h>
#include <android/hardware_buffer.h>
#include <android/hardware_buffer_jni.h>
#include <mutex>

namespace vision {

using namespace facebook;
using namespace jni;

class JFrame : public jni::HybridClass<JFrame> {
public:
struct JFrame : public JavaClass<JFrame> {
static constexpr auto kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/Frame;";
static void registerNatives();
static jni::local_ref<JFrame::javaobject> create(int width,
int height,
int bytesPerRow,
long timestamp,
const std::string& orientation,
bool isMirrored);

~JFrame() noexcept;

protected:
friend HybridBase;
explicit JFrame(int width,
int height,
int bytesPerRow,
long timestamp,
const std::string& orientation,
bool isMirrored);

public:
int getWidth() { return _width; }
int getHeight() { return _height; }
int getBytesPerRow() { return _bytesPerRow; }
jlong getTimestamp() { return _timestamp; }
jni::local_ref<jni::JString> getOrientation() { return jni::make_jstring(_orientation); }
bool getIsMirrored() { return _isMirrored; }

// TODO: Can this be something other than RGB?
jni::local_ref<jni::JString> getPixelFormat() { return jni::make_jstring("rgb"); }

bool getIsValid();
jni::local_ref<jni::JByteBuffer> getByteBuffer();
int getWidth() const;
int getHeight() const;
bool getIsValid() const;
bool getIsMirrored() const;
int getPlanesCount() const;
int getBytesPerRow() const;
jlong getTimestamp() const;
local_ref<JString> getOrientation() const;
local_ref<JString> getPixelFormat() const;
local_ref<JByteBuffer> toByteBuffer() const;
void incrementRefCount();
void decrementRefCount();
void close();

// Backing byte data
uint8_t* pixels = nullptr;
size_t pixelsSize = 0;

private:
// Frame info
int _width = 0;
int _height = 0;
int _bytesPerRow = 0;
long _timestamp = 0;
std::string _orientation = {};
bool _isMirrored = false;

// Ref-counting
int _refCount = 0;
bool _isClosed = false;
std::mutex _mutex;
};

} // namespace vision

@@ -17,6 +17,9 @@ using namespace facebook;
using namespace jni;

void JFrameProcessor::registerNatives() {
registerHybrid({
makeNativeMethod("call", JFrameProcessor::call)
});
}

using TSelf = jni::local_ref<JFrameProcessor::javaobject>;

@@ -21,7 +21,7 @@ namespace vision {

using namespace facebook;

class JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/FrameProcessor;";
static void registerNatives();
@@ -30,25 +30,20 @@ class JFrameProcessor : public jni::HybridClass<JFrameProcessor> {

public:
/**
* Wrap the Frame in a HostObject and call the Frame Processor.
* Call the JS Frame Processor.
*/
void call(jni::alias_ref<JFrame::javaobject> frame);
void call(alias_ref<JFrame::javaobject> frame);

protected:
friend HybridBase;
// C++ only constructor. Use `create(..)` to create new instances.
private:
// Private constructor. Use `create(..)` to create new instances.
explicit JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet,
std::shared_ptr<RNWorklet::JsiWorkletContext> context);
JFrameProcessor(const JFrameProcessor &) = delete;
JFrameProcessor &operator=(const JFrameProcessor &) = delete;

protected:
/**
* Call the JS Frame Processor with the given Frame Host Object.
*/
private:
void callWithFrameHostObject(const std::shared_ptr<FrameHostObject>& frameHostObject) const;

private:
friend HybridBase;
std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
};

@@ -18,10 +18,6 @@
#include <react-native-worklets-core/WKTJsiWorkletContext.h>
#endif

#if VISION_CAMERA_ENABLE_SKIA
#include "JSkiaFrameProcessor.h"
#endif

namespace vision {

using TSelf = local_ref<HybridClass<JVisionCameraProxy>::jhybriddata>;
@@ -35,7 +31,6 @@ JVisionCameraProxy::JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::
const jni::global_ref<JVisionCameraScheduler::javaobject>& scheduler) {
_javaPart = make_global(javaThis);
_runtime = runtime;
_callInvoker = callInvoker;

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
__android_log_write(ANDROID_LOG_INFO, TAG, "Creating Worklet Context...");
@@ -58,12 +53,6 @@ JVisionCameraProxy::JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::
#else
__android_log_write(ANDROID_LOG_INFO, TAG, "Frame Processors are disabled!");
#endif

#ifdef VISION_CAMERA_ENABLE_SKIA
__android_log_write(ANDROID_LOG_INFO, TAG, "Skia is enabled!");
#else
__android_log_write(ANDROID_LOG_INFO, TAG, "Skia is disabled!");
#endif
}

JVisionCameraProxy::~JVisionCameraProxy() {
@@ -87,12 +76,6 @@ void JVisionCameraProxy::setFrameProcessor(int viewTag,
jni::local_ref<JFrameProcessor::javaobject> frameProcessor;
if (frameProcessorType == "frame-processor") {
frameProcessor = JFrameProcessor::create(worklet, _workletContext);
} else if (frameProcessorType == "skia-frame-processor") {
#if VISION_CAMERA_ENABLE_SKIA
frameProcessor = JSkiaFrameProcessor::create(worklet, _workletContext, _callInvoker);
#else
throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
#endif
} else {
throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
}

@@ -36,13 +36,11 @@ class JVisionCameraProxy : public jni::HybridClass<JVisionCameraProxy> {
jni::local_ref<JMap<jstring, jobject>> options);

jsi::Runtime* getJSRuntime() { return _runtime; }
std::shared_ptr<react::CallInvoker> getCallInvoker() { return _callInvoker; }

private:
friend HybridBase;
jni::global_ref<JVisionCameraProxy::javaobject> _javaPart;
jsi::Runtime* _runtime;
std::shared_ptr<react::CallInvoker> _callInvoker;
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
#endif
@@ -1,72 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#include "DrawableFrameHostObject.h"
#include <SkCanvas.h>
#include "FrameHostObject.h"

namespace vision {

std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
auto result = FrameHostObject::getPropertyNames(rt);

// Skia - Render Frame
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));

if (_canvas != nullptr) {
auto canvasPropNames = _canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}

return result;
}

SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}

jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);

if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (_canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}

throw std::runtime_error("render() is not yet implemented!");

return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "isDrawable") {
return jsi::Value(_canvas != nullptr);
}

if (_canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
auto result = _canvas->get(runtime, propName);
if (!result.isUndefined()) {
return result;
}
}

// fallback to base implementation
return FrameHostObject::get(runtime, propName);
}

void DrawableFrameHostObject::invalidateCanvas() {
_canvas = nullptr;
}



} // namespace vision
@@ -1,33 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#pragma once

#include <jsi/jsi.h>
#include "FrameHostObject.h"
#include "JFrame.h"

#include <SkCanvas.h>
#include <JsiSkCanvas.h>

namespace vision {

using namespace facebook;

class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
public:
explicit DrawableFrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas): FrameHostObject(frame), _canvas(canvas) {}

public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;

void invalidateCanvas();

private:
std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};

} // namespace vision
@@ -1,61 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA

#include "JSkiaFrameProcessor.h"
#include <jni.h>
#include <fbjni/fbjni.h>

#include <utility>
#include "JFrame.h"
#include "DrawableFrameHostObject.h"

#include <RNSkPlatformContext.h>
#include "VisionCameraSkiaContext.h"

namespace vision {

using namespace facebook;
using namespace jni;

void JSkiaFrameProcessor::registerNatives() {
}

using TSelf = jni::local_ref<JSkiaFrameProcessor::javaobject>;

JSkiaFrameProcessor::JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker)
: JSkiaFrameProcessor::HybridBase(worklet, context) {
// TODO: Can I use the Android Platform Context from react-native-skia here?
auto skiaPlatformContext = std::make_shared<VisionCameraSkiaContext>(context->getJsRuntime(),
callInvoker,
1.0f);
_jsiCanvas = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
_skiaRenderer = std::make_shared<SkiaRenderer>();
}

TSelf JSkiaFrameProcessor::create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker) {
return JSkiaFrameProcessor::newObjectCxxArgs(worklet, context, callInvoker);
}

void JSkiaFrameProcessor::call(alias_ref<JFrame::javaobject> frame,
SkCanvas* canvas) {
// Create the Frame Host Object wrapping the internal Frame and Skia Canvas
_jsiCanvas->setCanvas(canvas);
auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, _jsiCanvas);

// Call the base function in JFrameProcessor
callWithFrameHostObject(frameHostObject);

// Remove Skia Canvas from Host Object because it is no longer valid
frameHostObject->invalidateCanvas();
}

} // namespace vision

#endif
@@ -1,59 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#pragma once

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA

#include <string>
#include <memory>
#include <jni.h>
#include <fbjni/fbjni.h>

#include <react-native-worklets-core/WKTJsiWorklet.h>
#include <react-native-worklets-core/WKTJsiHostObject.h>

#include "JFrame.h"
#include "FrameHostObject.h"
#include "SkiaRenderer.h"
#include "JFrameProcessor.h"

#include <JsiSkCanvas.h>
#include <RNSkPlatformContext.h>

namespace vision {

using namespace facebook;

class JSkiaFrameProcessor : public jni::HybridClass<JSkiaFrameProcessor, JFrameProcessor> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/skia/SkiaFrameProcessor;";
static void registerNatives();
static jni::local_ref<JSkiaFrameProcessor::javaobject> create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker);
public:
/**
* Call the JS Frame Processor with the given valid Canvas to draw on.
*/
void call(jni::alias_ref<JFrame::javaobject> frame,
SkCanvas* canvas);

SkiaRenderer& getSkiaRenderer() { return *_skiaRenderer; }

protected:
friend HybridBase;
// Private constructor. Use `create(..)` to create new instances.
explicit JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker);

private:
std::shared_ptr<RNSkia::JsiSkCanvas> _jsiCanvas;
std::shared_ptr<SkiaRenderer> _skiaRenderer;
};

} // namespace vision

#endif
@@ -1,234 +0,0 @@
//
// Created by Marc Rousavy on 10.08.23.
//

#if VISION_CAMERA_ENABLE_SKIA

#include "SkiaRenderer.h"
#include <android/log.h>
#include "OpenGLError.h"

#include <GLES2/gl2ext.h>

#include <core/SkColorSpace.h>
#include <core/SkCanvas.h>
#include <core/SkYUVAPixmaps.h>

#include <gpu/gl/GrGLInterface.h>
#include <gpu/GrDirectContext.h>
#include <gpu/GrBackendSurface.h>
#include <gpu/ganesh/SkSurfaceGanesh.h>
#include <gpu/ganesh/SkImageGanesh.h>

#include <android/native_window_jni.h>
#include <android/surface_texture_jni.h>

// from <gpu/ganesh/gl/GrGLDefines.h>
#define GR_GL_RGBA8 0x8058
#define DEFAULT_FBO 0

namespace vision {

SkiaRenderer::~SkiaRenderer() {
  _offscreenSurface = nullptr;
  _offscreenSurfaceTextureId = NO_TEXTURE;

  // Delete the Skia context
  if (_skiaContext != nullptr) {
    _skiaContext->abandonContext();
    _skiaContext = nullptr;
  }
}

sk_sp<GrDirectContext> SkiaRenderer::getSkiaContext() {
  if (_skiaContext == nullptr) {
    _skiaContext = GrDirectContext::MakeGL();
  }
  return _skiaContext;
}

sk_sp<SkImage> SkiaRenderer::wrapTextureAsImage(OpenGLTexture& texture) {
  GrGLTextureInfo textureInfo {
    // OpenGL will automatically convert YUV -> RGB if it's an EXTERNAL texture
    .fTarget = texture.target,
    .fID = texture.id,
    .fFormat = GR_GL_RGBA8,
  };
  GrBackendTexture skiaTexture(texture.width,
                               texture.height,
                               GrMipMapped::kNo,
                               textureInfo);
  sk_sp<SkImage> image = SkImages::BorrowTextureFrom(_skiaContext.get(),
                                                     skiaTexture,
                                                     kBottomLeft_GrSurfaceOrigin,
                                                     kN32_SkColorType,
                                                     kOpaque_SkAlphaType,
                                                     nullptr,
                                                     nullptr);
  if (image == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to create Skia Image! Cannot wrap input texture (frame) using Skia.");
  }
  return image;
}

sk_sp<SkSurface> SkiaRenderer::wrapEglSurfaceAsSurface(EGLSurface eglSurface) {
  GLint sampleCnt;
  glGetIntegerv(GL_SAMPLES, &sampleCnt);
  GLint stencilBits;
  glGetIntegerv(GL_STENCIL_BITS, &stencilBits);
  GrGLFramebufferInfo fboInfo {
    // DEFAULT_FBO is FBO0, meaning the default on-screen FBO for that given surface
    .fFBOID = DEFAULT_FBO,
    .fFormat = GR_GL_RGBA8
  };
  EGLint width = 0, height = 0;
  eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_WIDTH, &width);
  eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_HEIGHT, &height);
  GrBackendRenderTarget renderTarget(width,
                                     height,
                                     sampleCnt,
                                     stencilBits,
                                     fboInfo);
  SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
  sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(),
                                                                 renderTarget,
                                                                 kBottomLeft_GrSurfaceOrigin,
                                                                 kN32_SkColorType,
                                                                 nullptr,
                                                                 &props,
                                                                 nullptr,
                                                                 nullptr);
  if (surface == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to create Skia Surface! Cannot wrap EGLSurface/FrameBuffer using Skia.");
  }
  return surface;
}

sk_sp<SkSurface> SkiaRenderer::getOffscreenSurface(int width, int height) {
  if (_offscreenSurface == nullptr || _offscreenSurface->width() != width || _offscreenSurface->height() != height) {
    // 1. Get Skia Context
    sk_sp<GrDirectContext> skiaContext = getSkiaContext();

    // 2. Create a backend texture (TEXTURE_2D + Frame Buffer)
    GrBackendTexture backendTexture = skiaContext->createBackendTexture(width,
                                                                        height,
                                                                        SkColorType::kN32_SkColorType,
                                                                        GrMipMapped::kNo,
                                                                        GrRenderable::kYes);

    // 3. Get its Texture ID
    GrGLTextureInfo info;
    backendTexture.getGLTextureInfo(&info);
    _offscreenSurfaceTextureId = info.fID;

    struct ReleaseContext {
      GrDirectContext* context;
      GrBackendTexture texture;
    };
    auto releaseCtx = new ReleaseContext(
        {skiaContext.get(), backendTexture});
    SkSurfaces::TextureReleaseProc releaseProc = [] (void* address) {
      // 5. Once done using, delete the backend OpenGL texture.
      auto releaseCtx = reinterpret_cast<ReleaseContext*>(address);
      releaseCtx->context->deleteBackendTexture(releaseCtx->texture);
    };

    // 4. Wrap the newly created texture as an SkSurface
    SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
    _offscreenSurface = SkSurfaces::WrapBackendTexture(skiaContext.get(),
                                                       backendTexture,
                                                       kBottomLeft_GrSurfaceOrigin,
                                                       0,
                                                       SkColorType::kN32_SkColorType,
                                                       nullptr,
                                                       &props,
                                                       releaseProc,
                                                       releaseCtx);
    if (_offscreenSurface == nullptr) [[unlikely]] {
      throw std::runtime_error("Failed to create offscreen Skia Surface!");
    }
  }

  return _offscreenSurface;
}

OpenGLTexture SkiaRenderer::renderTextureToOffscreenSurface(OpenGLContext& glContext,
                                                            OpenGLTexture& texture,
                                                            float* transformMatrix,
                                                            const DrawCallback& drawCallback) {
  // 1. Activate the OpenGL context (eglMakeCurrent)
  glContext.use();

  // 2. Initialize Skia
  sk_sp<GrDirectContext> skiaContext = getSkiaContext();

  // 3. Create the offscreen Skia Surface
  sk_sp<SkSurface> surface = getOffscreenSurface(texture.width, texture.height);

  // 4. Wrap the input texture as an image so we can draw it to the surface
  sk_sp<SkImage> frame = wrapTextureAsImage(texture);

  // 5. Prepare the Canvas
  SkCanvas* canvas = _offscreenSurface->getCanvas();
  if (canvas == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to get Skia Canvas!");
  }

  // TODO: Apply Matrix. No idea how though.
  SkM44 matrix = SkM44::ColMajor(transformMatrix);

  // 6. Render it!
  canvas->clear(SkColors::kBlack);
  canvas->drawImage(frame, 0, 0);

  // 7. Run the user's draw callback so it can draw on top of the Frame
  drawCallback(canvas);

  // 8. Flush all Skia operations to OpenGL
  _offscreenSurface->flushAndSubmit();

  return OpenGLTexture {
    .id = _offscreenSurfaceTextureId,
    .target = GL_TEXTURE_2D,
    .width = texture.width,
    .height = texture.height,
  };
}

void SkiaRenderer::renderTextureToSurface(OpenGLContext& glContext, OpenGLTexture& texture, EGLSurface surface) {
  // 1. Activate the OpenGL context (eglMakeCurrent)
  glContext.use(surface);

  // 2. Initialize Skia
  sk_sp<GrDirectContext> skiaContext = getSkiaContext();

  // 3. Wrap the output EGLSurface in a Skia SkSurface
  sk_sp<SkSurface> skSurface = wrapEglSurfaceAsSurface(surface);

  // 4. Wrap the input texture in a Skia SkImage
  sk_sp<SkImage> frame = wrapTextureAsImage(texture);

  // 5. Prepare the Canvas!
  SkCanvas* canvas = skSurface->getCanvas();
  if (canvas == nullptr) [[unlikely]] {
    throw std::runtime_error("Failed to get Skia Canvas!");
  }

  // 6. Render it!
  canvas->clear(SkColors::kBlack);
  canvas->drawImage(frame, 0, 0);

  // 7. Flush all Skia operations to OpenGL
  skSurface->flushAndSubmit();

  // 8. Swap the buffers so the onscreen surface gets updated.
  glContext.flush();
}

} // namespace vision

#endif
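Both render paths above share one ordering: make the GL context current, wrap input/output in Skia objects, draw, flush, then (for on-screen output) swap buffers. A rough Kotlin sketch of that ordering using Android's EGL14/GLES20 APIs, assuming display, surface and context were created elsewhere:

import android.opengl.EGL14
import android.opengl.EGLContext
import android.opengl.EGLDisplay
import android.opengl.EGLSurface
import android.opengl.GLES20

// Sketch of the make-current -> draw -> flush -> swap ordering that
// SkiaRenderer::renderTextureToSurface performs, minus the Skia wrapping.
fun renderPass(display: EGLDisplay, surface: EGLSurface, context: EGLContext, draw: () -> Unit) {
  // 1. Activate the OpenGL context for this surface (eglMakeCurrent)
  if (!EGL14.eglMakeCurrent(display, surface, surface, context)) {
    throw RuntimeException("eglMakeCurrent failed!")
  }
  // 2. Draw into the currently bound framebuffer (Skia draws here)
  GLES20.glClearColor(0f, 0f, 0f, 1f)
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
  draw()
  // 3. Flush all pending GL work (flushAndSubmit on the Skia side)
  GLES20.glFlush()
  // 4. Swap buffers so the on-screen surface gets updated
  EGL14.eglSwapBuffers(display, surface)
}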
@@ -1,72 +0,0 @@
//
// Created by Marc Rousavy on 10.08.23.
//

#pragma once

#if VISION_CAMERA_ENABLE_SKIA

#include <functional>

#include <GLES2/gl2.h>
#include <EGL/egl.h>
#include <android/native_window.h>

#include <include/core/SkSurface.h>
#include <include/gpu/GrDirectContext.h>

#include "OpenGLContext.h"
#include "OpenGLTexture.h"

namespace vision {

#define NO_TEXTURE 0

using DrawCallback = std::function<void(SkCanvas*)>;

class SkiaRenderer {
 public:
  /**
   * Create a new Skia renderer. The global OpenGL context needs to be set up
   * outside of this class so the Skia renderer can use it.
   */
  explicit SkiaRenderer() {}
  ~SkiaRenderer();

  /**
   * Renders the given Texture (might be a Camera Frame) to a cached offscreen Texture using Skia.
   *
   * @returns The texture that was rendered to.
   */
  OpenGLTexture renderTextureToOffscreenSurface(OpenGLContext& glContext,
                                                OpenGLTexture& texture,
                                                float* transformMatrix,
                                                const DrawCallback& drawCallback);

  /**
   * Renders the given texture to the target output surface using Skia.
   */
  void renderTextureToSurface(OpenGLContext& glContext,
                              OpenGLTexture& texture,
                              EGLSurface surface);

 private:
  // Gets or creates the Skia context.
  sk_sp<GrDirectContext> getSkiaContext();
  // Wraps a Texture as an SkImage, allowing you to draw it
  sk_sp<SkImage> wrapTextureAsImage(OpenGLTexture& texture);
  // Wraps an EGLSurface as an SkSurface, allowing you to draw into it
  sk_sp<SkSurface> wrapEglSurfaceAsSurface(EGLSurface eglSurface);
  // Gets or creates an off-screen surface that you can draw into
  sk_sp<SkSurface> getOffscreenSurface(int width, int height);

 private:
  // Skia Context
  sk_sp<GrDirectContext> _skiaContext = nullptr;
  sk_sp<SkSurface> _offscreenSurface = nullptr;
  GLuint _offscreenSurfaceTextureId = NO_TEXTURE;

  static auto constexpr TAG = "SkiaRenderer";
};

} // namespace vision

#endif
@@ -1,8 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#include "VisionCameraSkiaContext.h"

namespace vision {
} // namespace vision
@@ -1,52 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//

#pragma once

#include <jsi/jsi.h>
#include <RNSkPlatformContext.h>

namespace vision {

using namespace facebook;

class VisionCameraSkiaContext: public RNSkia::RNSkPlatformContext {
 public:
  VisionCameraSkiaContext(jsi::Runtime* runtime,
                          std::shared_ptr<react::CallInvoker> callInvoker,
                          float pixelDensity)
      : RNSkia::RNSkPlatformContext(runtime, callInvoker, pixelDensity) { }

  void raiseError(const std::exception &err) override {
    throw std::runtime_error("VisionCameraSkiaContext Error: " + std::string(err.what()));
  }

  void performStreamOperation(
      const std::string &sourceUri,
      const std::function<void(std::unique_ptr<SkStreamAsset>)> &op) override {
    throw std::runtime_error("VisionCameraSkiaContext::performStreamOperation is not yet implemented!");
  }

  sk_sp<SkSurface> makeOffscreenSurface(int width, int height) override {
    throw std::runtime_error("VisionCameraSkiaContext::makeOffscreenSurface is not yet implemented!");
  }

  void runOnMainThread(std::function<void()> task) override {
    throw std::runtime_error("VisionCameraSkiaContext::runOnMainThread is not yet implemented!");
  }

  sk_sp<SkImage> takeScreenshotFromViewTag(size_t tag) override {
    throw std::runtime_error("VisionCameraSkiaContext::takeScreenshotFromViewTag is not yet implemented!");
  }

  void startDrawLoop() override {
    throw std::runtime_error("VisionCameraSkiaContext::startDrawLoop is not yet implemented!");
  }

  void stopDrawLoop() override {
    throw std::runtime_error("VisionCameraSkiaContext::stopDrawLoop is not yet implemented!");
  }
};

} // namespace vision
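VisionCameraSkiaContext implements only what VisionCamera actually exercises (the constructor and raiseError) and fails loudly for everything else. A hedged Kotlin sketch of that stub-context pattern, against a hypothetical interface standing in for RNSkPlatformContext:

// Hypothetical interface; RNSkPlatformContext has a much larger surface.
interface PlatformContext {
  fun raiseError(error: Throwable)
  fun runOnMainThread(task: () -> Unit)
  fun startDrawLoop()
}

// Stub context: implement only what the host needs, throw for the rest
// so unsupported code paths surface immediately instead of misbehaving.
class StubPlatformContext : PlatformContext {
  override fun raiseError(error: Throwable) {
    throw RuntimeException("StubPlatformContext Error: ${error.message}")
  }

  override fun runOnMainThread(task: () -> Unit) {
    throw UnsupportedOperationException("runOnMainThread is not yet implemented!")
  }

  override fun startDrawLoop() {
    throw UnsupportedOperationException("startDrawLoop is not yet implemented!")
  }
}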
@@ -200,9 +200,8 @@ class CameraSession(private val context: Context,
  private fun updateVideoOutputs() {
    val videoPipeline = outputs?.videoOutput?.videoPipeline ?: return
    val previewOutput = outputs?.previewOutput
    videoPipeline.setRecordingSessionOutput(recording)
    videoPipeline.setFrameProcessorOutput(frameProcessor)
    videoPipeline.setPreviewOutput(previewOutput?.surface)
    videoPipeline.setRecordingSessionOutput(this.recording)
    videoPipeline.setFrameProcessorOutput(this.frameProcessor)
  }

  suspend fun takePhoto(qualityPrioritization: QualityPrioritization,
@@ -216,6 +215,8 @@ class CameraSession(private val context: Context,

    val photoOutput = outputs.photoOutput ?: throw PhotoNotEnabledError()

    Log.i(TAG, "Photo capture 0/3 - preparing capture request (${photoOutput.size.width}x${photoOutput.size.height})...")

    val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id)
    val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics)
    val captureRequest = captureSession.device.createPhotoCaptureRequest(cameraManager,
@@ -226,16 +227,16 @@ class CameraSession(private val context: Context,
      enableRedEyeReduction,
      enableAutoStabilization,
      orientation)
    Log.i(TAG, "Photo capture 0/2 - starting capture...")
    Log.i(TAG, "Photo capture 1/3 - starting capture...")
    val result = captureSession.capture(captureRequest, enableShutterSound)
    val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!!
    Log.i(TAG, "Photo capture 1/2 complete - received metadata with timestamp $timestamp")
    Log.i(TAG, "Photo capture 2/3 complete - received metadata with timestamp $timestamp")
    try {
      val image = photoOutputSynchronizer.await(timestamp)

      val isMirrored = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT

      Log.i(TAG, "Photo capture 2/2 complete - received ${image.width} x ${image.height} image.")
      Log.i(TAG, "Photo capture 3/3 complete - received ${image.width} x ${image.height} image.")
      return CapturedPhoto(image, result, orientation, isMirrored, image.format)
    } catch (e: CancellationException) {
      throw CaptureAbortedError(false)
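The capture flow above joins two asynchronous callbacks on SENSOR_TIMESTAMP: the CaptureResult metadata arrives first, the actual Image later, and photoOutputSynchronizer.await(timestamp) bridges them. Its internals are not shown in this diff; a simplified Kotlin sketch of such a synchronizer might look like this:

import android.media.Image
import kotlinx.coroutines.CompletableDeferred

// Hypothetical sketch: join CaptureResult metadata with the Image that
// carries the same SENSOR_TIMESTAMP, whichever side arrives first.
class TimestampSynchronizer {
  private val pending = mutableMapOf<Long, CompletableDeferred<Image>>()

  @Synchronized
  private fun getOrCreate(timestamp: Long): CompletableDeferred<Image> =
    pending.getOrPut(timestamp) { CompletableDeferred() }

  // Called from the capture coroutine once the metadata (and timestamp) arrived.
  suspend fun await(timestamp: Long): Image = getOrCreate(timestamp).await()

  // Called from the ImageReader callback when a new photo Image is available.
  fun set(timestamp: Long, image: Image) {
    getOrCreate(timestamp).complete(image)
    synchronized(this) { pending.remove(timestamp) }
  }
}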
@@ -501,8 +502,7 @@ class CameraSession(private val context: Context,
    val captureRequest = camera.createCaptureRequest(template)
    outputs.previewOutput?.let { output ->
      Log.i(TAG, "Adding output surface ${output.outputType}..")
      // TODO: Add here again?
      // captureRequest.addTarget(output.surface)
      captureRequest.addTarget(output.surface)
    }
    outputs.videoOutput?.let { output ->
      Log.i(TAG, "Adding output surface ${output.outputType}..")
@@ -25,6 +25,7 @@ import com.mrousavy.camera.utils.outputs.CameraOutputs
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import java.io.Closeable

//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
@@ -90,7 +91,7 @@ class CameraView(context: Context) : FrameLayout(context) {
  internal var frameProcessor: FrameProcessor? = null
    set(value) {
      field = value
      cameraSession.frameProcessor = value
      cameraSession.frameProcessor = frameProcessor
    }

  private val inputOrientation: Orientation
@@ -64,8 +64,7 @@ suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,

  val outputConfigurations = arrayListOf<OutputConfiguration>()
  outputs.previewOutput?.let { output ->
    // TODO: add here again?
    // outputConfigurations.add(output.toOutputConfiguration(characteristics))
    outputConfigurations.add(output.toOutputConfiguration(characteristics))
  }
  outputs.photoOutput?.let { output ->
    outputConfigurations.add(output.toOutputConfiguration(characteristics))
@@ -1,66 +1,147 @@
package com.mrousavy.camera.frameprocessor;

import com.facebook.jni.HybridData;
import android.graphics.ImageFormat;
import android.media.Image;
import com.facebook.proguard.annotations.DoNotStrip;
import com.mrousavy.camera.parsers.PixelFormat;
import com.mrousavy.camera.parsers.Orientation;

import java.nio.ByteBuffer;

/** @noinspection JavaJniMissingFunction*/
public class Frame {
    private final HybridData mHybridData;
    private final Image image;
    private final boolean isMirrored;
    private final long timestamp;
    private final Orientation orientation;
    private int refCount = 0;

    private Frame(HybridData hybridData) {
        mHybridData = hybridData;
    public Frame(Image image, long timestamp, Orientation orientation, boolean isMirrored) {
        this.image = image;
        this.timestamp = timestamp;
        this.orientation = orientation;
        this.isMirrored = isMirrored;
    }

    @Override
    protected void finalize() throws Throwable {
        super.finalize();
        mHybridData.resetNative();
    public Image getImage() {
        return image;
    }

    /**
     * Get the width of the Frame, in its sensor orientation. (in pixels)
     */
    public native int getWidth();
    /**
     * Get the height of the Frame, in its sensor orientation. (in pixels)
     */
    public native int getHeight();
    /**
     * Get the number of bytes per row.
     * * To get the number of components per pixel you can divide this by the Frame's width.
     * * To get the total size of the byte buffer you can multiply this by the Frame's height.
     */
    public native int getBytesPerRow();
    /**
     * Get the local timestamp of this Frame. This is always monotonically increasing for each Frame.
     */
    public native long getTimestamp();
    /**
     * Get the Orientation of this Frame. The return value is the result of `Orientation.toUnionValue()`.
     */
    public native String getOrientation();
    /**
     * Return whether this Frame is mirrored or not. Frames from the front-facing Camera are often mirrored.
     */
    public native boolean getIsMirrored();
    /**
     * Get the pixel-format of this Frame. The return value is the result of `PixelFormat.toUnionValue()`.
     */
    public native String getPixelFormat();
    /**
     * Get the actual backing pixel data of this Frame using a zero-copy C++ ByteBuffer.
     */
    public native ByteBuffer getByteBuffer();
    /**
     * Get whether this Frame is still valid.
     * A Frame is valid as long as it hasn't been closed by the Frame Processor Runtime Manager
     * (either because it ran out of Frames in its queue and needs to close old ones, or because
     * a Frame Processor finished executing and you're still trying to hold onto this Frame in native)
     */
    public native boolean getIsValid();
    @SuppressWarnings("unused")
    @DoNotStrip
    public int getWidth() {
        return image.getWidth();
    }

    private native void incrementRefCount();
    private native void decrementRefCount();
    private native void close();
    @SuppressWarnings("unused")
    @DoNotStrip
    public int getHeight() {
        return image.getHeight();
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public boolean getIsValid() {
        try {
            // will throw an exception if the image is already closed
            image.getCropRect();
            // no exception thrown, image must still be valid.
            return true;
        } catch (Exception e) {
            // exception thrown, image has already been closed.
            return false;
        }
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public boolean getIsMirrored() {
        return isMirrored;
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public long getTimestamp() {
        return timestamp;
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public String getOrientation() {
        return orientation.getUnionValue();
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public String getPixelFormat() {
        PixelFormat format = PixelFormat.Companion.fromImageFormat(image.getFormat());
        return format.getUnionValue();
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public int getPlanesCount() {
        return image.getPlanes().length;
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public int getBytesPerRow() {
        return image.getPlanes()[0].getRowStride();
    }

    private static ByteBuffer byteArrayCache;

    @SuppressWarnings("unused")
    @DoNotStrip
    public ByteBuffer toByteBuffer() {
        switch (image.getFormat()) {
            case ImageFormat.YUV_420_888:
                ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
                ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
                ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
                int ySize = yBuffer.remaining();
                int uSize = uBuffer.remaining();
                int vSize = vBuffer.remaining();
                int totalSize = ySize + uSize + vSize;

                if (byteArrayCache != null) byteArrayCache.rewind();
                if (byteArrayCache == null || byteArrayCache.remaining() != totalSize) {
                    byteArrayCache = ByteBuffer.allocateDirect(totalSize);
                }

                byteArrayCache.put(yBuffer).put(uBuffer).put(vBuffer);

                return byteArrayCache;
            case ImageFormat.JPEG:
                return image.getPlanes()[0].getBuffer();
            default:
                throw new RuntimeException("Cannot convert Frame with Format " + image.getFormat() + " to byte array!");
        }
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public void incrementRefCount() {
        synchronized (this) {
            refCount++;
        }
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    public void decrementRefCount() {
        synchronized (this) {
            refCount--;
            if (refCount <= 0) {
                // If no reference is held on this Image, close it.
                image.close();
            }
        }
    }

    @SuppressWarnings("unused")
    @DoNotStrip
    private void close() {
        image.close();
    }
}
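The rewritten Frame holds its Image directly and replaces the native HybridData lifecycle with manual reference counting: anyone who wants the Frame to stay alive past the processor call must increment first and decrement when done. A minimal Kotlin usage sketch against the API above:

// Sketch: process a Frame safely with the manual refcount API.
// incrementRefCount() keeps the underlying Image open; the matching
// decrementRefCount() closes it once nobody holds a reference anymore.
fun processFrame(frame: Frame) {
  frame.incrementRefCount()
  try {
    if (frame.getIsValid()) {
      val buffer = frame.toByteBuffer() // copied into a cache for YUV, zero-copy for JPEG
      // ... inspect the pixel data here ...
    }
  } finally {
    frame.decrementRefCount() // closes the Image if refCount drops to 0
  }
}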
@@ -8,9 +8,15 @@ import com.facebook.jni.HybridData;
import com.facebook.proguard.annotations.DoNotStrip;

/**
 * Represents a JS Frame Processor. Its actual implementation is in NDK/C++.
 * Represents a JS Frame Processor
 */
public class FrameProcessor {
@SuppressWarnings("JavaJniMissingFunction") // we're using fbjni.
public final class FrameProcessor {
    /**
     * Call the JS Frame Processor function with the given Frame
     */
    public native void call(Frame frame);

    @DoNotStrip
    @Keep
    private final HybridData mHybridData;
@@ -1,18 +0,0 @@
package com.mrousavy.camera.parsers

enum class PreviewType(override val unionValue: String): JSUnionValue {
  NONE("none"),
  NATIVE("native"),
  SKIA("skia");

  companion object: JSUnionValue.Companion<PreviewType> {
    override fun fromUnionValue(unionValue: String?): PreviewType {
      return when (unionValue) {
        "none" -> NONE
        "native" -> NATIVE
        "skia" -> SKIA
        else -> NONE
      }
    }
  }
}
@@ -1,11 +0,0 @@
package com.mrousavy.camera.skia;

import com.facebook.jni.HybridData;
import com.mrousavy.camera.frameprocessor.FrameProcessor;

public class SkiaFrameProcessor extends FrameProcessor {
    // Implementation is in JSkiaFrameProcessor.cpp
    public SkiaFrameProcessor(HybridData hybridData) {
        super(hybridData);
    }
}
@@ -1,32 +0,0 @@
package com.mrousavy.camera.utils

import android.graphics.ImageFormat
import android.media.Image
import android.media.ImageReader
import android.media.ImageWriter
import java.io.Closeable

class ImageCreator(private val width: Int,
                   private val height: Int,
                   private val format: Int = ImageFormat.PRIVATE,
                   private val maxImages: Int = 3): Closeable {
  private var imageReader: ImageReader? = null
  private var imageWriter: ImageWriter? = null

  override fun close() {
    imageWriter?.close()
    imageReader?.close()
  }

  fun createImage(): Image {
    if (imageReader == null || imageWriter == null) {
      imageWriter?.close()
      imageReader?.close()

      imageReader = ImageReader.newInstance(width, height, format, maxImages)
      imageWriter = ImageWriter.newInstance(imageReader!!.surface, maxImages)
    }

    return imageWriter!!.dequeueInputImage()
  }
}
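ImageCreator (deleted here) lazily wires an ImageReader/ImageWriter pair so callers can dequeue writable Images on demand; closing it tears down both ends. A short, hedged usage sketch:

// Sketch: dequeue a writable Image from the internal ImageWriter loopback.
val creator = ImageCreator(width = 1280, height = 720)
val image = creator.createImage()
try {
  // ... fill the Image's planes / hardware buffer here ...
} finally {
  image.close() // release the buffer back
}
creator.close() // closes the underlying ImageWriter and ImageReader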
@@ -41,6 +41,7 @@ class RecordingSession(context: Context,
  val surface: Surface = MediaCodec.createPersistentInputSurface()

  init {

    outputFile = File.createTempFile("mrousavy", fileType.toExtension(), context.cacheDir)

    Log.i(TAG, "Creating RecordingSession for ${outputFile.absolutePath}")
@@ -53,7 +54,7 @@ class RecordingSession(context: Context,
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
    recorder.setOutputFile(outputFile.absolutePath)
    recorder.setVideoEncodingBitRate(VIDEO_BIT_RATE)
    recorder.setVideoSize(size.width, size.height)
    recorder.setVideoSize(size.height, size.width)
    if (fps != null) recorder.setVideoFrameRate(fps)

    Log.i(TAG, "Using $codec Video Codec..")
@@ -66,7 +67,7 @@ class RecordingSession(context: Context,
      recorder.setAudioChannels(AUDIO_CHANNELS)
    }
    recorder.setInputSurface(surface)
    recorder.setOrientationHint(orientation.toDegrees())
    //recorder.setOrientationHint(orientation.toDegrees())

    recorder.setOnErrorListener { _, what, extra ->
      Log.e(TAG, "MediaRecorder Error: $what ($extra)")
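Two orientation-related changes land here: setVideoSize now receives the swapped (height, width) pair and setOrientationHint is commented out, i.e. rotation is baked into the encoded frame size instead of being written as container metadata. The two options, sketched side by side in Kotlin (hedged; not the repo's final logic):

import android.media.MediaRecorder
import android.util.Size

// Sketch: two ways to get an upright portrait recording out of MediaRecorder.
fun configureVideoSize(recorder: MediaRecorder, size: Size, orientationDegrees: Int, useHint: Boolean) {
  if (useHint) {
    // Option A: record in sensor orientation and let players rotate via metadata.
    recorder.setVideoSize(size.width, size.height)
    recorder.setOrientationHint(orientationDegrees)
  } else {
    // Option B (the direction this diff takes): swap the dimensions so the
    // encoded frames themselves are portrait; no rotation metadata needed.
    recorder.setVideoSize(size.height, size.width)
  }
}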
@@ -2,12 +2,15 @@ package com.mrousavy.camera.utils

import android.graphics.ImageFormat
import android.graphics.SurfaceTexture
import android.media.ImageReader
import android.media.ImageWriter
import android.media.MediaRecorder
import android.util.Log
import android.view.Surface
import com.facebook.jni.HybridData
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.parsers.Orientation
import java.io.Closeable

/**
@@ -23,25 +26,21 @@ class VideoPipeline(val width: Int,
                    val height: Int,
                    val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
  companion object {
    private const val MAX_IMAGES = 3
    private const val MAX_IMAGES = 5
    private const val TAG = "VideoPipeline"
  }

  private val mHybridData: HybridData
  private var isActive = true

  // Input Texture
  private var openGLTextureId: Int? = null
  private var transformMatrix = FloatArray(16)

  // Processing input texture
  private var frameProcessor: FrameProcessor? = null
  private var isActive = true

  // Output 1
  private var recordingSession: RecordingSession? = null
  private var frameProcessor: FrameProcessor? = null
  private var imageReader: ImageReader? = null

  // Output 2
  private var previewSurface: Surface? = null
  private var recordingSession: RecordingSession? = null

  // Input
  private val surfaceTexture: SurfaceTexture
@@ -58,6 +57,8 @@ class VideoPipeline(val width: Int,
  override fun close() {
    synchronized(this) {
      isActive = false
      imageReader?.close()
      imageReader = null
      frameProcessor = null
      recordingSession = null
      surfaceTexture.release()
@@ -90,6 +91,21 @@ class VideoPipeline(val width: Int,
    }
  }

  private fun getImageReader(): ImageReader {
    val imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
    imageReader.setOnImageAvailableListener({ reader ->
      Log.i("VideoPipeline", "ImageReader::onImageAvailable!")
      val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener

      // TODO: Get correct orientation and isMirrored
      val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
      frame.incrementRefCount()
      frameProcessor?.call(frame)
      frame.decrementRefCount()
    }, null)
    return imageReader
  }

  /**
   * Configures the Pipeline to also call the given [FrameProcessor].
   * * If the [frameProcessor] is `null`, this output channel will be removed.
@@ -102,11 +118,20 @@ class VideoPipeline(val width: Int,
      this.frameProcessor = frameProcessor

      if (frameProcessor != null) {
        // Configure OpenGL pipeline to stream Frames into the Frame Processor (CPU pixel access)
        setFrameProcessor(frameProcessor)
        if (this.imageReader == null) {
          // 1. Create new ImageReader that just calls the Frame Processor
          this.imageReader = getImageReader()
        }

        // 2. Configure OpenGL pipeline to stream Frames into the ImageReader's surface
        setFrameProcessorOutputSurface(imageReader!!.surface)
      } else {
        // Configure OpenGL pipeline to stop streaming Frames into a Frame Processor
        removeFrameProcessor()
        // 1. Configure OpenGL pipeline to stop streaming Frames into the ImageReader's surface
        removeFrameProcessorOutputSurface()

        // 2. Close the ImageReader
        this.imageReader?.close()
        this.imageReader = null
      }
    }
  }
@@ -131,27 +156,12 @@ class VideoPipeline(val width: Int,
    }
  }

  fun setPreviewOutput(surface: Surface?) {
    synchronized(this) {
      Log.i(TAG, "Setting Preview Output...")
      if (surface != null) {
        setPreviewOutputSurface(surface)
        this.previewSurface = surface
      } else {
        removePreviewOutputSurface()
        this.previewSurface = null
      }
    }
  }

  private external fun getInputTextureId(): Int
  private external fun onBeforeFrame()
  private external fun onFrame(transformMatrix: FloatArray)
  private external fun setFrameProcessor(frameProcessor: FrameProcessor)
  private external fun removeFrameProcessor()
  private external fun setFrameProcessorOutputSurface(surface: Any)
  private external fun removeFrameProcessorOutputSurface()
  private external fun setRecordingSessionOutputSurface(surface: Any)
  private external fun removeRecordingSessionOutputSurface()
  private external fun setPreviewOutputSurface(surface: Any)
  private external fun removePreviewOutputSurface()
  private external fun initHybrid(width: Int, height: Int): HybridData
}
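Taken together, VideoPipeline now exposes three independent output channels (the Frame Processor via the internal ImageReader, the RecordingSession, and the preview Surface), each toggled by passing a value or null. A rough usage sketch; myFrameProcessor, myRecordingSession and previewSurface are assumed to exist:

// Sketch: wiring and tearing down the three VideoPipeline output channels.
val pipeline = VideoPipeline(width = 1920, height = 1080)

// Enable: each setter attaches the corresponding output surface natively.
pipeline.setFrameProcessorOutput(myFrameProcessor) // creates the internal ImageReader
pipeline.setRecordingSessionOutput(myRecordingSession)
pipeline.setPreviewOutput(previewSurface)

// Disable: passing null detaches the surface and releases its resources.
pipeline.setFrameProcessorOutput(null)
pipeline.setRecordingSessionOutput(null)
pipeline.setPreviewOutput(null)

pipeline.close() // releases the SurfaceTexture and any remaining outputs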